From f1d3cd5e25e64c0542dff270a0136a8e3eb25313 Mon Sep 17 00:00:00 2001
From: Jordan Petridis
Date: Mon, 9 Oct 2017 16:49:00 +0300
Subject: [PATCH] Applied some clippy suggestions.

---
 hammond-data/Cargo.toml              |  2 +-
 hammond-data/src/dbqueries.rs        |  2 ++
 hammond-data/src/feedparser.rs       |  2 +-
 hammond-data/src/index_feed.rs       | 19 ++++++++++---------
 hammond-data/src/lib.rs              |  1 +
 hammond-downloader/src/downloader.rs | 13 ++++++-------
 6 files changed, 21 insertions(+), 18 deletions(-)

diff --git a/hammond-data/Cargo.toml b/hammond-data/Cargo.toml
index b95c3fc..1e7b2a6 100644
--- a/hammond-data/Cargo.toml
+++ b/hammond-data/Cargo.toml
@@ -20,4 +20,4 @@ dotenv = "*"
 
 [dev-dependencies]
 tempdir = "0.3.5"
-rand = "0.3.16"
\ No newline at end of file
+rand = "0.3.16"
diff --git a/hammond-data/src/dbqueries.rs b/hammond-data/src/dbqueries.rs
index 2ae0981..f2079e9 100644
--- a/hammond-data/src/dbqueries.rs
+++ b/hammond-data/src/dbqueries.rs
@@ -1,3 +1,5 @@
+#![cfg_attr(feature = "cargo-clippy", allow(let_and_return))]
+
 use diesel::prelude::*;
 
 use models::{Episode, Podcast, Source};
diff --git a/hammond-data/src/feedparser.rs b/hammond-data/src/feedparser.rs
index ed080ee..369c913 100644
--- a/hammond-data/src/feedparser.rs
+++ b/hammond-data/src/feedparser.rs
@@ -20,7 +20,7 @@ pub fn parse_podcast(chan: &Channel, source_id: i32) -> Result<NewPodcast> {
-pub fn parse_episode<'a>(item: &'a Item, parent_id: i32) -> Result<NewEpisode<'a>> {
+pub fn parse_episode(item: &Item, parent_id: i32) -> Result<NewEpisode> {
     let title = item.title();
     let description = item.description();
     let guid = item.guid().map(|x| x.value());
diff --git a/hammond-data/src/index_feed.rs b/hammond-data/src/index_feed.rs
index 7e46239..6f60b63 100644
--- a/hammond-data/src/index_feed.rs
+++ b/hammond-data/src/index_feed.rs
@@ -1,4 +1,5 @@
 #![allow(dead_code)]
+#![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
 
 use diesel::prelude::*;
 use diesel;
@@ -39,7 +40,7 @@ fn index_podcast(con: &SqliteConnection, pd: &NewPodcast) -> Result<()> {
 }
 
 fn index_episode(con: &SqliteConnection, ep: &NewEpisode) -> Result<()> {
-    match dbqueries::load_episode(con, &ep.uri.unwrap()) {
+    match dbqueries::load_episode(con, ep.uri.unwrap()) {
         Ok(mut foo) => if foo.title() != ep.title
             || foo.published_date() != ep.published_date.as_ref().map(|x| x.as_str())
         {
@@ -74,7 +75,7 @@ fn insert_return_podcast(con: &SqliteConnection, pd: &NewPodcast) -> Result<Podcast> {
 
 fn insert_return_episode(con: &SqliteConnection, ep: &NewEpisode) -> Result<Episode> {
     index_episode(con, ep)?;
-    Ok(dbqueries::load_episode(con, &ep.uri.unwrap())?)
+    Ok(dbqueries::load_episode(con, ep.uri.unwrap())?)
 }
 
 pub fn index_loop(db: SqliteConnection, force: bool) -> Result<()> {
@@ -105,18 +106,18 @@ fn complete_index_from_source(
     req.read_to_string(&mut buf)?;
     let chan = rss::Channel::from_str(&buf)?;
 
-    complete_index(mutex, chan, &source)?;
+    complete_index(mutex, &chan, source)?;
 
     Ok(())
 }
 
 fn complete_index(
     mutex: Arc<Mutex<SqliteConnection>>,
-    chan: rss::Channel,
+    chan: &rss::Channel,
     parent: &Source,
 ) -> Result<()> {
     let tempdb = mutex.lock().unwrap();
-    let pd = index_channel(&tempdb, &chan, parent)?;
+    let pd = index_channel(&tempdb, chan, parent)?;
     drop(tempdb);
 
     index_channel_items(mutex.clone(), chan.items(), &pd)?;
@@ -125,7 +126,7 @@ fn complete_index(
 }
 
 fn index_channel(db: &SqliteConnection, chan: &rss::Channel, parent: &Source) -> Result<Podcast> {
-    let pd = feedparser::parse_podcast(&chan, parent.id())?;
+    let pd = feedparser::parse_podcast(chan, parent.id())?;
     // Convert NewPodcast to Podcast
     let pd = insert_return_podcast(db, &pd)?;
     Ok(pd)
@@ -138,13 +139,13 @@ fn index_channel_items(
     i: &[rss::Item],
     pd: &Podcast,
 ) -> Result<()> {
     let foo: Vec<_> = i.par_iter()
-        .map(|x| feedparser::parse_episode(&x, pd.id()).unwrap())
+        .map(|x| feedparser::parse_episode(x, pd.id()).unwrap())
         .collect();
 
     foo.par_iter().for_each(|x| {
         let dbmutex = mutex.clone();
         let db = dbmutex.lock().unwrap();
-        index_episode(&db, &x).unwrap();
+        index_episode(&db, x).unwrap();
     });
     Ok(())
 }
@@ -315,7 +316,7 @@ mod tests {
             let chan = rss::Channel::read_from(BufReader::new(feed)).unwrap();
 
             // Index the channel
-            complete_index(m.clone(), chan, &s).unwrap();
+            complete_index(m.clone(), &chan, &s).unwrap();
         })
         .fold((), |(), _| ());
 
diff --git a/hammond-data/src/lib.rs b/hammond-data/src/lib.rs
index da76500..6faf278 100644
--- a/hammond-data/src/lib.rs
+++ b/hammond-data/src/lib.rs
@@ -1,4 +1,5 @@
 #![recursion_limit = "1024"]
+#![cfg_attr(feature = "cargo-clippy", allow(blacklisted_name))]
 
 #[macro_use]
 extern crate error_chain;
diff --git a/hammond-downloader/src/downloader.rs b/hammond-downloader/src/downloader.rs
index 5b02e64..40d0055 100644
--- a/hammond-downloader/src/downloader.rs
+++ b/hammond-downloader/src/downloader.rs
@@ -62,12 +62,11 @@ pub fn latest_dl(connection: &SqliteConnection, limit: u32) -> Result<()> {
         // TODO when for_each reaches stable:
         // Remove all the ugly folds(_) and replace map() with for_each().
         .map(|x| -> Result<()> {
-            let mut eps;
-            if limit == 0 {
-                eps = dbqueries::get_pd_episodes(connection, &x)?;
+            let mut eps = if limit == 0 {
+                dbqueries::get_pd_episodes(connection, x)?
             } else {
-                eps = dbqueries::get_pd_episodes_limit(connection, &x, limit)?;
-            }
+                dbqueries::get_pd_episodes_limit(connection, x, limit)?
+            };
 
             // It might be better to make it a hash of the title
             let dl_fold = format!("{}/{}", DL_DIR.to_str().unwrap(), x.title());
@@ -79,7 +78,7 @@ pub fn latest_dl(connection: &SqliteConnection, limit: u32) -> Result<()> {
             eps.iter_mut()
                 .map(|y| -> Result<()> {
                     // Check if its alrdy downloaded
-                    if let Some(_) = y.local_uri() {
+                    if y.local_uri().is_some() {
                         // Not idiomatic but I am still fighting the borrow-checker.
                         if Path::new(y.local_uri().unwrap().to_owned().as_str()).exists() {
                             return Ok(());
@@ -90,7 +89,7 @@ pub fn latest_dl(connection: &SqliteConnection, limit: u32) -> Result<()> {
                     };
 
                     // Unreliable and hacky way to extract the file extension from the url.
-                    let ext = y.uri().split(".").last().unwrap().to_owned();
+                    let ext = y.uri().split('.').last().unwrap().to_owned();
 
                     // Construct the download path.
                     let dlpath = format!("{}/{}.{}", dl_fold, y.title().unwrap().to_owned(), ext);
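
For reference, here is a self-contained sketch of the idioms the lints above ask for: a char pattern for split(), is_some() in place of `if let Some(_)`, and a single `let` bound to an `if` expression instead of the declare-then-assign shape in latest_dl(). Everything in it (Episode, extension, is_downloaded, take_eps) is a hypothetical stand-in for illustration, not hammond's actual API:

// Illustrative stand-ins only; `Episode` here is not hammond's model.
struct Episode {
    uri: String,
    local_uri: Option<String>,
}

// single_char_pattern: `split('.')` instead of `split(".")`;
// a char pattern skips the substring-search machinery.
fn extension(ep: &Episode) -> Option<&str> {
    ep.uri.split('.').last()
}

// clippy suggests `is_some()` over `if let Some(_) = ...`:
// same check, less noise.
fn is_downloaded(ep: &Episode) -> bool {
    ep.local_uri.is_some()
}

// The `let mut eps; if .. { eps = ..; } else { eps = ..; }` shape
// collapses into one `let` bound to the `if` expression.
fn take_eps(limit: u32, all: Vec<Episode>) -> Vec<Episode> {
    let eps = if limit == 0 {
        all
    } else {
        all.into_iter().take(limit as usize).collect()
    };
    eps.into_iter().filter(|e| !is_downloaded(e)).collect()
}

fn main() {
    let eps = vec![
        Episode { uri: "http://example.com/one.mp3".into(), local_uri: None },
        Episode { uri: "http://example.com/two.ogg".into(), local_uri: Some("/tmp/two.ogg".into()) },
    ];
    for ep in take_eps(0, eps) {
        println!("{:?}", extension(&ep));
    }
}

Note that the `cfg_attr(feature = "cargo-clippy", allow(...))` attributes added in the patch silence specific lints only when clippy itself runs, so ordinary builds are unaffected.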