From 5f3d2d5bddf575fff750b5103b830e85935a88f1 Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Sat, 21 Oct 2017 21:21:06 +0300 Subject: [PATCH] Minor readability improvements. --- hammond-data/src/dbqueries.rs | 4 +- hammond-data/src/index_feed.rs | 78 ++++++++++++++++------------------ hammond-gtk/src/utils.rs | 4 +- 3 files changed, 41 insertions(+), 45 deletions(-) diff --git a/hammond-data/src/dbqueries.rs b/hammond-data/src/dbqueries.rs index 7f0ff31..bd726d5 100644 --- a/hammond-data/src/dbqueries.rs +++ b/hammond-data/src/dbqueries.rs @@ -39,7 +39,7 @@ pub fn get_episodes_with_limit(con: &SqliteConnection, limit: u32) -> QueryResul let eps = episode .order(epoch.desc()) - .limit(limit as i64) + .limit(i64::from(limit)) .load::(con); eps } @@ -68,7 +68,7 @@ pub fn get_pd_episodes_limit( let eps = Episode::belonging_to(parent) .order(epoch.desc()) - .limit(limit as i64) + .limit(i64::from(limit)) .load::(con); eps } diff --git a/hammond-data/src/index_feed.rs b/hammond-data/src/index_feed.rs index dbd35be..96f0021 100644 --- a/hammond-data/src/index_feed.rs +++ b/hammond-data/src/index_feed.rs @@ -14,6 +14,9 @@ use models::*; use errors::*; use feedparser; +#[derive(Debug)] +pub struct Feed(pub reqwest::Response, pub Source); + fn index_source(con: &SqliteConnection, foo: &NewSource) -> Result<()> { match dbqueries::load_source(con, foo.uri) { Ok(_) => Ok(()), @@ -79,15 +82,16 @@ fn insert_return_episode(con: &SqliteConnection, ep: &NewEpisode) -> Result>, force: bool) -> Result<()> { - let mut f = fetch_feeds(&db.clone(), force)?; + let mut f = fetch_feeds(db, force)?; - f.par_iter_mut().for_each(|&mut (ref mut req, ref source)| { - let e = complete_index_from_source(req, source, &db.clone()); - if e.is_err() { - error!("Error While trying to update the database."); - error!("Error msg: {}", e.unwrap_err()); - }; - }); + f.par_iter_mut() + .for_each(|&mut Feed(ref mut req, ref source)| { + let e = complete_index_from_source(req, source, db); + if e.is_err() 
{ + error!("Error While trying to update the database."); + error!("Error msg: {}", e.unwrap_err()); + }; + }); info!("Indexing done."); Ok(()) } @@ -118,7 +122,7 @@ fn complete_index( let pd = index_channel(&tempdb, chan, parent)?; drop(tempdb); - index_channel_items(&connection.clone(), chan.items(), &pd)?; + index_channel_items(connection, chan.items(), &pd); Ok(()) } @@ -130,44 +134,32 @@ fn index_channel(db: &SqliteConnection, chan: &rss::Channel, parent: &Source) -> Ok(pd) } -fn index_channel_items( - mutex: &Arc>, - i: &[rss::Item], - pd: &Podcast, -) -> Result<()> { - let foo: Vec<_> = i.par_iter() +fn index_channel_items(connection: &Arc>, it: &[rss::Item], pd: &Podcast) { + it.par_iter() .map(|x| feedparser::parse_episode(x, pd.id())) - .collect(); - - foo.par_iter().for_each(|x| { - let dbmutex = mutex.clone(); - let db = dbmutex.lock().unwrap(); - let e = index_episode(&db, x); - if let Err(err) = e { - error!("Failed to index episode: {:?}.", x); - error!("Error msg: {}", err); - }; - }); - Ok(()) + .for_each(|x| { + let db = connection.lock().unwrap(); + let e = index_episode(&db, &x); + if let Err(err) = e { + error!("Failed to index episode: {:?}.", x); + error!("Error msg: {}", err); + }; + }); } // Maybe this can be refactored into an Iterator for lazy evaluation. 
-pub fn fetch_feeds( - connection: &Arc>, - force: bool, -) -> Result> { +pub fn fetch_feeds(connection: &Arc>, force: bool) -> Result> { let tempdb = connection.lock().unwrap(); let mut feeds = dbqueries::get_sources(&tempdb)?; drop(tempdb); - let results: Vec<(reqwest::Response, Source)> = feeds + let results: Vec = feeds .par_iter_mut() .filter_map(|x| { - let dbmutex = connection.clone(); - let db = dbmutex.lock().unwrap(); + let db = connection.lock().unwrap(); let l = refresh_source(&db, x, force); - if let Ok(res) = l { - Some(res) + if l.is_ok() { + l.ok() } else { error!("Error While trying to fetch from source: {}.", x.uri()); error!("Error msg: {}", l.unwrap_err()); @@ -183,7 +175,7 @@ pub fn refresh_source( connection: &SqliteConnection, feed: &mut Source, force: bool, -) -> Result<(reqwest::Response, Source)> { +) -> Result { use reqwest::header::{ETag, EntityTag, Headers, HttpDate, LastModified}; let client = reqwest::Client::new(); @@ -197,7 +189,9 @@ pub fn refresh_source( } if let Some(foo) = feed.last_modified() { - headers.set(LastModified(foo.parse::()?)); + if let Ok(x) = foo.parse::() { + headers.set(LastModified(x)); + } } // FIXME: I have fucked up somewhere here. @@ -219,7 +213,7 @@ pub fn refresh_source( // }; feed.update_etag(connection, &req)?; - Ok((req, feed.clone())) + Ok(Feed(req, feed.clone())) } #[cfg(test)] @@ -274,10 +268,10 @@ mod tests { index_source(&tempdb, &NewSource::new_with_uri(feed)).unwrap() }); - index_loop(&db.clone(), true).unwrap(); + index_loop(&db, true).unwrap(); // Run again to cover Unique constrains erros. 
- index_loop(&db.clone(), true).unwrap(); + index_loop(&db, true).unwrap(); } #[test] @@ -318,7 +312,7 @@ mod tests { let chan = rss::Channel::read_from(BufReader::new(feed)).unwrap(); // Index the channel - complete_index(&m.clone(), &chan, &s).unwrap(); + complete_index(&m, &chan, &s).unwrap(); }); // Assert the index rows equal the controlled results diff --git a/hammond-gtk/src/utils.rs b/hammond-gtk/src/utils.rs index de4c724..03f5368 100644 --- a/hammond-gtk/src/utils.rs +++ b/hammond-gtk/src/utils.rs @@ -6,6 +6,7 @@ use gtk; // use gtk::prelude::*; use hammond_data; +use hammond_data::index_feed::Feed; use hammond_data::models::Source; use diesel::prelude::SqliteConnection; @@ -40,7 +41,8 @@ pub fn refresh_feed(db: &Arc>, stack: >k::Stack, sourc let foo_ = hammond_data::index_feed::refresh_source(&db_, &mut source_, false); drop(db_); - if let Ok((mut req, s)) = foo_ { + if let Ok(x) = foo_ { + let Feed(mut req, s) = x; let s = hammond_data::index_feed::complete_index_from_source(&mut req, &s, &db_clone); if s.is_err() { error!("Error While trying to update the database.");