From 3358fcd0b3e761eab00d109e6525e7f66fa120e3 Mon Sep 17 00:00:00 2001
From: Jordan Petridis
Date: Mon, 15 Jan 2018 11:03:40 +0200
Subject: [PATCH] hammond_data::Feed: general cleanup of no longer needed stuff.

---
 hammond-data/benches/bench.rs          |  2 +-
 hammond-data/src/feed.rs               | 45 +++++++++++---------------
 hammond-data/src/models/insertables.rs |  1 +
 hammond-data/src/models/queryables.rs  |  5 ++-
 hammond-downloader/src/downloader.rs   |  3 +-
 hammond-gtk/src/manager.rs             |  3 +-
 hammond-gtk/src/utils.rs               |  3 +-
 7 files changed, 26 insertions(+), 36 deletions(-)

diff --git a/hammond-data/benches/bench.rs b/hammond-data/benches/bench.rs
index ffbaed2..6f79f9e 100644
--- a/hammond-data/benches/bench.rs
+++ b/hammond-data/benches/bench.rs
@@ -62,7 +62,7 @@ fn index_urls() {
         })
         .collect();
 
-    feeds.iter().for_each(|x| index(x));
+    feeds.iter().for_each(|x| x.index().unwrap());
 }
 
 #[bench]
diff --git a/hammond-data/src/feed.rs b/hammond-data/src/feed.rs
index 0add944..01e3840 100644
--- a/hammond-data/src/feed.rs
+++ b/hammond-data/src/feed.rs
@@ -9,11 +9,14 @@ use rss;
 
 use dbqueries;
 use parser;
-use models::queryables::{Episode, Podcast, Source};
+use models::queryables::{Podcast, Source};
 use models::insertables::{NewEpisode, NewPodcast};
 use database::connection;
 use errors::*;
 
+#[cfg(test)]
+use models::queryables::Episode;
+
 #[derive(Debug)]
 /// Wrapper struct that hold a `Source` and the `rss::Channel`
 /// that corresponds to the `Source.uri` field.
@@ -29,11 +32,8 @@ impl Feed {
     }
 
     /// Constructor that consumes a `Source` and a `rss::Channel` returns a `Feed` struct.
-    pub fn from_channel_source(chan: rss::Channel, s: i32) -> Feed {
-        Feed {
-            channel: chan,
-            source_id: s,
-        }
+    pub fn from_channel_source(channel: rss::Channel, source_id: i32) -> Feed {
+        Feed { channel, source_id }
     }
 
     /// docs
@@ -85,15 +85,15 @@ impl Feed {
         self.parse_channel().into_podcast()
     }
 
-    #[allow(dead_code)]
+    #[cfg(test)]
+    /// This returns only the episodes in the xml feed.
+    /// Used for unit-tests only.
     fn get_episodes(&self) -> Result<Vec<Episode>> {
         let pd = self.get_podcast()?;
         let eps = self.parse_channel_items(&pd);
         let db = connection();
         let con = db.get()?;
 
-        // TODO: Make it parallel
-        // This returns only the episodes in the xml feed.
         let episodes: Vec<_> = eps.into_iter()
             .filter_map(|ep| ep.into_episode(&con).ok())
             .collect();
@@ -102,25 +102,12 @@
     }
 }
 
-/// Handle the indexing of a `Feed` into the Database.
-pub fn index(feed: &Feed) {
-    if let Err(err) = feed.index() {
-        error!("Error While trying to update the database.");
-        error!("Error msg: {}", err);
-    };
-}
-
-/// Consume a `Source` and return a `Feed`.
-fn fetch(source: &mut Source) -> Result<Feed> {
-    Feed::from_source(source)
-}
-
 /// Index a "list" of `Source`s.
 pub fn index_loop<S: IntoParallelIterator<Item = Source>>(sources: S) {
     sources
         .into_par_iter()
-        .filter_map(|mut x| {
-            let foo = fetch(&mut x);
+        .filter_map(|mut source| {
+            let foo = Feed::from_source(&mut source);
             if let Err(err) = foo {
                 error!("Error: {}", err);
                 None
@@ -128,7 +115,13 @@ pub fn index_loop<S: IntoParallelIterator<Item = Source>>(sources: S) {
                 foo.ok()
             }
         })
-        .for_each(|x| index(&x));
+        // Handle the indexing of a `Feed` into the Database.
+        .for_each(|feed| {
+            if let Err(err) = feed.index() {
+                error!("Error While trying to update the database.");
+                error!("Error msg: {}", err);
+            }
+        });
 
     info!("Indexing done.");
 }
@@ -208,7 +201,7 @@ mod tests {
             .collect();
 
         // Index the channels
-        feeds.par_iter().for_each(|x| index(&x));
+        feeds.par_iter().for_each(|x| x.index().unwrap());
 
         // Assert the index rows equal the controlled results
         assert_eq!(dbqueries::get_sources().unwrap().len(), 4);
diff --git a/hammond-data/src/models/insertables.rs b/hammond-data/src/models/insertables.rs
index 01d2e10..972cd41 100644
--- a/hammond-data/src/models/insertables.rs
+++ b/hammond-data/src/models/insertables.rs
@@ -185,6 +185,7 @@ impl Update for NewEpisode {
 
 impl NewEpisode {
     // TODO: Refactor into batch indexes instead.
+    #[allow(dead_code)]
     pub(crate) fn into_episode(self, con: &SqliteConnection) -> Result<Episode> {
         self.index(con)?;
         Ok(dbqueries::get_episode_from_pk(
diff --git a/hammond-data/src/models/queryables.rs b/hammond-data/src/models/queryables.rs
index 52d53fd..0b93a3c 100644
--- a/hammond-data/src/models/queryables.rs
+++ b/hammond-data/src/models/queryables.rs
@@ -13,7 +13,7 @@ use hyper::header::{ETag, EntityTag, HttpDate, IfModifiedSince, IfNoneMatch, Las
 use hyper_tls::HttpsConnector;
 
 use futures::prelude::*;
-// use futures::future::ok;
+// use futures::future::{ok, result};
 
 use schema::{episode, podcast, source};
 use feed::Feed;
@@ -723,7 +723,6 @@ impl Source {
         ignore_etags: bool,
     ) -> Box<Future<Item = Feed, Error = Error>> {
         let id = self.id();
-        // TODO: make URI future
         let feed = request_constructor(&self, client, ignore_etags)
             .map_err(From::from)
             .and_then(move |res| {
@@ -755,7 +754,7 @@ fn request_constructor(
     client: &Client<HttpsConnector<HttpConnector>>,
     ignore_etags: bool,
 ) -> Box<Future<Item = Response, Error = hyper::Error>> {
-    // FIXME: remove unwrap
+    // FIXME: remove unwrap somehow
     let uri = Uri::from_str(&s.uri()).unwrap();
     let mut req = Request::new(Method::Get, uri);
diff --git a/hammond-downloader/src/downloader.rs b/hammond-downloader/src/downloader.rs
index 09d2e44..437624c 100644
--- a/hammond-downloader/src/downloader.rs
+++ b/hammond-downloader/src/downloader.rs
@@ -217,7 +217,6 @@ pub fn cache_image(pd: &PodcastCoverQuery) -> Option<String> {
 mod tests {
     use super::*;
     use hammond_data::Source;
-    use hammond_data::feed::index;
     use hammond_data::dbqueries;
 
     #[test]
@@ -234,7 +233,7 @@
         // Convert Source it into a Feed and index it
         let feed = source.into_feed(true).unwrap();
-        index(&feed);
+        feed.index().unwrap();
 
         // Get the Podcast
         let pd = dbqueries::get_podcast_from_source_id(sid).unwrap().into();
diff --git a/hammond-gtk/src/manager.rs b/hammond-gtk/src/manager.rs
index f6ba174..fe9edd6 100644
--- a/hammond-gtk/src/manager.rs
+++ b/hammond-gtk/src/manager.rs
@@ -120,7 +120,6 @@ mod tests {
 
     use hammond_data::database;
     use hammond_data::utils::get_download_folder;
-    use hammond_data::feed::*;
     use hammond_data::{Episode, Source};
     use hammond_data::dbqueries;
 
@@ -144,7 +143,7 @@ mod tests {
         // Convert Source it into a Feed and index it
         let feed = source.into_feed(true).unwrap();
-        index(&feed);
+        feed.index().unwrap();
 
         // Get the Podcast
         let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();
diff --git a/hammond-gtk/src/utils.rs b/hammond-gtk/src/utils.rs
index 207cb79..f73230e 100644
--- a/hammond-gtk/src/utils.rs
+++ b/hammond-gtk/src/utils.rs
@@ -81,7 +81,6 @@ pub fn get_pixbuf_from_path(pd: &PodcastCoverQuery, size: u32) -> Option<Pixbuf
 #[cfg(test)]
 mod tests {
     use hammond_data::Source;
-    use hammond_data::feed::index;
     use hammond_data::dbqueries;
 
     use super::*;
@@ -99,7 +98,7 @@
         // Convert Source it into a Feed and index it
         let feed = source.into_feed(true).unwrap();
-        index(&feed);
+        feed.index().unwrap();
 
         // Get the Podcast
         let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();