diff --git a/Cargo.lock b/Cargo.lock
index 64d6542..4a16cb0 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -564,10 +564,12 @@ dependencies = [
  "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
  "hammond-data 0.1.0",
  "hyper 0.11.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hyper-tls 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
  "mime_guess 1.8.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "reqwest 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-core 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -584,6 +586,8 @@ dependencies = [
  "hammond-data 0.1.0",
  "hammond-downloader 0.1.0",
  "humansize 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hyper 0.11.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hyper-tls 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
  "loggerv 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -591,6 +595,7 @@ dependencies = [
  "rayon 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "send-cell 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-core 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
diff --git a/hammond-data/benches/bench.rs b/hammond-data/benches/bench.rs
index 8e665a0..d019282 100644
--- a/hammond-data/benches/bench.rs
+++ b/hammond-data/benches/bench.rs
@@ -32,25 +32,6 @@ static URLS: &[(&[u8], &str)] = &[
     (LAS, "https://feeds2.feedburner.com/TheLinuxActionShow"),
 ];
 
-static URLS2: &[&str] = &[
-    "https://www.pcper.com/rss/podcasts-mp3.rss",
-    "https://feeds.feedburner.com/InterceptedWithJeremyScahill",
-    "http://www.badvoltage.org/feed/ogg/",
-    "https://www.theguardian.com/news/series/the-audio-long-read/podcast.xml",
-    "http://feeds.feedburner.com/coderradiomp3",
-    "https://rss.art19.com/steal-the-stars",
-    "https://feeds.mozilla-podcasts.org/irl",
-    "http://economicupdate.libsyn.com/rss",
-    "http://feeds.feedburner.com/linuxunplugged",
-    "http://ubuntupodcast.org/feed/ogg/",
-    "http://www.newrustacean.com/feed.xml",
-    "http://feeds.propublica.org/propublica/podcast",
-    "https://rss.acast.com/thetipoff",
-    "http://feeds.soundcloud.com/users/soundcloud:users:277306156/sounds.rss",
-    "http://revolutionspodcast.libsyn.com/rss/",
-    "https://www.greaterthancode.com/feed/podcast",
-];
-
 fn index_urls() {
     let feeds: Vec<_> = URLS.iter()
         .map(|&(buff, url)| {
@@ -84,24 +65,10 @@ fn bench_index_unchanged_feeds(b: &mut Bencher) {
     });
 }
 
-#[bench]
-fn bench_get_normal_feeds(b: &mut Bencher) {
-    // Index first so it will only bench the comparison test case.
-    truncate_db().unwrap();
-    URLS2.iter().for_each(|url| {
-        Source::from_url(url).unwrap();
-    });
-
-    b.iter(|| {
-        let sources = hammond_data::dbqueries::get_sources().unwrap();
-        index_loop(sources);
-    });
-}
-
 #[bench]
 fn bench_get_future_feeds(b: &mut Bencher) {
     truncate_db().unwrap();
-    URLS2.iter().for_each(|url| {
+    URLS.iter().for_each(|&(_, url)| {
         Source::from_url(url).unwrap();
     });
 
diff --git a/hammond-data/src/database.rs b/hammond-data/src/database.rs
index 0e2320c..7cd3e6e 100644
--- a/hammond-data/src/database.rs
+++ b/hammond-data/src/database.rs
@@ -38,7 +38,7 @@ lazy_static! {
 }
 
 /// Get an r2d2 SqliteConnection.
-pub fn connection() -> Pool {
+pub(crate) fn connection() -> Pool {
     POOL.clone()
 }
 
diff --git a/hammond-data/src/dbqueries.rs b/hammond-data/src/dbqueries.rs
index 0f36c10..1493ce0 100644
--- a/hammond-data/src/dbqueries.rs
+++ b/hammond-data/src/dbqueries.rs
@@ -6,7 +6,8 @@ use diesel::prelude::*;
 
 use database::connection;
 use errors::*;
-use models::{Episode, EpisodeCleanerQuery, EpisodeWidgetQuery, Podcast, PodcastCoverQuery, Source};
+use models::{Episode, EpisodeCleanerQuery, EpisodeMinimal, EpisodeWidgetQuery, Podcast,
+             PodcastCoverQuery, Source};
 
 pub fn get_sources() -> Result<Vec<Source>> {
     use schema::source::dsl::*;
@@ -238,13 +239,29 @@ pub fn get_podcast_from_source_id(sid: i32) -> Result<Podcast> {
         .map_err(From::from)
 }
 
-pub fn get_episode_from_pk(con: &SqliteConnection, title_: &str, pid: i32) -> QueryResult<Episode> {
+pub fn get_episode_from_pk(title_: &str, pid: i32) -> Result<Episode> {
     use schema::episode::dsl::*;
+    let db = connection();
+    let con = db.get()?;
 
     episode
         .filter(title.eq(title_))
         .filter(podcast_id.eq(pid))
         .get_result::<Episode>(&*con)
+        .map_err(From::from)
+}
+
+pub fn get_episode_minimal_from_pk(title_: &str, pid: i32) -> Result<EpisodeMinimal> {
+    use schema::episode::dsl::*;
+    let db = connection();
+    let con = db.get()?;
+
+    episode
+        .select((rowid, title, uri, epoch, duration, guid, podcast_id))
+        .filter(title.eq(title_))
+        .filter(podcast_id.eq(pid))
+        .get_result::<EpisodeMinimal>(&*con)
+        .map_err(From::from)
 }
 
 pub fn remove_feed(pd: &Podcast) -> Result<()> {
diff --git a/hammond-data/src/feed.rs b/hammond-data/src/feed.rs
index 78c03eb..63050ca 100644
--- a/hammond-data/src/feed.rs
+++ b/hammond-data/src/feed.rs
@@ -1,19 +1,13 @@
 //! Index Feeds.
 
-use diesel::prelude::*;
 use futures::future::*;
-use futures::prelude::*;
-use rayon::iter::IntoParallelIterator;
+// use futures::prelude::*;
+use errors::*;
+use models::{NewEpisode, NewPodcast, Podcast};
 use rayon::prelude::*;
 use rss;
-
-use database::connection;
-use dbqueries;
-use errors::*;
-use models::{NewEpisode, NewPodcast, Podcast, Source};
-
-// #[cfg(test)]
-// use models::queryables::Episode;
+// use models::{IndexState, Source};
+// use pipeline::*;
 
 #[derive(Debug)]
 /// Wrapper struct that hold a `Source` id and the `rss::Channel`
@@ -24,11 +18,6 @@ pub struct Feed {
 }
 
 impl Feed {
-    /// Constructor that consumes a `Source` and returns the corresponding `Feed` struct.
-    pub fn from_source(s: &mut Source) -> Result<Feed> {
-        s.into_feed(false)
-    }
-
     /// Constructor that consumes a `Source` and a `rss::Channel` returns a `Feed` struct.
     pub fn from_channel_source(channel: rss::Channel, source_id: i32) -> Feed {
         Feed { channel, source_id }
@@ -40,31 +29,15 @@ impl Feed {
         self.index_channel_items(&pd)
     }
 
-    /// Docs
-    // FIXME: docs
-    // FIXME: lifetime stuff
-    pub fn index_future(self) -> Box<Future<Item = (), Error = Error>> {
-        let indx = self.parse_podcast_futture()
-            .and_then(|pd| pd.into_podcast())
-            .and_then(move |pd| self.index_channel_items(&pd));
-
-        Box::new(indx)
-    }
-
     // TODO: Refactor transcactions and find a way to do it in parallel.
     fn index_channel_items(&self, pd: &Podcast) -> Result<()> {
         let episodes = self.parse_channel_items(pd);
-        let db = connection();
-        let con = db.get()?;
-        let _ = con.transaction::<(), Error, _>(|| {
-            episodes.into_iter().for_each(|x| {
-                if let Err(err) = x.index(&con) {
-                    error!("Failed to index episode: {:?}.", x.title());
-                    error!("Error msg: {}", err);
-                };
-            });
-            Ok(())
+        episodes.iter().for_each(|x| {
+            if let Err(err) = x.index() {
+                error!("Failed to index episode: {:?}.", x.title());
+                error!("Error msg: {}", err);
+            };
         });
         Ok(())
     }
@@ -73,6 +46,7 @@ impl Feed {
         NewPodcast::new(&self.channel, self.source_id)
     }
 
+    #[allow(dead_code)]
     fn parse_podcast_futture(&self) -> Box<FutureResult<NewPodcast, Error>> {
         Box::new(ok(self.parse_podcast()))
     }
@@ -86,69 +60,28 @@ impl Feed {
 
         new_episodes
     }
-
-    // This could also retrurn a FutureResult>, Error> Instead
-    #[allow(dead_code)]
-    fn parse_episodes_future(&self, pd: &Podcast) -> Box>> {
-        let episodes = self.channel
-            .items()
-            .par_iter()
-            .map(|item| result(NewEpisode::new(item, pd.id())))
-            .collect();
-
-        Box::new(episodes)
-    }
-
-    // #[cfg(test)]
-    // /// This returns only the episodes in the xml feed.
-    // fn get_episodes(&self) -> Result<Vec<Episode>> {
-    //     let pd = self.get_podcast()?;
-    //     let eps = self.parse_channel_items(&pd);
-    //     let db = connection();
-    //     let con = db.get()?;
-    //     let episodes: Vec<_> = eps.into_iter()
-    //         .filter_map(|ep| ep.into_episode(&con).ok())
-    //         .collect();
-    //     Ok(episodes)
-    // }
 }
 
+// fn parse_channel_items2(
+//     &self,
+//     pd: &Podcast,
+// ) -> (Vec<IndexState<NewEpisode>>, Vec<IndexState<NewEpisode>>) {
+//     let items = self.channel.items();
+//     let (insert, update): (Vec<_>, Vec<_>) = items
+//         .into_iter()
+//         .filter_map(|item| glue(item, pd.id()).ok())
+//         .filter(|&state| state == IndexState::NotChanged)
+//         .partition(|&state| state == IndexState::Index);
 
-/// Index a "list" of `Source`s.
-pub fn index_loop<S: IntoParallelIterator<Item = Source>>(sources: S) {
-    sources
-        .into_par_iter()
-        .filter_map(|mut source| {
-            let foo = Feed::from_source(&mut source);
-            if let Err(err) = foo {
-                error!("Error: {}", err);
-                None
-            } else {
-                foo.ok()
-            }
-        })
-        // Handle the indexing of a `Feed` into the Database.
-        .for_each(|feed| {
-            if let Err(err) = feed.index() {
-                error!("Error While trying to update the database.");
-                error!("Error msg: {}", err);
-            }
-        });
-
-    info!("Indexing done.");
-}
-
-/// Retrieves all `Sources` from the database and updates/indexes them.
-pub fn index_all() -> Result<()> {
-    let sources = dbqueries::get_sources()?;
-    index_loop(sources);
-    Ok(())
-}
+// (insert, update)
+// }
+// }
 
 #[cfg(test)]
 mod tests {
+    use Source;
     use database::truncate_db;
+    use dbqueries;
+    use pipeline;
 
     use std::fs;
     use std::io::BufReader;
@@ -169,11 +102,12 @@ mod tests {
             // Index the urls into the source table.
             Source::from_url(url).unwrap();
         });
+        let sources = dbqueries::get_sources().unwrap();
+        pipeline::pipeline(sources, true).unwrap();
 
-        index_all().unwrap();
-
+        let sources = dbqueries::get_sources().unwrap();
         // Run again to cover Unique constrains erros.
-        index_all().unwrap();
+        pipeline::pipeline(sources, true).unwrap()
     }
 
     #[test]
diff --git a/hammond-data/src/models/mod.rs b/hammond-data/src/models/mod.rs
index 1d9a4e6..b637ae0 100644
--- a/hammond-data/src/models/mod.rs
+++ b/hammond-data/src/models/mod.rs
@@ -6,7 +6,8 @@ pub(crate) mod episode;
 pub(crate) mod podcast;
 pub(crate) mod source;
 
-use diesel::prelude::*;
+// use futures::prelude::*;
+// use futures::future::*;
 
 pub(crate) use self::new_episode::{NewEpisode, NewEpisodeMinimal};
 pub(crate) use self::new_podcast::NewPodcast;
@@ -17,17 +18,20 @@ pub(crate) use self::episode::EpisodeCleanerQuery;
 pub use self::podcast::{Podcast, PodcastCoverQuery};
 pub use self::source::Source;
 
+use errors::*;
+
 #[allow(dead_code)]
+#[derive(Debug, Clone, PartialEq)]
 pub enum IndexState<T> {
     Index(T),
-    Update(T),
+    Update((T, i32)),
     NotChanged,
 }
 
 pub trait Insert {
-    fn insert(&self, &SqliteConnection) -> QueryResult<usize>;
+    fn insert(&self) -> Result<()>;
 }
 
 pub trait Update {
-    fn update(&self, &SqliteConnection, i32) -> QueryResult<usize>;
+    fn update(&self, i32) -> Result<()>;
 }
diff --git a/hammond-data/src/models/new_episode.rs b/hammond-data/src/models/new_episode.rs
index 1933de6..33f9561 100644
--- a/hammond-data/src/models/new_episode.rs
+++ b/hammond-data/src/models/new_episode.rs
@@ -1,11 +1,13 @@
-use diesel;
 use diesel::prelude::*;
+
+use diesel;
 use schema::episode;
 
 use ammonia;
 use rfc822_sanitizer::parse_from_rfc2822_with_fallback as parse_rfc822;
 use rss;
 
+use database::connection;
 use dbqueries;
 use errors::*;
 use models::{Episode, Insert, Update};
@@ -44,43 +46,49 @@ impl From for NewEpisode {
 }
 
 impl Insert for NewEpisode {
-    fn insert(&self, con: &SqliteConnection) -> QueryResult<usize> {
+    fn insert(&self) -> Result<()> {
         use schema::episode::dsl::*;
-        diesel::insert_into(episode).values(self).execute(&*con)
+        let db = connection();
+        let con = db.get()?;
+
+        info!("Indexing {:?}", self.title);
+        diesel::insert_into(episode)
+            .values(self)
+            .execute(&*con)
+            .map_err(From::from)
+            .map(|_| ())
     }
 }
 
 impl Update for NewEpisode {
-    fn update(&self, con: &SqliteConnection, episode_id: i32) -> QueryResult<usize> {
+    fn update(&self, episode_id: i32) -> Result<()> {
         use schema::episode::dsl::*;
+        let db = connection();
+        let con = db.get()?;
 
         info!("Updating {:?}", self.title);
         diesel::update(episode.filter(rowid.eq(episode_id)))
             .set(self)
             .execute(&*con)
+            .map_err(From::from)
+            .map(|_| ())
     }
 }
 
 impl NewEpisode {
-    #[allow(dead_code)]
     /// Parses an `rss::Item` into a `NewEpisode` Struct.
     pub(crate) fn new(item: &rss::Item, podcast_id: i32) -> Result<NewEpisode> {
         NewEpisodeMinimal::new(item, podcast_id).map(|ep| ep.into_new_episode(item))
     }
 
-    // TODO: Refactor into batch indexes instead.
     #[allow(dead_code)]
-    pub(crate) fn into_episode(self, con: &SqliteConnection) -> Result<Episode> {
-        self.index(con)?;
-        Ok(dbqueries::get_episode_from_pk(
-            con,
-            &self.title,
-            self.podcast_id,
-        )?)
+    pub(crate) fn into_episode(self) -> Result<Episode> {
+        self.index()?;
+        dbqueries::get_episode_from_pk(&self.title, self.podcast_id)
     }
 
-    pub(crate) fn index(&self, con: &SqliteConnection) -> QueryResult<()> {
-        let ep = dbqueries::get_episode_from_pk(con, &self.title, self.podcast_id);
+    pub(crate) fn index(&self) -> Result<()> {
+        let ep = dbqueries::get_episode_from_pk(&self.title, self.podcast_id);
 
         match ep {
             Ok(foo) => {
@@ -92,11 +100,11 @@ impl NewEpisode {
                     || foo.uri() != self.uri.as_ref().map(|s| s.as_str())
                     || foo.duration() != self.duration
                 {
-                    self.update(con, foo.rowid())?;
+                    self.update(foo.rowid())?;
                 }
             }
             Err(_) => {
-                self.insert(con)?;
+                self.insert()?;
             }
         }
         Ok(())
diff --git a/hammond-data/src/models/new_podcast.rs b/hammond-data/src/models/new_podcast.rs
index c47e855..7ba7068 100644
--- a/hammond-data/src/models/new_podcast.rs
+++ b/hammond-data/src/models/new_podcast.rs
@@ -29,20 +29,31 @@ pub(crate) struct NewPodcast {
 }
 
 impl Insert for NewPodcast {
-    fn insert(&self, con: &SqliteConnection) -> QueryResult<usize> {
+    fn insert(&self) -> Result<()> {
         use schema::podcast::dsl::*;
-        diesel::insert_into(podcast).values(self).execute(&*con)
+        let db = connection();
+        let con = db.get()?;
+
+        diesel::insert_into(podcast)
+            .values(self)
+            .execute(&*con)
+            .map(|_| ())
+            .map_err(From::from)
     }
 }
 
 impl Update for NewPodcast {
-    fn update(&self, con: &SqliteConnection, podcast_id: i32) -> QueryResult<usize> {
+    fn update(&self, podcast_id: i32) -> Result<()> {
         use schema::podcast::dsl::*;
+        let db = connection();
+        let con = db.get()?;
 
         info!("Updating {}", self.title);
         diesel::update(podcast.filter(id.eq(podcast_id)))
             .set(self)
             .execute(&*con)
+            .map(|_| ())
+            .map_err(From::from)
     }
 }
 
@@ -87,19 +98,17 @@ impl NewPodcast {
     pub(crate) fn index(&self) -> Result<()> {
         let pd = dbqueries::get_podcast_from_source_id(self.source_id);
-        let db = connection();
-        let con = db.get()?;
 
         match pd {
             Ok(foo) => {
                 if (foo.link() != self.link) || (foo.title() != self.title)
                     || (foo.image_uri() != self.image_uri.as_ref().map(|x| x.as_str()))
                 {
                     info!("NewEpisode: {:?}\n OldEpisode: {:?}", self, foo);
-                    self.update(&con, foo.id())?;
+                    self.update(foo.id())?;
                 }
             }
             Err(_) => {
-                self.insert(&con)?;
+                self.insert()?;
             }
         }
         Ok(())
diff --git a/hammond-data/src/models/new_source.rs b/hammond-data/src/models/new_source.rs
index dc572cf..8e1fef3 100644
--- a/hammond-data/src/models/new_source.rs
+++ b/hammond-data/src/models/new_source.rs
@@ -5,7 +5,8 @@ use diesel::prelude::*;
 
 use database::connection;
 use dbqueries;
-use models::{Insert, Update};
+// use models::{Insert, Update};
+use models::Insert;
 use models::Source;
 use schema::source;
 
@@ -24,9 +25,14 @@ pub(crate) struct NewSource {
 }
 
 impl Insert for NewSource {
-    fn insert(&self, con: &SqliteConnection) -> QueryResult<usize> {
+    fn insert(&self) -> Result<()> {
         use schema::source::dsl::*;
-        diesel::insert_into(source).values(self).execute(&*con)
+        let db = connection();
+        let con = db.get()?;
+
+        // FIXME: Insert or ignore
+        let _ = diesel::insert_into(source).values(self).execute(&*con);
+        Ok(())
     }
 }
 
@@ -39,19 +45,9 @@ impl NewSource {
         }
     }
 
-    fn index(&self) -> Result<()> {
-        let db = connection();
-        let con = db.get()?;
-
-        // Throw away the result like `insert or ignore`
-        // Diesel deos not support `insert or ignore` yet.
-        let _ = self.insert(&con);
-        Ok(())
-    }
-
     // Look out for when tryinto lands into stable.
     pub(crate) fn into_source(self) -> Result<Source> {
-        self.index()?;
+        self.insert()?;
         dbqueries::get_source_from_uri(&self.uri)
     }
 }
diff --git a/hammond-data/src/models/source.rs b/hammond-data/src/models/source.rs
index b4245be..4843aaa 100644
--- a/hammond-data/src/models/source.rs
+++ b/hammond-data/src/models/source.rs
@@ -1,5 +1,4 @@
 use diesel::SaveChangesDsl;
-use reqwest;
 use rss::Channel;
 
 use hyper;
@@ -17,7 +16,6 @@ use feed::Feed;
 use models::NewSource;
 use schema::source;
 
-use std::io::Read;
 use std::str::FromStr;
 
 #[derive(Queryable, Identifiable, AsChangeset, PartialEq)]
@@ -79,26 +77,8 @@ impl Source {
 
     /// Extract Etag and LastModifier from res, and update self and the
     /// corresponding db row.
-    fn update_etag(&mut self, res: &reqwest::Response) -> Result<()> {
-        let headers = res.headers();
-
-        let etag = headers.get::<ETag>();
-        let lmod = headers.get::<LastModified>();
-
-        if self.http_etag() != etag.map(|x| x.tag()) || self.last_modified != lmod.map(|x| {
-            format!("{}", x)
-        }) {
-            self.http_etag = etag.map(|x| x.tag().to_string().to_owned());
-            self.last_modified = lmod.map(|x| format!("{}", x));
-            self.save()?;
-        }
-
-        Ok(())
-    }
-
-    /// Extract Etag and LastModifier from res, and update self and the
-    /// corresponding db row.
-    fn update_etag2(mut self, res: &Response) -> Result<()> {
+    // FIXME: With &mut self the closure is of type FnMut instead of FnOnce.
+    fn update_etag(mut self, res: &Response) -> Result<()> {
         let headers = res.headers();
 
         let etag = headers.get::<ETag>();
@@ -123,43 +103,7 @@
     ///
     /// Consumes `self` and Returns the corresponding `Feed` Object.
     // TODO: Refactor into TryInto once it lands on stable.
-    pub fn into_feed(&mut self, ignore_etags: bool) -> Result<Feed> {
-        use reqwest::header::{EntityTag, Headers, HttpDate, IfModifiedSince, IfNoneMatch};
-
-        let mut headers = Headers::new();
-
-        if !ignore_etags {
-            if let Some(foo) = self.http_etag() {
-                headers.set(IfNoneMatch::Items(vec![
-                    EntityTag::new(true, foo.to_owned()),
-                ]));
-            }
-
-            if let Some(foo) = self.last_modified() {
-                if let Ok(x) = foo.parse::<HttpDate>() {
-                    headers.set(IfModifiedSince(x));
-                }
-            }
-        }
-
-        let client = reqwest::Client::builder().referer(false).build()?;
-        let mut res = client.get(self.uri()).headers(headers).send()?;
-
-        info!("GET to {} , returned: {}", self.uri(), res.status());
-
-        self.update_etag(&res)?;
-        match_status(res.status())?;
-
-        let mut buf = String::new();
-        res.read_to_string(&mut buf)?;
-        let chan = Channel::from_str(&buf)?;
-
-        Ok(Feed::from_channel_source(chan, self.id))
-    }
-
-    // FIXME:
-    /// Docs
-    pub fn into_fututre_feed(
+    pub fn into_feed(
         self,
         client: &Client<HttpsConnector<HttpConnector>>,
         ignore_etags: bool,
@@ -168,7 +112,7 @@ impl Source {
         let feed = request_constructor(&self, client, ignore_etags)
             .map_err(From::from)
            .and_then(move |res| {
-                self.update_etag2(&res)?;
+                self.update_etag(&res)?;
                 Ok(res)
             })
             .and_then(|res| -> Result {
@@ -278,7 +222,7 @@ mod tests {
         let url = "http://www.newrustacean.com/feed.xml";
         let source = Source::from_url(url).unwrap();
 
-        let feed = source.into_fututre_feed(&client, true);
+        let feed = source.into_feed(&client, true);
         assert!(core.run(feed).is_ok());
     }
 
diff --git a/hammond-data/src/parser.rs b/hammond-data/src/parser.rs
index f8fb6cc..a06b319 100644
--- a/hammond-data/src/parser.rs
+++ b/hammond-data/src/parser.rs
@@ -2,7 +2,6 @@ use rss::Item;
 
 /// Parses an Item Itunes extension and returns it's duration value in seconds.
 // FIXME: Rafactor
-// TODO: Write tests
 #[allow(non_snake_case)]
 pub(crate) fn parse_itunes_duration(item: &Item) -> Option<i32> {
     let duration = item.itunes_ext().map(|s| s.duration())??;
diff --git a/hammond-data/src/pipeline.rs b/hammond-data/src/pipeline.rs
index 6454d33..41142f4 100644
--- a/hammond-data/src/pipeline.rs
+++ b/hammond-data/src/pipeline.rs
@@ -2,14 +2,19 @@
 //! Docs.
 
 use futures::future::*;
-use futures::prelude::*;
+// use futures::prelude::*;
 use hyper::Client;
 use hyper_tls::HttpsConnector;
 use tokio_core::reactor::Core;
 
+use rss;
+
 use Source;
+use dbqueries;
 use errors::*;
+use models::{IndexState, Insert, NewEpisode, NewEpisodeMinimal, Update};
+// use models::new_episode::NewEpisodeMinimal;
 // use Feed;
 
 use std;
@@ -32,7 +37,7 @@ pub fn pipeline<S: IntoIterator<Item = Source>>(sources: S, ignore_etags: bool)
         .into_iter()
         // FIXME: Make proper indexing futures instead of wrapping up existing
         // blocking functions
-        .map(|s| s.into_fututre_feed(&client, ignore_etags).map(|feed| feed.index_future()))
+        .map(|s| s.into_feed(&client, ignore_etags).and_then(|feed| feed.index()))
        .collect();
 
     let f = core.run(collect_futures(list))?;
@@ -43,9 +48,54 @@ pub fn pipeline<S: IntoIterator<Item = Source>>(sources: S, ignore_etags: bool)
     Ok(())
 }
 
+#[allow(dead_code)]
+pub(crate) fn determine_episode_state(
+    ep: NewEpisodeMinimal,
+    item: &rss::Item,
+) -> Result<IndexState<NewEpisode>> {
+    // determine if feed exists
+    let exists = dbqueries::episode_exists(ep.title(), ep.podcast_id())?;
+
+    if !exists {
+        return Ok(IndexState::Index(ep.into_new_episode(item)));
+    } else {
+        let old = dbqueries::get_episode_minimal_from_pk(ep.title(), ep.podcast_id())?;
+        let rowid = old.rowid();
+
+        if ep != old.into() {
+            return Ok(IndexState::Update((ep.into_new_episode(item), rowid)));
+        } else {
+            return Ok(IndexState::NotChanged);
+        }
+    }
+}
+
+#[allow(dead_code)]
+pub(crate) fn model_state(state: IndexState<NewEpisode>) -> Result<()> {
+    match state {
+        IndexState::NotChanged => Ok(()),
+        IndexState::Index(t) => t.insert(),
+        IndexState::Update((t, rowid)) => t.update(rowid),
+    }
+}
+
+#[allow(dead_code)]
+pub(crate) fn model_state_future(
+    state: IndexState<NewEpisode>,
+) -> Box<Future<Item = (), Error = Error>> {
+    Box::new(result(model_state(state)))
+}
+
+#[allow(dead_code)]
+pub(crate) fn glue(item: &rss::Item, id: i32) -> Result<IndexState<NewEpisode>> {
+    let e = NewEpisodeMinimal::new(item, id)?;
+    determine_episode_state(e, &item)
+}
+
 // Weird magic from #rust irc channel
 // kudos to remexre
-fn collect_futures<F>(
+/// docs
+pub fn collect_futures<F>(
     futures: Vec<F>,
 ) -> Box<Future<Item = Vec<Result<F::Item>>, Error = Error>>
 where
diff --git a/hammond-data/src/utils.rs b/hammond-data/src/utils.rs
index bf875e1..c75f1a2 100644
--- a/hammond-data/src/utils.rs
+++ b/hammond-data/src/utils.rs
@@ -153,7 +153,7 @@ mod tests {
     use self::tempdir::TempDir;
     use super::*;
-    use database::{connection, truncate_db};
+    use database::truncate_db;
     use models::new_episode::NewEpisodeBuilder;
 
     use std::fs::File;
     use std::io::Write;
@@ -169,14 +169,12 @@ mod tests {
         writeln!(tmp_file, "Foooo").unwrap();
 
         // Setup episodes
-        let db = connection();
-        let con = db.get().unwrap();
         let n1 = NewEpisodeBuilder::default()
             .title("foo_bar".to_string())
             .podcast_id(0)
             .build()
             .unwrap()
-            .into_episode(&con)
+            .into_episode()
             .unwrap();
 
         let n2 = NewEpisodeBuilder::default()
@@ -184,15 +182,14 @@ mod tests {
             .podcast_id(1)
             .build()
             .unwrap()
-            .into_episode(&con)
+            .into_episode()
             .unwrap();
 
-        let mut ep1 = dbqueries::get_episode_from_pk(&con, n1.title(), n1.podcast_id()).unwrap();
-        let mut ep2 = dbqueries::get_episode_from_pk(&con, n2.title(), n2.podcast_id()).unwrap();
+        let mut ep1 = dbqueries::get_episode_from_pk(n1.title(), n1.podcast_id()).unwrap();
+        let mut ep2 = dbqueries::get_episode_from_pk(n2.title(), n2.podcast_id()).unwrap();
 
         ep1.set_local_uri(Some(valid_path.to_str().unwrap()));
         ep2.set_local_uri(Some(bad_path.to_str().unwrap()));
 
-        drop(con);
         ep1.save().unwrap();
         ep2.save().unwrap();
@@ -214,24 +211,15 @@ mod tests {
         let _tmp_dir = helper_db();
         download_checker().unwrap();
 
-        let episode = {
-            let db = connection();
-            let con = db.get().unwrap();
-            dbqueries::get_episode_from_pk(&con, "bar_baz", 1).unwrap()
-        };
+        let episode = dbqueries::get_episode_from_pk("bar_baz", 1).unwrap();
         assert!(episode.local_uri().is_none());
     }
 
     #[test]
     fn test_download_cleaner() {
         let _tmp_dir = helper_db();
-        let mut episode: EpisodeCleanerQuery = {
-            let db = connection();
-            let con = db.get().unwrap();
-            dbqueries::get_episode_from_pk(&con, "foo_bar", 0)
-                .unwrap()
-                .into()
-        };
+        let mut episode: EpisodeCleanerQuery =
+            dbqueries::get_episode_from_pk("foo_bar", 0).unwrap().into();
         let valid_path = episode.local_uri().unwrap().to_owned();
 
         delete_local_content(&mut episode).unwrap();
@@ -241,11 +229,7 @@
     #[test]
     fn test_played_cleaner_expired() {
         let _tmp_dir = helper_db();
-        let mut episode = {
-            let db = connection();
-            let con = db.get().unwrap();
-            dbqueries::get_episode_from_pk(&con, "foo_bar", 0).unwrap()
-        };
+        let mut episode = dbqueries::get_episode_from_pk("foo_bar", 0).unwrap();
         let now_utc = Utc::now().timestamp() as i32;
         // let limit = now_utc - 172_800;
         let epoch = now_utc - 200_000;
@@ -261,11 +245,7 @@
     #[test]
     fn test_played_cleaner_none() {
         let _tmp_dir = helper_db();
-        let mut episode = {
-            let db = connection();
-            let con = db.get().unwrap();
-            dbqueries::get_episode_from_pk(&con, "foo_bar", 0).unwrap()
-        };
+        let mut episode = dbqueries::get_episode_from_pk("foo_bar", 0).unwrap();
         let now_utc = Utc::now().timestamp() as i32;
         // limit = 172_800;
         let epoch = now_utc - 20_000;
diff --git a/hammond-downloader/Cargo.toml b/hammond-downloader/Cargo.toml
index ada57e7..641a4d5 100644
--- a/hammond-downloader/Cargo.toml
+++ b/hammond-downloader/Cargo.toml
@@ -15,3 +15,7 @@ glob = "0.2.11"
 
 [dependencies.hammond-data]
 path = "../hammond-data"
+
+[dev-dependencies]
+tokio-core = "0.1.12"
+hyper-tls = "0.1.2"
diff --git a/hammond-downloader/src/downloader.rs b/hammond-downloader/src/downloader.rs
index 7acba68..821fbce 100644
--- a/hammond-downloader/src/downloader.rs
+++ b/hammond-downloader/src/downloader.rs
@@ -219,20 +219,28 @@ mod tests {
     use hammond_data::Source;
     use hammond_data::dbqueries;
 
+    use hyper::Client;
+    use hyper_tls::HttpsConnector;
+    use tokio_core::reactor::Core;
+
     #[test]
     // This test inserts an rss feed to your `XDG_DATA/hammond/hammond.db` so we make it explicit
     // to run it.
     #[ignore]
     fn test_cache_image() {
-        let url = "http://www.newrustacean.com/feed.xml";
+        let mut core = Core::new().unwrap();
+        let client = Client::configure()
+            .connector(HttpsConnector::new(4, &core.handle()).unwrap())
+            .build(&core.handle());
 
+        let url = "http://www.newrustacean.com/feed.xml";
         // Create and index a source
-        let mut source = Source::from_url(url).unwrap();
+        let source = Source::from_url(url).unwrap();
         // Copy it's id
         let sid = source.id();
-
-        // Convert Source it into a Feed and index it
-        let feed = source.into_feed(true).unwrap();
+        // Convert Source it into a future Feed and index it
+        let future = source.into_feed(&client, true);
+        let feed = core.run(future).unwrap();
         feed.index().unwrap();
 
         // Get the Podcast
diff --git a/hammond-downloader/src/lib.rs b/hammond-downloader/src/lib.rs
index 197da0d..8612c72 100644
--- a/hammond-downloader/src/lib.rs
+++ b/hammond-downloader/src/lib.rs
@@ -13,3 +13,8 @@ extern crate tempdir;
 
 pub mod downloader;
 pub mod errors;
+
+#[cfg(test)]
+extern crate hyper_tls;
+#[cfg(test)]
+extern crate tokio_core;
diff --git a/hammond-gtk/Cargo.toml b/hammond-gtk/Cargo.toml
index 0dc8267..78ac267 100644
--- a/hammond-gtk/Cargo.toml
+++ b/hammond-gtk/Cargo.toml
@@ -30,3 +30,8 @@ path = "../hammond-data"
 
 [dependencies.hammond-downloader]
 path = "../hammond-downloader"
+
+[dev-dependencies]
+hyper-tls = "0.1.2"
+tokio-core = "0.1.12"
+hyper = "0.11.12"
diff --git a/hammond-gtk/src/main.rs b/hammond-gtk/src/main.rs
index 7f63acd..fdfd53a 100644
--- a/hammond-gtk/src/main.rs
+++ b/hammond-gtk/src/main.rs
@@ -21,6 +21,13 @@ extern crate regex;
 extern crate send_cell;
 // extern crate rayon;
 
+#[cfg(test)]
+extern crate hyper;
+#[cfg(test)]
+extern crate hyper_tls;
+#[cfg(test)]
+extern crate tokio_core;
+
 // use rayon::prelude::*;
 
 use log::LogLevel;
diff --git a/hammond-gtk/src/manager.rs b/hammond-gtk/src/manager.rs
index 68b3132..476edd7 100644
--- a/hammond-gtk/src/manager.rs
+++ b/hammond-gtk/src/manager.rs
@@ -117,8 +117,11 @@ pub fn add(id: i32, directory: &str, sender: Sender) {
 mod tests {
     use super::*;
 
+    use hyper::Client;
+    use hyper_tls::HttpsConnector;
+    use tokio_core::reactor::Core;
+
     use hammond_data::{Episode, Source};
-    use hammond_data::database;
     use hammond_data::dbqueries;
     use hammond_data::utils::get_download_folder;
 
@@ -133,25 +136,26 @@
     // THIS IS NOT A RELIABLE TEST
     // Just quick sanity check
     fn test_start_dl() {
-        let url = "http://www.newrustacean.com/feed.xml";
+        let mut core = Core::new().unwrap();
+        let client = Client::configure()
+            .connector(HttpsConnector::new(4, &core.handle()).unwrap())
+            .build(&core.handle());
 
+        let url = "http://www.newrustacean.com/feed.xml";
         // Create and index a source
-        let mut source = Source::from_url(url).unwrap();
+        let source = Source::from_url(url).unwrap();
         // Copy it's id
         let sid = source.id();
-
-        // Convert Source it into a Feed and index it
-        let feed = source.into_feed(true).unwrap();
+        // Convert Source it into a future Feed and index it
+        let future = source.into_feed(&client, true);
+        let feed = core.run(future).unwrap();
         feed.index().unwrap();
 
         // Get the Podcast
         let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();
 
         // Get an episode
-        let episode: Episode = {
-            let con = database::connection();
-            dbqueries::get_episode_from_pk(&*con.get().unwrap(), "e000: Hello, world!", pd.id())
-                .unwrap()
-        };
+        let episode: Episode =
+            dbqueries::get_episode_from_pk("e000: Hello, world!", pd.id()).unwrap();
 
         let (sender, _rx) = channel();
diff --git a/hammond-gtk/src/utils.rs b/hammond-gtk/src/utils.rs
index 0cccde1..5f11ce7 100644
--- a/hammond-gtk/src/utils.rs
+++ b/hammond-gtk/src/utils.rs
@@ -84,20 +84,28 @@ mod tests {
     use hammond_data::Source;
     use hammond_data::dbqueries;
 
+    use hyper::Client;
+    use hyper_tls::HttpsConnector;
+    use tokio_core::reactor::Core;
+
     #[test]
     // This test inserts an rss feed to your `XDG_DATA/hammond/hammond.db` so we make it explicit
     // to run it.
     #[ignore]
     fn test_get_pixbuf_from_path() {
-        let url = "http://www.newrustacean.com/feed.xml";
+        let mut core = Core::new().unwrap();
+        let client = Client::configure()
+            .connector(HttpsConnector::new(4, &core.handle()).unwrap())
+            .build(&core.handle());
 
+        let url = "http://www.newrustacean.com/feed.xml";
         // Create and index a source
-        let mut source = Source::from_url(url).unwrap();
+        let source = Source::from_url(url).unwrap();
         // Copy it's id
         let sid = source.id();
-
-        // Convert Source it into a Feed and index it
-        let feed = source.into_feed(true).unwrap();
+        // Convert Source it into a future Feed and index it
+        let future = source.into_feed(&client, true);
+        let feed = core.run(future).unwrap();
         feed.index().unwrap();
 
         // Get the Podcast