diff --git a/src/dbqueries.rs b/src/dbqueries.rs
index 5243488..45095e4 100644
--- a/src/dbqueries.rs
+++ b/src/dbqueries.rs
@@ -8,12 +8,27 @@ pub fn get_sources(con: &SqliteConnection) -> QueryResult<Vec<Source>> {
     s
 }
 
-pub fn get_podcasts(con: &SqliteConnection, parent: &Source) -> QueryResult<Vec<Podcast>> {
-    let pds = Podcast::belonging_to(parent).load::<Podcast>(con);
-    // debug!("Returned Podcasts:\n{:?}", pds);
+pub fn get_podcasts(con: &SqliteConnection) -> QueryResult<Vec<Podcast>> {
+    use schema::podcast::dsl::*;
+
+    let pds = podcast.load::<Podcast>(con);
     pds
 }
+
+pub fn get_episodes(con: &SqliteConnection) -> QueryResult<Vec<Episode>> {
+    use schema::episode::dsl::*;
+
+    let eps = episode.load::<Episode>(con);
+    eps
+}
+
+pub fn get_podcast(con: &SqliteConnection, parent: &Source) -> QueryResult<Vec<Podcast>> {
+    let pd = Podcast::belonging_to(parent).load::<Podcast>(con);
+    // debug!("Returned Podcasts:\n{:?}", pd);
+    pd
+}
+
 
 pub fn get_pd_episodes(con: &SqliteConnection, parent: &Podcast) -> QueryResult<Vec<Episode>> {
     let eps = Episode::belonging_to(parent).load::<Episode>(con);
     eps
diff --git a/src/index_feed.rs b/src/index_feed.rs
index 8a0791e..c6e7399 100644
--- a/src/index_feed.rs
+++ b/src/index_feed.rs
@@ -108,8 +108,8 @@ fn complete_index_from_source(
 fn complete_index(
     mutex: Arc<Mutex<SqliteConnection>>,
     chan: rss::Channel,
-    parent: &Source
-) -> Result<()>{
+    parent: &Source,
+) -> Result<()> {
     let tempdb = mutex.lock().unwrap();
     let pd = index_channel(&tempdb, &chan, parent)?;
     drop(tempdb);
@@ -206,9 +206,12 @@ fn refresh_source(
 mod tests {
     extern crate tempdir;
     use diesel::prelude::*;
+    use rss;
 
-    use std::io::stdout;
+    use std::io::{stdout, BufReader};
     use std::path::PathBuf;
+    use std::fs;
+    use std::collections::HashMap;
 
     use super::*;
 
@@ -241,7 +244,7 @@
 
     #[test]
     /// Insert feeds and update/index them.
-    fn foo() {
+    fn test_index_loop() {
         let TempDB(_tmp_dir, db_path, db) = get_temp_db();
 
         let inpt = vec![
@@ -263,9 +266,57 @@
         index_loop(db).unwrap();
     }
 
-    // #[test]
-    // fn baz(){
-    //     let TempDB(tmp_dir, db_path, db) = get_temp_db();
+    #[test]
+    fn test_complete_index() {
+        let TempDB(_tmp_dir, _db_path, db) = get_temp_db();
+        // complete_index runs in parallel, so it takes a Mutex as an argument.
+        let m = Arc::new(Mutex::new(db));
 
-    // }
+        // Map the filenames to their feed urls.
+        let mut urls = HashMap::new();
+        urls.insert(
+            "Intercepted.xml",
+            "https://feeds.feedburner.com/InterceptedWithJeremyScahill",
+        );
+        urls.insert(
+            "LinuxUnplugged.xml",
+            "http://feeds.feedburner.com/linuxunplugged",
+        );
+        urls.insert(
+            "TheBreakthrough.xml",
+            "http://feeds.feedburner.com/propublica/podcast",
+        );
+        urls.insert(
+            "R4Explanation.xml",
+            "https://request-for-explanation.github.io/podcast/rss.xml",
+        );
+
+        let feeds_path = fs::read_dir("./tests/feeds/").unwrap();
+        // feeds_path.for_each(|x| println!("{}", x.unwrap().path().display()));
+
+        feeds_path.for_each(|x| {
+            let x = x.unwrap();
+            let name = x.file_name();
+            let url = urls.get(name.to_str().unwrap());
+
+            let tempdb = m.lock().unwrap();
+            // Create and insert a Source into the db.
+            let s = insert_return_source(&tempdb, url.unwrap()).unwrap();
+            drop(tempdb);
+
+            // Open the xml file.
+            let feed = fs::File::open(x.path()).unwrap();
+            // Parse it into an rss::Channel.
+            let chan = rss::Channel::read_from(BufReader::new(feed)).unwrap();
+
+            // Index the channel.
+            complete_index(m.clone(), chan, &s).unwrap();
+        });
+
+        // Assert that the indexed rows match the expected results.
+        let tempdb = m.lock().unwrap();
+        assert_eq!(dbqueries::get_sources(&tempdb).unwrap().len(), 4);
+        assert_eq!(dbqueries::get_podcasts(&tempdb).unwrap().len(), 4);
+        assert_eq!(dbqueries::get_episodes(&tempdb).unwrap().len(), 274);
+    }
 }