diff --git a/hammond-data/src/database.rs b/hammond-data/src/database.rs
index d164f94..25d7b60 100644
--- a/hammond-data/src/database.rs
+++ b/hammond-data/src/database.rs
@@ -10,7 +10,7 @@ use std::time::Duration;
 use errors::*;
 
 #[cfg(not(test))]
-use xdg_;
+use xdg_dirs;
 
 type Pool = Arc<r2d2::Pool<ConnectionManager<SqliteConnection>>>;
 
@@ -22,7 +22,7 @@ lazy_static!{
 
 #[cfg(not(test))]
 lazy_static! {
-    static ref DB_PATH: PathBuf = xdg_::HAMMOND_XDG.place_data_file("hammond.db").unwrap();
+    static ref DB_PATH: PathBuf = xdg_dirs::HAMMOND_XDG.place_data_file("hammond.db").unwrap();
 }
 
 #[cfg(test)]
diff --git a/hammond-data/src/feed.rs b/hammond-data/src/feed.rs
index e853ed1..5fd20c6 100644
--- a/hammond-data/src/feed.rs
+++ b/hammond-data/src/feed.rs
@@ -169,6 +169,25 @@ mod tests {
         index_all().unwrap();
     }
 
+    #[test]
+    /// Insert feeds and update/index them.
+    fn test_fetch_loop() {
+        truncate_db().unwrap();
+        let inpt = vec![
+            "https://request-for-explanation.github.io/podcast/rss.xml",
+            "https://feeds.feedburner.com/InterceptedWithJeremyScahill",
+            "http://feeds.propublica.org/propublica/podcast",
+            "http://feeds.feedburner.com/linuxunplugged",
+        ];
+
+        inpt.iter().for_each(|url| {
+            // Index the urls into the source table.
+            Source::from_url(url).unwrap();
+        });
+
+        fetch_all().unwrap();
+    }
+
     #[test]
     fn test_complete_index() {
         // vec of (path, url) tuples.
diff --git a/hammond-data/src/lib.rs b/hammond-data/src/lib.rs
index bf6374f..9679c98 100644
--- a/hammond-data/src/lib.rs
+++ b/hammond-data/src/lib.rs
@@ -36,7 +36,7 @@ mod schema;
 
 pub use models::queryables::{Episode, Podcast, Source};
 
-pub mod xdg_ {
+pub mod xdg_dirs {
     use std::path::PathBuf;
 
     use xdg;
diff --git a/hammond-data/src/models/insertables.rs b/hammond-data/src/models/insertables.rs
index 2e151ce..7a0ddef 100644
--- a/hammond-data/src/models/insertables.rs
+++ b/hammond-data/src/models/insertables.rs
@@ -69,6 +69,9 @@ pub(crate) struct NewPodcast {
     link: String,
     description: String,
     image_uri: Option<String>,
+    favorite: bool,
+    archive: bool,
+    always_dl: bool,
     source_id: i32,
 }
 
@@ -127,9 +130,13 @@ pub(crate) struct NewPodcastBuilder {
     link: String,
     description: String,
     image_uri: Option<String>,
+    favorite: bool,
+    archive: bool,
+    always_dl: bool,
     source_id: i32,
 }
 
+#[allow(dead_code)]
 impl NewPodcastBuilder {
     pub(crate) fn new() -> NewPodcastBuilder {
         NewPodcastBuilder::default()
@@ -160,12 +167,30 @@ impl NewPodcastBuilder {
         self
     }
 
+    pub(crate) fn favorite(mut self, s: bool) -> NewPodcastBuilder {
+        self.favorite = s;
+        self
+    }
+
+    pub(crate) fn archive(mut self, s: bool) -> NewPodcastBuilder {
+        self.archive = s;
+        self
+    }
+
+    pub(crate) fn always_dl(mut self, s: bool) -> NewPodcastBuilder {
+        self.always_dl = s;
+        self
+    }
+
     pub(crate) fn build(self) -> NewPodcast {
         NewPodcast {
             title: self.title,
             link: self.link,
             description: self.description,
             image_uri: self.image_uri,
+            favorite: self.favorite,
+            archive: self.archive,
+            always_dl: self.always_dl,
             source_id: self.source_id,
         }
     }
@@ -201,11 +226,15 @@ impl NewPodcast {
 pub(crate) struct NewEpisode {
     title: Option<String>,
     uri: String,
+    local_uri: Option<String>,
     description: Option<String>,
     published_date: Option<String>,
     length: Option<i32>,
     guid: Option<String>,
     epoch: i32,
+    played: Option<i32>,
+    favorite: bool,
+    archive: bool,
     podcast_id: i32,
 }
 
@@ -262,14 +291,19 @@ impl NewEpisode {
 pub(crate) struct NewEpisodeBuilder {
     title: Option<String>,
     uri: String,
+    local_uri: Option<String>,
     description: Option<String>,
     published_date: Option<String>,
     length: Option<i32>,
     guid: Option<String>,
     epoch: i32,
+    played: Option<i32>,
+    favorite: bool,
+    archive: bool,
     podcast_id: i32,
 }
 
+#[allow(dead_code)]
 impl NewEpisodeBuilder {
     pub(crate) fn new() -> NewEpisodeBuilder {
         NewEpisodeBuilder::default()
@@ -285,6 +319,11 @@ impl NewEpisodeBuilder {
         self
     }
 
+    pub(crate) fn local_uri(mut self, s: Option<String>) -> NewEpisodeBuilder {
+        self.local_uri = s;
+        self
+    }
+
     pub(crate) fn description(mut self, s: Option<String>) -> NewEpisodeBuilder {
         self.description = s;
         self
@@ -300,6 +339,11 @@ impl NewEpisodeBuilder {
         self
     }
 
+    pub(crate) fn played(mut self, s: Option<i32>) -> NewEpisodeBuilder {
+        self.played = s;
+        self
+    }
+
     pub(crate) fn guid(mut self, s: Option<String>) -> NewEpisodeBuilder {
         self.guid = s;
         self
@@ -315,15 +359,29 @@ impl NewEpisodeBuilder {
         self
     }
 
+    pub(crate) fn favorite(mut self, s: bool) -> NewEpisodeBuilder {
+        self.favorite = s;
+        self
+    }
+
+    pub(crate) fn archive(mut self, s: bool) -> NewEpisodeBuilder {
+        self.archive = s;
+        self
+    }
+
     pub(crate) fn build(self) -> NewEpisode {
         NewEpisode {
             title: self.title,
             uri: self.uri,
+            local_uri: self.local_uri,
             description: self.description,
             published_date: self.published_date,
             length: self.length,
             guid: self.guid,
             epoch: self.epoch,
+            played: self.played,
+            favorite: self.favorite,
+            archive: self.archive,
             podcast_id: self.podcast_id,
         }
     }
diff --git a/hammond-data/src/utils.rs b/hammond-data/src/utils.rs
index 859c27e..41161b0 100644
--- a/hammond-data/src/utils.rs
+++ b/hammond-data/src/utils.rs
@@ -10,24 +10,27 @@ use models::queryables::Episode;
 use std::path::Path;
 use std::fs;
 
-// TODO: Write unit test.
 fn download_checker() -> Result<()> {
     let episodes = dbqueries::get_downloaded_episodes()?;
 
-    episodes.into_par_iter().for_each(|mut ep| {
-        if !Path::new(ep.local_uri().unwrap()).exists() {
-            ep.set_local_uri(None);
-            let res = ep.save();
-            if let Err(err) = res {
-                error!("Error while trying to update episode: {:#?}", ep);
-                error!("Error: {}", err);
-            };
-        }
-    });
+    episodes
+        .into_par_iter()
+        .for_each(|mut ep| checker_helper(&mut ep));
 
     Ok(())
 }
 
+fn checker_helper(ep: &mut Episode) {
+    if !Path::new(ep.local_uri().unwrap()).exists() {
+        ep.set_local_uri(None);
+        let res = ep.save();
+        if let Err(err) = res {
+            error!("Error while trying to update episode: {:#?}", ep);
+            error!("Error: {}", err);
+        };
+    }
+}
+
 // TODO: Write unit test.
 fn played_cleaner() -> Result<()> {
     let episodes = dbqueries::get_played_episodes()?;
@@ -37,6 +40,7 @@ fn played_cleaner() -> Result<()> {
         if ep.local_uri().is_some() && ep.played().is_some() {
             let played = ep.played().unwrap();
             // TODO: expose a config and a user set option.
+            // Change the test too when exposed.
             let limit = played + 172_800; // add 2days in seconds
             if now_utc > limit {
                 let e = delete_local_content(&mut ep);
@@ -52,7 +56,6 @@ fn played_cleaner() -> Result<()> {
     Ok(())
 }
 
-// TODO: Write unit test.
 pub fn delete_local_content(ep: &mut Episode) -> Result<()> {
     if ep.local_uri().is_some() {
         let uri = ep.local_uri().unwrap().to_owned();
@@ -90,3 +93,132 @@ pub fn url_cleaner(s: &str) -> String {
         _ => s.trim().to_owned(),
     }
 }
+
+#[cfg(test)]
+mod tests {
+    extern crate tempdir;
+
+    use super::*;
+    use database::{connection, truncate_db};
+    use models::insertables::NewEpisodeBuilder;
+    use self::tempdir::TempDir;
+    use std::fs::File;
+    use std::io::Write;
+
+    fn helper_db() -> TempDir {
+        // Clean the db
+        truncate_db().unwrap();
+        // Setup tmp file stuff
+        let tmp_dir = TempDir::new("hammond_test").unwrap();
+        let valid_path = tmp_dir.path().join("virtual_dl.mp3");
+        let bad_path = tmp_dir.path().join("invalid_thing.mp3");
+        let mut tmp_file = File::create(&valid_path).unwrap();
+        writeln!(tmp_file, "Foooo").unwrap();
+
+        // Setup episodes
+        let db = connection();
+        let con = db.get().unwrap();
+        NewEpisodeBuilder::new()
+            .uri("foo_bar".to_string())
+            .local_uri(Some(valid_path.to_str().unwrap().to_owned()))
+            .build()
+            .into_episode(&con)
+            .unwrap();
+
+        NewEpisodeBuilder::new()
+            .uri("bar_baz".to_string())
+            .local_uri(Some(bad_path.to_str().unwrap().to_owned()))
+            .build()
+            .into_episode(&con)
+            .unwrap();
+
+        tmp_dir
+    }
+
+    #[test]
+    fn test_download_checker() {
+        let _tmp_dir = helper_db();
+        download_checker().unwrap();
+        let episodes = dbqueries::get_downloaded_episodes().unwrap();
+
+        assert_eq!(episodes.len(), 1);
+        assert_eq!("foo_bar", episodes.first().unwrap().uri());
+    }
+
+    #[test]
+    fn test_checker_helper() {
+        let _tmp_dir = helper_db();
+        let mut episode = {
+            let db = connection();
+            let con = db.get().unwrap();
+            dbqueries::get_episode_from_uri(&con, "bar_baz").unwrap()
+        };
+
+        checker_helper(&mut episode);
+        assert!(episode.local_uri().is_none());
+    }
+
+    #[test]
+    fn test_download_cleaner() {
+        let _tmp_dir = helper_db();
+        let mut episode = {
+            let db = connection();
+            let con = db.get().unwrap();
+            dbqueries::get_episode_from_uri(&con, "foo_bar").unwrap()
+        };
+
+        let valid_path = episode.local_uri().unwrap().to_owned();
+        delete_local_content(&mut episode).unwrap();
+        assert_eq!(Path::new(&valid_path).exists(), false);
+    }
+
+    #[test]
+    fn test_played_cleaner_expired() {
+        let _tmp_dir = helper_db();
+        let mut episode = {
+            let db = connection();
+            let con = db.get().unwrap();
+            dbqueries::get_episode_from_uri(&con, "foo_bar").unwrap()
+        };
+        let now_utc = Utc::now().timestamp() as i32;
+        // let limit = now_utc - 172_800;
+        let epoch = now_utc - 200_000;
+        episode.set_played(Some(epoch));
+        episode.save().unwrap();
+        let valid_path = episode.local_uri().unwrap().to_owned();
+
+        // This should delete the file
+        played_cleaner().unwrap();
+        assert_eq!(Path::new(&valid_path).exists(), false);
+    }
+
+    #[test]
+    fn test_played_cleaner_none() {
+        let _tmp_dir = helper_db();
+        let mut episode = {
+            let db = connection();
+            let con = db.get().unwrap();
+            dbqueries::get_episode_from_uri(&con, "foo_bar").unwrap()
+        };
+        let now_utc = Utc::now().timestamp() as i32;
+        // limit = 172_800;
+        let epoch = now_utc - 20_000;
+        episode.set_played(Some(epoch));
+        episode.save().unwrap();
+        let valid_path = episode.local_uri().unwrap().to_owned();
+
+        // This should not delete the file
+        played_cleaner().unwrap();
+        assert_eq!(Path::new(&valid_path).exists(), true);
+    }
+
+    #[test]
+    fn test_url_cleaner() {
+        let good_url = "http://traffic.megaphone.fm/FL8608731318.mp3";
+        let bad_url = "http://traffic.megaphone.fm/FL8608731318.mp3?updated=1484685184";
+
+        assert_eq!(url_cleaner(bad_url), good_url);
+        assert_eq!(url_cleaner(good_url), good_url);
+        assert_eq!(url_cleaner(&format!(" {}\t\n", bad_url)), good_url);
+    }
+}
diff --git a/hammond-downloader/src/downloader.rs b/hammond-downloader/src/downloader.rs
index 84865eb..164599e 100644
--- a/hammond-downloader/src/downloader.rs
+++ b/hammond-downloader/src/downloader.rs
@@ -9,7 +9,7 @@ use std::path::Path;
 
 use errors::*;
 use hammond_data::{Episode, Podcast};
-use hammond_data::xdg_::{DL_DIR, HAMMOND_CACHE};
+use hammond_data::xdg_dirs::{DL_DIR, HAMMOND_CACHE};
 
 // TODO: Replace path that are of type &str with std::path.
 // TODO: Have a convention/document absolute/relative paths, if they should end with / or not.