Refactor the tests to use ? operator
As of rustc 1.27, #[test] functions can return errors. This greatly improves ergonomics.
parent 7bbd9a1a4f
commit 89b99614a0
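The pattern applied throughout the diff below is always the same: a test that used to .unwrap() every fallible call now declares a Result return type (failure::Error in this codebase), swaps .unwrap() for ?, and ends with Ok(()). A minimal before/after sketch of that idea, using a made-up parse_port helper rather than anything from this repository:

    use std::num::ParseIntError;

    // Hypothetical helper, only for this sketch.
    fn parse_port(s: &str) -> Result<u16, ParseIntError> {
        s.trim().parse()
    }

    #[cfg(test)]
    mod tests {
        use super::*;

        // Old style: unwrap every fallible call; any Err panics the test.
        #[test]
        fn parse_port_unwrap() {
            assert_eq!(parse_port("8080").unwrap(), 8080);
        }

        // New style (rustc >= 1.27): the test returns Result, so the ?
        // operator propagates the error and the test fails with its value.
        #[test]
        fn parse_port_question_mark() -> Result<(), ParseIntError> {
            assert_eq!(parse_port("8080")?, 8080);
            Ok(())
        }
    }
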
@@ -66,7 +66,7 @@ fn run_migration_on(connection: &SqliteConnection) -> Result<(), DataError> {
 
 /// Reset the database into a clean state.
 // Test share a Temp file db.
-#[allow(dead_code)]
+#[cfg(test)]
 pub fn truncate_db() -> Result<(), DataError> {
     let db = connection();
     let con = db.get()?;
@@ -448,24 +448,26 @@ pub fn update_none_to_played_now(parent: &Show) -> Result<usize, DataError> {
 mod tests {
     use super::*;
     use database::*;
+    use failure::Error;
     use pipeline;
 
     #[test]
-    fn test_update_none_to_played_now() {
-        truncate_db().unwrap();
+    fn test_update_none_to_played_now() -> Result<(), Error> {
+        truncate_db()?;
 
         let url = "https://web.archive.org/web/20180120083840if_/https://feeds.feedburner.\
                    com/InterceptedWithJeremyScahill";
-        let source = Source::from_url(url).unwrap();
+        let source = Source::from_url(url)?;
         let id = source.id();
-        pipeline::run(vec![source]).unwrap();
-        let pd = get_podcast_from_source_id(id).unwrap();
+        pipeline::run(vec![source])?;
+        let pd = get_podcast_from_source_id(id)?;
 
-        let eps_num = get_pd_unplayed_episodes(&pd).unwrap().len();
+        let eps_num = get_pd_unplayed_episodes(&pd)?.len();
         assert_ne!(eps_num, 0);
 
-        update_none_to_played_now(&pd).unwrap();
-        let eps_num2 = get_pd_unplayed_episodes(&pd).unwrap().len();
+        update_none_to_played_now(&pd)?;
+        let eps_num2 = get_pd_unplayed_episodes(&pd)?.len();
         assert_eq!(eps_num2, 0);
+        Ok(())
     }
 }
@@ -126,6 +126,7 @@ fn batch_insert_episodes(episodes: &[NewEpisode]) {
 
 #[cfg(test)]
 mod tests {
+    use failure::Error;
     use rss::Channel;
     use tokio_core::reactor::Core;
 
@@ -170,8 +171,8 @@ mod tests {
     };
 
     #[test]
-    fn test_complete_index() {
-        truncate_db().unwrap();
+    fn test_complete_index() -> Result<(), Error> {
+        truncate_db()?;
 
         let feeds: Vec<_> = URLS
             .iter()
@@ -181,41 +182,44 @@ mod tests {
                 get_feed(path, s.id())
             }).collect();
 
-        let mut core = Core::new().unwrap();
+        let mut core = Core::new()?;
         // Index the channels
         let list: Vec<_> = feeds.into_iter().map(|x| x.index()).collect();
         let _foo = core.run(join_all(list));
 
         // Assert the index rows equal the controlled results
-        assert_eq!(dbqueries::get_sources().unwrap().len(), 5);
-        assert_eq!(dbqueries::get_podcasts().unwrap().len(), 5);
-        assert_eq!(dbqueries::get_episodes().unwrap().len(), 354);
+        assert_eq!(dbqueries::get_sources()?.len(), 5);
+        assert_eq!(dbqueries::get_podcasts()?.len(), 5);
+        assert_eq!(dbqueries::get_episodes()?.len(), 354);
+        Ok(())
     }
 
     #[test]
-    fn test_feed_parse_podcast() {
-        truncate_db().unwrap();
+    fn test_feed_parse_podcast() -> Result<(), Error> {
+        truncate_db()?;
 
         let path = "tests/feeds/2018-01-20-Intercepted.xml";
         let feed = get_feed(path, 42);
 
-        let file = fs::File::open(path).unwrap();
-        let channel = Channel::read_from(BufReader::new(file)).unwrap();
+        let file = fs::File::open(path)?;
+        let channel = Channel::read_from(BufReader::new(file))?;
 
         let pd = NewShow::new(&channel, 42);
         assert_eq!(feed.parse_podcast(), pd);
+        Ok(())
     }
 
     #[test]
-    fn test_feed_index_channel_items() {
-        truncate_db().unwrap();
+    fn test_feed_index_channel_items() -> Result<(), Error> {
+        truncate_db()?;
 
         let path = "tests/feeds/2018-01-20-Intercepted.xml";
         let feed = get_feed(path, 42);
-        let pd = feed.parse_podcast().to_podcast().unwrap();
+        let pd = feed.parse_podcast().to_podcast()?;
 
-        feed.index_channel_items(pd).wait().unwrap();
-        assert_eq!(dbqueries::get_podcasts().unwrap().len(), 1);
-        assert_eq!(dbqueries::get_episodes().unwrap().len(), 43);
+        feed.index_channel_items(pd).wait()?;
+        assert_eq!(dbqueries::get_podcasts()?.len(), 1);
+        assert_eq!(dbqueries::get_episodes()?.len(), 43);
+        Ok(())
     }
 }
@@ -312,6 +312,7 @@ impl NewEpisodeMinimal {
 mod tests {
     use database::truncate_db;
     use dbqueries;
+    use failure::Error;
    use models::new_episode::{NewEpisodeMinimal, NewEpisodeMinimalBuilder};
     use models::*;
 
@@ -477,68 +478,72 @@ mod tests {
     }
 
     #[test]
-    fn test_new_episode_minimal_intercepted() {
-        let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
-        let channel = Channel::read_from(BufReader::new(file)).unwrap();
+    fn test_new_episode_minimal_intercepted() -> Result<(), Error> {
+        let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
+        let channel = Channel::read_from(BufReader::new(file))?;
 
         let episode = channel.items().iter().nth(14).unwrap();
-        let ep = NewEpisodeMinimal::new(&episode, 42).unwrap();
+        let ep = NewEpisodeMinimal::new(&episode, 42)?;
         assert_eq!(ep, *EXPECTED_MINIMAL_INTERCEPTED_1);
 
         let episode = channel.items().iter().nth(15).unwrap();
-        let ep = NewEpisodeMinimal::new(&episode, 42).unwrap();
+        let ep = NewEpisodeMinimal::new(&episode, 42)?;
         assert_eq!(ep, *EXPECTED_MINIMAL_INTERCEPTED_2);
+        Ok(())
     }
 
     #[test]
-    fn test_new_episode_intercepted() {
-        let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
-        let channel = Channel::read_from(BufReader::new(file)).unwrap();
+    fn test_new_episode_intercepted() -> Result<(), Error> {
+        let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
+        let channel = Channel::read_from(BufReader::new(file))?;
 
         let episode = channel.items().iter().nth(14).unwrap();
-        let ep = NewEpisode::new(&episode, 42).unwrap();
+        let ep = NewEpisode::new(&episode, 42)?;
         assert_eq!(ep, *EXPECTED_INTERCEPTED_1);
 
         let episode = channel.items().iter().nth(15).unwrap();
-        let ep = NewEpisode::new(&episode, 42).unwrap();
+        let ep = NewEpisode::new(&episode, 42)?;
 
         assert_eq!(ep, *EXPECTED_INTERCEPTED_2);
+        Ok(())
     }
 
     #[test]
-    fn test_new_episode_minimal_lup() {
-        let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml").unwrap();
-        let channel = Channel::read_from(BufReader::new(file)).unwrap();
+    fn test_new_episode_minimal_lup() -> Result<(), Error> {
+        let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml")?;
+        let channel = Channel::read_from(BufReader::new(file))?;
 
         let episode = channel.items().iter().nth(18).unwrap();
-        let ep = NewEpisodeMinimal::new(&episode, 42).unwrap();
+        let ep = NewEpisodeMinimal::new(&episode, 42)?;
         assert_eq!(ep, *EXPECTED_MINIMAL_LUP_1);
 
         let episode = channel.items().iter().nth(19).unwrap();
-        let ep = NewEpisodeMinimal::new(&episode, 42).unwrap();
+        let ep = NewEpisodeMinimal::new(&episode, 42)?;
         assert_eq!(ep, *EXPECTED_MINIMAL_LUP_2);
+        Ok(())
     }
 
     #[test]
-    fn test_new_episode_lup() {
-        let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml").unwrap();
-        let channel = Channel::read_from(BufReader::new(file)).unwrap();
+    fn test_new_episode_lup() -> Result<(), Error> {
+        let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml")?;
+        let channel = Channel::read_from(BufReader::new(file))?;
 
         let episode = channel.items().iter().nth(18).unwrap();
-        let ep = NewEpisode::new(&episode, 42).unwrap();
+        let ep = NewEpisode::new(&episode, 42)?;
         assert_eq!(ep, *EXPECTED_LUP_1);
 
         let episode = channel.items().iter().nth(19).unwrap();
-        let ep = NewEpisode::new(&episode, 42).unwrap();
+        let ep = NewEpisode::new(&episode, 42)?;
         assert_eq!(ep, *EXPECTED_LUP_2);
+        Ok(())
     }
 
     #[test]
-    fn test_minimal_into_new_episode() {
-        truncate_db().unwrap();
+    fn test_minimal_into_new_episode() -> Result<(), Error> {
+        truncate_db()?;
 
-        let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
-        let channel = Channel::read_from(BufReader::new(file)).unwrap();
+        let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
+        let channel = Channel::read_from(BufReader::new(file))?;
 
         let item = channel.items().iter().nth(14).unwrap();
         let ep = EXPECTED_MINIMAL_INTERCEPTED_1
@@ -555,42 +560,44 @@ mod tests {
             .clone()
             .into_new_episode(&item);
         assert_eq!(ep, *EXPECTED_INTERCEPTED_2);
+        Ok(())
     }
 
     #[test]
-    fn test_new_episode_insert() {
-        truncate_db().unwrap();
+    fn test_new_episode_insert() -> Result<(), Error> {
+        truncate_db()?;
 
-        let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
-        let channel = Channel::read_from(BufReader::new(file)).unwrap();
+        let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
+        let channel = Channel::read_from(BufReader::new(file))?;
 
         let episode = channel.items().iter().nth(14).unwrap();
-        let new_ep = NewEpisode::new(&episode, 42).unwrap();
-        new_ep.insert().unwrap();
-        let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.show_id()).unwrap();
+        let new_ep = NewEpisode::new(&episode, 42)?;
+        new_ep.insert()?;
+        let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.show_id())?;
 
         assert_eq!(new_ep, ep);
         assert_eq!(&new_ep, &*EXPECTED_INTERCEPTED_1);
         assert_eq!(&*EXPECTED_INTERCEPTED_1, &ep);
 
         let episode = channel.items().iter().nth(15).unwrap();
-        let new_ep = NewEpisode::new(&episode, 42).unwrap();
-        new_ep.insert().unwrap();
-        let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.show_id()).unwrap();
+        let new_ep = NewEpisode::new(&episode, 42)?;
+        new_ep.insert()?;
+        let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.show_id())?;
 
         assert_eq!(new_ep, ep);
         assert_eq!(&new_ep, &*EXPECTED_INTERCEPTED_2);
         assert_eq!(&*EXPECTED_INTERCEPTED_2, &ep);
+        Ok(())
     }
 
     #[test]
-    fn test_new_episode_update() {
-        truncate_db().unwrap();
-        let old = EXPECTED_INTERCEPTED_1.clone().to_episode().unwrap();
+    fn test_new_episode_update() -> Result<(), Error> {
+        truncate_db()?;
+        let old = EXPECTED_INTERCEPTED_1.clone().to_episode()?;
 
         let updated = &*UPDATED_DURATION_INTERCEPTED_1;
-        updated.update(old.rowid()).unwrap();
-        let new = dbqueries::get_episode_from_pk(old.title(), old.show_id()).unwrap();
+        updated.update(old.rowid())?;
+        let new = dbqueries::get_episode_from_pk(old.title(), old.show_id())?;
 
         // Assert that updating does not change the rowid and show_id
         assert_ne!(old, new);
@@ -599,11 +606,12 @@ mod tests {
 
         assert_eq!(updated, &new);
         assert_ne!(updated, &old);
+        Ok(())
     }
 
     #[test]
-    fn test_new_episode_index() {
-        truncate_db().unwrap();
+    fn test_new_episode_index() -> Result<(), Error> {
+        truncate_db()?;
         let expected = &*EXPECTED_INTERCEPTED_1;
 
         // First insert
@@ -611,7 +619,7 @@ mod tests {
         // Second identical, This should take the early return path
         assert!(expected.index().is_ok());
         // Get the episode
-        let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap();
+        let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id())?;
         // Assert that NewPodcast is equal to the Indexed one
         assert_eq!(*expected, old);
 
@@ -620,31 +628,33 @@ mod tests {
         // Update the podcast
         assert!(updated.index().is_ok());
         // Get the new Podcast
-        let new = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap();
+        let new = dbqueries::get_episode_from_pk(expected.title(), expected.show_id())?;
         // Assert it's diff from the old one.
         assert_ne!(new, old);
         assert_eq!(*updated, new);
         assert_eq!(new.rowid(), old.rowid());
         assert_eq!(new.show_id(), old.show_id());
+        Ok(())
     }
 
     #[test]
-    fn test_new_episode_to_episode() {
+    fn test_new_episode_to_episode() -> Result<(), Error> {
         let expected = &*EXPECTED_INTERCEPTED_1;
 
         // Assert insert() produces the same result that you would get with to_podcast()
-        truncate_db().unwrap();
-        expected.insert().unwrap();
-        let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap();
-        let ep = expected.to_episode().unwrap();
+        truncate_db()?;
+        expected.insert()?;
+        let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id())?;
+        let ep = expected.to_episode()?;
         assert_eq!(old, ep);
 
         // Same as above, diff order
-        truncate_db().unwrap();
-        let ep = expected.to_episode().unwrap();
+        truncate_db()?;
+        let ep = expected.to_episode()?;
         // This should error as a unique constrain violation
         assert!(expected.insert().is_err());
-        let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap();
+        let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id())?;
         assert_eq!(old, ep);
+        Ok(())
     }
 }
@@ -157,6 +157,7 @@ mod tests {
     use super::*;
     // use tokio_core::reactor::Core;
 
+    use failure::Error;
     use rss::Channel;
 
     use database::truncate_db;
@@ -286,73 +287,80 @@ mod tests {
     }
 
     #[test]
-    fn test_new_podcast_intercepted() {
-        let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
-        let channel = Channel::read_from(BufReader::new(file)).unwrap();
+    fn test_new_podcast_intercepted() -> Result<(), Error> {
+        let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
+        let channel = Channel::read_from(BufReader::new(file))?;
 
         let pd = NewShow::new(&channel, 42);
         assert_eq!(*EXPECTED_INTERCEPTED, pd);
+        Ok(())
     }
 
     #[test]
-    fn test_new_podcast_lup() {
-        let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml").unwrap();
-        let channel = Channel::read_from(BufReader::new(file)).unwrap();
+    fn test_new_podcast_lup() -> Result<(), Error> {
+        let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml")?;
+        let channel = Channel::read_from(BufReader::new(file))?;
 
         let pd = NewShow::new(&channel, 42);
         assert_eq!(*EXPECTED_LUP, pd);
+        Ok(())
     }
 
     #[test]
-    fn test_new_podcast_thetipoff() {
-        let file = File::open("tests/feeds/2018-01-20-TheTipOff.xml").unwrap();
-        let channel = Channel::read_from(BufReader::new(file)).unwrap();
+    fn test_new_podcast_thetipoff() -> Result<(), Error> {
+        let file = File::open("tests/feeds/2018-01-20-TheTipOff.xml")?;
+        let channel = Channel::read_from(BufReader::new(file))?;
 
         let pd = NewShow::new(&channel, 42);
         assert_eq!(*EXPECTED_TIPOFF, pd);
+        Ok(())
     }
 
     #[test]
-    fn test_new_podcast_steal_the_stars() {
-        let file = File::open("tests/feeds/2018-01-20-StealTheStars.xml").unwrap();
-        let channel = Channel::read_from(BufReader::new(file)).unwrap();
+    fn test_new_podcast_steal_the_stars() -> Result<(), Error> {
+        let file = File::open("tests/feeds/2018-01-20-StealTheStars.xml")?;
+        let channel = Channel::read_from(BufReader::new(file))?;
 
         let pd = NewShow::new(&channel, 42);
         assert_eq!(*EXPECTED_STARS, pd);
+        Ok(())
     }
 
     #[test]
-    fn test_new_podcast_greater_than_code() {
-        let file = File::open("tests/feeds/2018-01-20-GreaterThanCode.xml").unwrap();
-        let channel = Channel::read_from(BufReader::new(file)).unwrap();
+    fn test_new_podcast_greater_than_code() -> Result<(), Error> {
+        let file = File::open("tests/feeds/2018-01-20-GreaterThanCode.xml")?;
+        let channel = Channel::read_from(BufReader::new(file))?;
 
         let pd = NewShow::new(&channel, 42);
         assert_eq!(*EXPECTED_CODE, pd);
+        Ok(())
     }
 
     #[test]
-    fn test_new_podcast_ellinofreneia() {
-        let file = File::open("tests/feeds/2018-03-28-Ellinofreneia.xml").unwrap();
-        let channel = Channel::read_from(BufReader::new(file)).unwrap();
+    fn test_new_podcast_ellinofreneia() -> Result<(), Error> {
+        let file = File::open("tests/feeds/2018-03-28-Ellinofreneia.xml")?;
+        let channel = Channel::read_from(BufReader::new(file))?;
 
         let pd = NewShow::new(&channel, 42);
         assert_eq!(*EXPECTED_ELLINOFRENEIA, pd);
+        Ok(())
     }
 
     #[test]
     // This maybe could be a doc test on insert.
-    fn test_new_podcast_insert() {
-        truncate_db().unwrap();
-        let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
-        let channel = Channel::read_from(BufReader::new(file)).unwrap();
+    fn test_new_podcast_insert() -> Result<(), Error> {
+        truncate_db()?;
+        let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
+        let channel = Channel::read_from(BufReader::new(file))?;
 
         let npd = NewShow::new(&channel, 42);
-        npd.insert().unwrap();
-        let pd = dbqueries::get_podcast_from_source_id(42).unwrap();
+        npd.insert()?;
+        let pd = dbqueries::get_podcast_from_source_id(42)?;
 
         assert_eq!(npd, pd);
         assert_eq!(*EXPECTED_INTERCEPTED, npd);
         assert_eq!(&*EXPECTED_INTERCEPTED, &pd);
+        Ok(())
     }
 
     #[test]
@@ -360,31 +368,32 @@ mod tests {
     // Currently there's a test that only checks new description or title.
     // If you have time and want to help, implement the test for the other fields
     // too.
-    fn test_new_podcast_update() {
-        truncate_db().unwrap();
-        let old = EXPECTED_INTERCEPTED.to_podcast().unwrap();
+    fn test_new_podcast_update() -> Result<(), Error> {
+        truncate_db()?;
+        let old = EXPECTED_INTERCEPTED.to_podcast()?;
 
         let updated = &*UPDATED_DESC_INTERCEPTED;
-        updated.update(old.id()).unwrap();
-        let new = dbqueries::get_podcast_from_source_id(42).unwrap();
+        updated.update(old.id())?;
+        let new = dbqueries::get_podcast_from_source_id(42)?;
 
         assert_ne!(old, new);
         assert_eq!(old.id(), new.id());
         assert_eq!(old.source_id(), new.source_id());
         assert_eq!(updated, &new);
         assert_ne!(updated, &old);
+        Ok(())
     }
 
     #[test]
-    fn test_new_podcast_index() {
-        truncate_db().unwrap();
+    fn test_new_podcast_index() -> Result<(), Error> {
+        truncate_db()?;
 
         // First insert
         assert!(EXPECTED_INTERCEPTED.index().is_ok());
         // Second identical, This should take the early return path
         assert!(EXPECTED_INTERCEPTED.index().is_ok());
         // Get the podcast
-        let old = dbqueries::get_podcast_from_source_id(42).unwrap();
+        let old = dbqueries::get_podcast_from_source_id(42)?;
         // Assert that NewShow is equal to the Indexed one
         assert_eq!(&*EXPECTED_INTERCEPTED, &old);
 
@@ -393,28 +402,30 @@ mod tests {
         // Update the podcast
         assert!(updated.index().is_ok());
         // Get the new Show
-        let new = dbqueries::get_podcast_from_source_id(42).unwrap();
+        let new = dbqueries::get_podcast_from_source_id(42)?;
         // Assert it's diff from the old one.
         assert_ne!(new, old);
         assert_eq!(new.id(), old.id());
         assert_eq!(new.source_id(), old.source_id());
+        Ok(())
     }
 
     #[test]
-    fn test_to_podcast() {
+    fn test_to_podcast() -> Result<(), Error> {
         // Assert insert() produces the same result that you would get with to_podcast()
-        truncate_db().unwrap();
-        EXPECTED_INTERCEPTED.insert().unwrap();
-        let old = dbqueries::get_podcast_from_source_id(42).unwrap();
-        let pd = EXPECTED_INTERCEPTED.to_podcast().unwrap();
+        truncate_db()?;
+        EXPECTED_INTERCEPTED.insert()?;
+        let old = dbqueries::get_podcast_from_source_id(42)?;
+        let pd = EXPECTED_INTERCEPTED.to_podcast()?;
         assert_eq!(old, pd);
 
         // Same as above, diff order
-        truncate_db().unwrap();
-        let pd = EXPECTED_INTERCEPTED.to_podcast().unwrap();
+        truncate_db()?;
+        let pd = EXPECTED_INTERCEPTED.to_podcast()?;
         // This should error as a unique constrain violation
         assert!(EXPECTED_INTERCEPTED.insert().is_err());
-        let old = dbqueries::get_podcast_from_source_id(42).unwrap();
+        let old = dbqueries::get_podcast_from_source_id(42)?;
         assert_eq!(old, pd);
+        Ok(())
     }
 }
@@ -275,29 +275,31 @@ fn response_to_channel(res: Response) -> impl Future<Item = Channel, Error = Dat
 #[cfg(test)]
 mod tests {
     use super::*;
+    use failure::Error;
     use tokio_core::reactor::Core;
 
     use database::truncate_db;
     use utils::get_feed;
 
     #[test]
-    fn test_into_feed() {
-        truncate_db().unwrap();
+    fn test_into_feed() -> Result<(), Error> {
+        truncate_db()?;
 
-        let mut core = Core::new().unwrap();
+        let mut core = Core::new()?;
         let client = Client::configure()
-            .connector(HttpsConnector::new(4, &core.handle()).unwrap())
+            .connector(HttpsConnector::new(4, &core.handle())?)
             .build(&core.handle());
 
         let url = "https://web.archive.org/web/20180120083840if_/https://feeds.feedburner.\
                    com/InterceptedWithJeremyScahill";
-        let source = Source::from_url(url).unwrap();
+        let source = Source::from_url(url)?;
         let id = source.id();
 
         let feed = source.into_feed(client);
-        let feed = core.run(feed).unwrap();
+        let feed = core.run(feed)?;
 
         let expected = get_feed("tests/feeds/2018-01-20-Intercepted.xml", id);
         assert_eq!(expected, feed);
+        Ok(())
     }
 }
@@ -100,9 +100,10 @@ pub fn extract_sources<R: Read>(reader: R) -> Result<HashSet<Opml>, reader::Erro
 mod tests {
     use super::*;
     use chrono::Local;
+    use failure::Error;
 
     #[test]
-    fn test_extract() {
+    fn test_extract() -> Result<(), Error> {
         let int_title = String::from("Intercepted with Jeremy Scahill");
         let int_url = String::from("https://feeds.feedburner.com/InterceptedWithJeremyScahill");
         let int_desc =
@@ -160,6 +161,7 @@ mod tests {
                 url: dec_url
             },
         ];
-        assert_eq!(extract_sources(sample1.as_bytes()).unwrap(), map);
+        assert_eq!(extract_sources(sample1.as_bytes())?, map);
+        Ok(())
     }
 }
@@ -85,6 +85,7 @@ mod tests {
     use super::*;
     use database::truncate_db;
     use dbqueries;
+    use failure::Error;
     use Source;
 
     // (path, url) tuples.
@@ -100,28 +101,29 @@ mod tests {
 
     #[test]
     /// Insert feeds and update/index them.
-    fn test_pipeline() {
-        truncate_db().unwrap();
+    fn test_pipeline() -> Result<(), Error> {
+        truncate_db()?;
         let bad_url = "https://gitlab.gnome.org/World/podcasts.atom";
         // if a stream returns error/None it stops
         // bad we want to parse all feeds regardless if one fails
-        Source::from_url(bad_url).unwrap();
+        Source::from_url(bad_url)?;
 
         URLS.iter().for_each(|url| {
             // Index the urls into the source table.
             Source::from_url(url).unwrap();
         });
 
-        let sources = dbqueries::get_sources().unwrap();
-        run(sources).unwrap();
+        let sources = dbqueries::get_sources()?;
+        run(sources)?;
 
-        let sources = dbqueries::get_sources().unwrap();
+        let sources = dbqueries::get_sources()?;
         // Run again to cover Unique constrains erros.
-        run(sources).unwrap();
+        run(sources)?;
 
         // Assert the index rows equal the controlled results
-        assert_eq!(dbqueries::get_sources().unwrap().len(), 6);
-        assert_eq!(dbqueries::get_podcasts().unwrap().len(), 5);
-        assert_eq!(dbqueries::get_episodes().unwrap().len(), 354);
+        assert_eq!(dbqueries::get_sources()?.len(), 6);
+        assert_eq!(dbqueries::get_podcasts()?.len(), 5);
+        assert_eq!(dbqueries::get_episodes()?.len(), 354);
+        Ok(())
     }
 }
@@ -162,6 +162,7 @@ mod tests {
     use self::tempdir::TempDir;
     use super::*;
     use chrono::Duration;
+    use failure::Error;
 
     use database::truncate_db;
     use models::NewEpisodeBuilder;
@@ -169,15 +170,15 @@ mod tests {
     use std::fs::File;
     use std::io::Write;
 
-    fn helper_db() -> TempDir {
+    fn helper_db() -> Result<TempDir, Error> {
         // Clean the db
-        truncate_db().unwrap();
+        truncate_db()?;
         // Setup tmp file stuff
-        let tmp_dir = TempDir::new("podcasts_test").unwrap();
+        let tmp_dir = TempDir::new("podcasts_test")?;
         let valid_path = tmp_dir.path().join("virtual_dl.mp3");
         let bad_path = tmp_dir.path().join("invalid_thing.mp3");
-        let mut tmp_file = File::create(&valid_path).unwrap();
-        writeln!(tmp_file, "Foooo").unwrap();
+        let mut tmp_file = File::create(&valid_path)?;
+        writeln!(tmp_file, "Foooo")?;
 
         // Setup episodes
         let n1 = NewEpisodeBuilder::default()
@@ -185,33 +186,31 @@ mod tests {
             .show_id(0)
             .build()
             .unwrap()
-            .to_episode()
-            .unwrap();
+            .to_episode()?;
 
         let n2 = NewEpisodeBuilder::default()
             .title("bar_baz".to_string())
             .show_id(1)
             .build()
             .unwrap()
-            .to_episode()
-            .unwrap();
+            .to_episode()?;
 
-        let mut ep1 = dbqueries::get_episode_cleaner_from_pk(n1.title(), n1.show_id()).unwrap();
-        let mut ep2 = dbqueries::get_episode_cleaner_from_pk(n2.title(), n2.show_id()).unwrap();
+        let mut ep1 = dbqueries::get_episode_cleaner_from_pk(n1.title(), n1.show_id())?;
+        let mut ep2 = dbqueries::get_episode_cleaner_from_pk(n2.title(), n2.show_id())?;
         ep1.set_local_uri(Some(valid_path.to_str().unwrap()));
         ep2.set_local_uri(Some(bad_path.to_str().unwrap()));
 
-        ep1.save().unwrap();
-        ep2.save().unwrap();
+        ep1.save()?;
+        ep2.save()?;
 
-        tmp_dir
+        Ok(tmp_dir)
     }
 
     #[test]
-    fn test_download_checker() {
-        let tmp_dir = helper_db();
-        download_checker().unwrap();
-        let episodes = dbqueries::get_downloaded_episodes().unwrap();
+    fn test_download_checker() -> Result<(), Error> {
+        let tmp_dir = helper_db()?;
+        download_checker()?;
+        let episodes = dbqueries::get_downloaded_episodes()?;
         let valid_path = tmp_dir.path().join("virtual_dl.mp3");
 
         assert_eq!(episodes.len(), 1);
@@ -220,70 +219,75 @@ mod tests {
             episodes.first().unwrap().local_uri()
         );
 
-        let _tmp_dir = helper_db();
-        download_checker().unwrap();
-        let episode = dbqueries::get_episode_cleaner_from_pk("bar_baz", 1).unwrap();
+        let _tmp_dir = helper_db()?;
+        download_checker()?;
+        let episode = dbqueries::get_episode_cleaner_from_pk("bar_baz", 1)?;
         assert!(episode.local_uri().is_none());
+        Ok(())
     }
 
     #[test]
-    fn test_download_cleaner() {
-        let _tmp_dir = helper_db();
-        let mut episode: EpisodeCleanerModel = dbqueries::get_episode_cleaner_from_pk("foo_bar", 0)
-            .unwrap()
-            .into();
+    fn test_download_cleaner() -> Result<(), Error> {
+        let _tmp_dir = helper_db()?;
+        let mut episode: EpisodeCleanerModel =
+            dbqueries::get_episode_cleaner_from_pk("foo_bar", 0)?.into();
 
         let valid_path = episode.local_uri().unwrap().to_owned();
-        delete_local_content(&mut episode).unwrap();
+        delete_local_content(&mut episode)?;
         assert_eq!(Path::new(&valid_path).exists(), false);
+        Ok(())
     }
 
     #[test]
-    fn test_played_cleaner_expired() {
-        let _tmp_dir = helper_db();
-        let mut episode = dbqueries::get_episode_cleaner_from_pk("foo_bar", 0).unwrap();
+    fn test_played_cleaner_expired() -> Result<(), Error> {
+        let _tmp_dir = helper_db()?;
+        let mut episode = dbqueries::get_episode_cleaner_from_pk("foo_bar", 0)?;
         let cleanup_date = Utc::now() - Duration::seconds(1000);
         let epoch = cleanup_date.timestamp() as i32 - 1;
         episode.set_played(Some(epoch));
-        episode.save().unwrap();
+        episode.save()?;
         let valid_path = episode.local_uri().unwrap().to_owned();
 
         // This should delete the file
-        played_cleaner(cleanup_date).unwrap();
+        played_cleaner(cleanup_date)?;
         assert_eq!(Path::new(&valid_path).exists(), false);
+        Ok(())
     }
 
     #[test]
-    fn test_played_cleaner_none() {
-        let _tmp_dir = helper_db();
-        let mut episode = dbqueries::get_episode_cleaner_from_pk("foo_bar", 0).unwrap();
+    fn test_played_cleaner_none() -> Result<(), Error> {
+        let _tmp_dir = helper_db()?;
+        let mut episode = dbqueries::get_episode_cleaner_from_pk("foo_bar", 0)?;
        let cleanup_date = Utc::now() - Duration::seconds(1000);
         let epoch = cleanup_date.timestamp() as i32 + 1;
         episode.set_played(Some(epoch));
-        episode.save().unwrap();
+        episode.save()?;
         let valid_path = episode.local_uri().unwrap().to_owned();
 
         // This should not delete the file
-        played_cleaner(cleanup_date).unwrap();
+        played_cleaner(cleanup_date)?;
         assert_eq!(Path::new(&valid_path).exists(), true);
+        Ok(())
     }
 
     #[test]
-    fn test_url_cleaner() {
+    fn test_url_cleaner() -> Result<(), Error> {
         let good_url = "http://traffic.megaphone.fm/FL8608731318.mp3";
         let bad_url = "http://traffic.megaphone.fm/FL8608731318.mp3?updated=1484685184";
 
         assert_eq!(url_cleaner(bad_url), good_url);
         assert_eq!(url_cleaner(good_url), good_url);
         assert_eq!(url_cleaner(&format!(" {}\t\n", bad_url)), good_url);
+        Ok(())
     }
 
     #[test]
     // This test needs access to local system so we ignore it by default.
     #[ignore]
-    fn test_get_dl_folder() {
+    fn test_get_dl_folder() -> Result<(), Error> {
         let foo_ = format!("{}/{}", DL_DIR.to_str().unwrap(), "foo");
-        assert_eq!(get_download_folder("foo").unwrap(), foo_);
+        assert_eq!(get_download_folder("foo")?, foo_);
         let _ = fs::remove_dir_all(foo_);
+        Ok(())
     }
 }
@@ -236,6 +236,7 @@ pub fn cache_image(pd: &ShowCoverModel) -> Result<String, DownloadError> {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use failure::Error;
     use podcasts_data::dbqueries;
     use podcasts_data::pipeline;
     use podcasts_data::Source;
@@ -246,17 +247,17 @@ mod tests {
     // This test inserts an rss feed to your `XDG_DATA/podcasts/podcasts.db` so we make it explicit
     // to run it.
     #[ignore]
-    fn test_cache_image() {
+    fn test_cache_image() -> Result<(), Error> {
         let url = "https://web.archive.org/web/20180120110727if_/https://rss.acast.com/thetipoff";
         // Create and index a source
-        let source = Source::from_url(url).unwrap();
+        let source = Source::from_url(url)?;
         // Copy it's id
         let sid = source.id();
         // Convert Source it into a future Feed and index it
-        pipeline::run(vec![source]).unwrap();
+        pipeline::run(vec![source])?;
 
         // Get the Podcast
-        let pd = dbqueries::get_podcast_from_source_id(sid).unwrap().into();
+        let pd = dbqueries::get_podcast_from_source_id(sid)?.into();
 
         let img_path = cache_image(&pd);
         let foo_ = format!(
@@ -264,7 +265,8 @@ mod tests {
             PODCASTS_CACHE.to_str().unwrap(),
             pd.title()
         );
-        assert_eq!(img_path.unwrap(), foo_);
-        fs::remove_file(foo_).unwrap();
+        assert_eq!(img_path?, foo_);
+        fs::remove_file(foo_)?;
+        Ok(())
     }
 }
@@ -127,26 +127,26 @@ mod tests {
     #[ignore]
     // THIS IS NOT A RELIABLE TEST
     // Just quick sanity check
-    fn test_start_dl() {
+    fn test_start_dl() -> Result<(), Error> {
         let url = "https://web.archive.org/web/20180120110727if_/https://rss.acast.com/thetipoff";
         // Create and index a source
-        let mut source = Source::from_url(url).unwrap();
+        let mut source = Source::from_url(url)?;
         // Copy its id
         let sid = source.id();
         source.set_http_etag(None);
         source.set_last_modified(None);
-        source.save().unwrap();
-        pipeline::run(vec![source]).unwrap();
+        source.save()?;
+        pipeline::run(vec![source])?;
 
         // Get the podcast
-        let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();
+        let pd = dbqueries::get_podcast_from_source_id(sid)?;
         let title = "Coming soon... The Tip Off";
         // Get an episode
-        let episode: Episode = dbqueries::get_episode_from_pk(title, pd.id()).unwrap();
+        let episode: Episode = dbqueries::get_episode_from_pk(title, pd.id())?;
 
-        let download_fold = get_download_folder(&pd.title()).unwrap();
+        let download_fold = get_download_folder(&pd.title())?;
         let fold2 = download_fold.clone();
-        add(episode.rowid(), download_fold).unwrap();
+        add(episode.rowid(), download_fold)?;
         assert_eq!(ACTIVE_DOWNLOADS.read().unwrap().len(), 1);
 
         // Give it some time to download the file
@@ -155,37 +155,37 @@ mod tests {
         let final_path = format!("{}/{}.mp3", &fold2, episode.rowid());
         assert_eq!(ACTIVE_DOWNLOADS.read().unwrap().len(), 0);
         assert!(Path::new(&final_path).exists());
-        fs::remove_file(final_path).unwrap();
+        fs::remove_file(final_path)?;
+        Ok(())
     }
 
     #[test]
     // This test needs access to local system so we ignore it by default.
     #[ignore]
-    fn test_dl_steal_the_stars() {
+    fn test_dl_steal_the_stars() -> Result<(), Error> {
         let url =
             "https://web.archive.org/web/20180120104957if_/https://rss.art19.com/steal-the-stars";
         // Create and index a source
-        let mut source = Source::from_url(url).unwrap();
+        let mut source = Source::from_url(url)?;
         // Copy its id
         let sid = source.id();
         source.set_http_etag(None);
         source.set_last_modified(None);
-        source.save().unwrap();
-        pipeline::run(vec![source]).unwrap();
+        source.save()?;
+        pipeline::run(vec![source])?;
 
         // Get the podcast
-        let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();
+        let pd = dbqueries::get_podcast_from_source_id(sid)?;
         let title = "Introducing Steal the Stars";
         // Get an episode
-        let mut episode = dbqueries::get_episode_from_pk(title, pd.id())
-            .unwrap()
-            .into();
-        let download_fold = get_download_folder(&pd.title()).unwrap();
+        let mut episode = dbqueries::get_episode_from_pk(title, pd.id())?.into();
+        let download_fold = get_download_folder(&pd.title())?;
 
-        get_episode(&mut episode, &download_fold, None).unwrap();
+        get_episode(&mut episode, &download_fold, None)?;
 
         let final_path = format!("{}/{}.mp3", &download_fold, episode.rowid());
         assert!(Path::new(&final_path).exists());
-        fs::remove_file(final_path).unwrap();
+        fs::remove_file(final_path)?;
+        Ok(())
     }
 }
@@ -395,6 +395,7 @@ pub(crate) fn on_import_clicked(window: &gtk::ApplicationWindow, sender: &Sender
 #[cfg(test)]
 mod tests {
     use super::*;
+    use failure::Error;
     // use podcasts_data::Source;
     // use podcasts_data::dbqueries;
 
@@ -419,29 +420,32 @@ mod tests {
     // }
 
     #[test]
-    fn test_itunes_to_rss() {
+    fn test_itunes_to_rss() -> Result<(), Error> {
         let itunes_url = "https://itunes.apple.com/podcast/id1195206601";
         let rss_url = String::from("http://feeds.feedburner.com/InterceptedWithJeremyScahill");
-        assert_eq!(rss_url, itunes_to_rss(itunes_url).unwrap());
+        assert_eq!(rss_url, itunes_to_rss(itunes_url)?);
 
         let itunes_url = "https://itunes.apple.com/podcast/id000000000000000";
         assert!(itunes_to_rss(itunes_url).is_err());
+        Ok(())
     }
 
     #[test]
-    fn test_itunes_id() {
+    fn test_itunes_id() -> Result<(), Error> {
         let id = 1195206601;
         let itunes_url = "https://itunes.apple.com/podcast/id1195206601";
         assert_eq!(id, itunes_id_from_url(itunes_url).unwrap());
+        Ok(())
     }
 
     #[test]
-    fn test_itunes_lookup_id() {
+    fn test_itunes_lookup_id() -> Result<(), Error> {
         let id = 1195206601;
         let rss_url = "http://feeds.feedburner.com/InterceptedWithJeremyScahill";
-        assert_eq!(rss_url, lookup_id(id).unwrap());
+        assert_eq!(rss_url, lookup_id(id)?);
 
         let id = 000000000;
         assert!(lookup_id(id).is_err());
+        Ok(())
     }
 }
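Beyond the test functions themselves, the commit also converts the shared helper_db() fixture to return Result<TempDir, Error>, so the tests that call it can propagate its failures with ? as well. A rough sketch of that shape, with hypothetical names (helper_dir, fixture.txt, sketch_test) and std::io::Error standing in for the failure::Error used above:

    use std::fs::File;
    use std::io::{self, Write};

    use tempdir::TempDir; // the tempdir crate, as in the diff above

    // Hypothetical fixture helper: set up a scratch directory and
    // propagate every error to the caller instead of unwrapping.
    fn helper_dir() -> Result<TempDir, io::Error> {
        let tmp_dir = TempDir::new("sketch_test")?;
        let mut file = File::create(tmp_dir.path().join("fixture.txt"))?;
        writeln!(file, "fixture data")?;
        // Hand the TempDir back so it is not dropped (and deleted) here.
        Ok(tmp_dir)
    }

    #[cfg(test)]
    mod tests {
        use super::*;

        #[test]
        fn fixture_exists() -> Result<(), io::Error> {
            let tmp_dir = helper_dir()?;
            assert!(tmp_dir.path().join("fixture.txt").exists());
            Ok(())
        }
    }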