Refactor the tests to use ? operator

As of rustc 1.27 #[test] can return errors. This greatly improves
the ergonomics.
This commit is contained in:
Jordan Petridis 2018-08-18 17:02:31 +03:00
parent 7bbd9a1a4f
commit 89b99614a0
No known key found for this signature in database
GPG Key ID: E8523968931763BE
12 changed files with 250 additions and 207 deletions

View File

@ -66,7 +66,7 @@ fn run_migration_on(connection: &SqliteConnection) -> Result<(), DataError> {
/// Reset the database into a clean state.
// Tests share a temp file db.
#[allow(dead_code)]
#[cfg(test)]
pub fn truncate_db() -> Result<(), DataError> {
let db = connection();
let con = db.get()?;

View File

@ -448,24 +448,26 @@ pub fn update_none_to_played_now(parent: &Show) -> Result<usize, DataError> {
mod tests {
use super::*;
use database::*;
use failure::Error;
use pipeline;
#[test]
fn test_update_none_to_played_now() {
truncate_db().unwrap();
fn test_update_none_to_played_now() -> Result<(), Error> {
truncate_db()?;
let url = "https://web.archive.org/web/20180120083840if_/https://feeds.feedburner.\
com/InterceptedWithJeremyScahill";
let source = Source::from_url(url).unwrap();
let source = Source::from_url(url)?;
let id = source.id();
pipeline::run(vec![source]).unwrap();
let pd = get_podcast_from_source_id(id).unwrap();
pipeline::run(vec![source])?;
let pd = get_podcast_from_source_id(id)?;
let eps_num = get_pd_unplayed_episodes(&pd).unwrap().len();
let eps_num = get_pd_unplayed_episodes(&pd)?.len();
assert_ne!(eps_num, 0);
update_none_to_played_now(&pd).unwrap();
let eps_num2 = get_pd_unplayed_episodes(&pd).unwrap().len();
update_none_to_played_now(&pd)?;
let eps_num2 = get_pd_unplayed_episodes(&pd)?.len();
assert_eq!(eps_num2, 0);
Ok(())
}
}

View File

@ -126,6 +126,7 @@ fn batch_insert_episodes(episodes: &[NewEpisode]) {
#[cfg(test)]
mod tests {
use failure::Error;
use rss::Channel;
use tokio_core::reactor::Core;
@ -170,8 +171,8 @@ mod tests {
};
#[test]
fn test_complete_index() {
truncate_db().unwrap();
fn test_complete_index() -> Result<(), Error> {
truncate_db()?;
let feeds: Vec<_> = URLS
.iter()
@ -181,41 +182,44 @@ mod tests {
get_feed(path, s.id())
}).collect();
let mut core = Core::new().unwrap();
// Index the channels
let mut core = Core::new()?;
// Index the channels
let list: Vec<_> = feeds.into_iter().map(|x| x.index()).collect();
let _foo = core.run(join_all(list));
// Assert the index rows equal the controlled results
assert_eq!(dbqueries::get_sources().unwrap().len(), 5);
assert_eq!(dbqueries::get_podcasts().unwrap().len(), 5);
assert_eq!(dbqueries::get_episodes().unwrap().len(), 354);
assert_eq!(dbqueries::get_sources()?.len(), 5);
assert_eq!(dbqueries::get_podcasts()?.len(), 5);
assert_eq!(dbqueries::get_episodes()?.len(), 354);
Ok(())
}
#[test]
fn test_feed_parse_podcast() {
truncate_db().unwrap();
fn test_feed_parse_podcast() -> Result<(), Error> {
truncate_db()?;
let path = "tests/feeds/2018-01-20-Intercepted.xml";
let feed = get_feed(path, 42);
let file = fs::File::open(path).unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();
let file = fs::File::open(path)?;
let channel = Channel::read_from(BufReader::new(file))?;
let pd = NewShow::new(&channel, 42);
assert_eq!(feed.parse_podcast(), pd);
Ok(())
}
#[test]
fn test_feed_index_channel_items() {
truncate_db().unwrap();
fn test_feed_index_channel_items() -> Result<(), Error> {
truncate_db()?;
let path = "tests/feeds/2018-01-20-Intercepted.xml";
let feed = get_feed(path, 42);
let pd = feed.parse_podcast().to_podcast().unwrap();
let pd = feed.parse_podcast().to_podcast()?;
feed.index_channel_items(pd).wait().unwrap();
assert_eq!(dbqueries::get_podcasts().unwrap().len(), 1);
assert_eq!(dbqueries::get_episodes().unwrap().len(), 43);
feed.index_channel_items(pd).wait()?;
assert_eq!(dbqueries::get_podcasts()?.len(), 1);
assert_eq!(dbqueries::get_episodes()?.len(), 43);
Ok(())
}
}

View File

@ -312,6 +312,7 @@ impl NewEpisodeMinimal {
mod tests {
use database::truncate_db;
use dbqueries;
use failure::Error;
use models::new_episode::{NewEpisodeMinimal, NewEpisodeMinimalBuilder};
use models::*;
@ -477,68 +478,72 @@ mod tests {
}
#[test]
fn test_new_episode_minimal_intercepted() {
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();
fn test_new_episode_minimal_intercepted() -> Result<(), Error> {
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
let channel = Channel::read_from(BufReader::new(file))?;
let episode = channel.items().iter().nth(14).unwrap();
let ep = NewEpisodeMinimal::new(&episode, 42).unwrap();
let ep = NewEpisodeMinimal::new(&episode, 42)?;
assert_eq!(ep, *EXPECTED_MINIMAL_INTERCEPTED_1);
let episode = channel.items().iter().nth(15).unwrap();
let ep = NewEpisodeMinimal::new(&episode, 42).unwrap();
let ep = NewEpisodeMinimal::new(&episode, 42)?;
assert_eq!(ep, *EXPECTED_MINIMAL_INTERCEPTED_2);
Ok(())
}
#[test]
fn test_new_episode_intercepted() {
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();
fn test_new_episode_intercepted() -> Result<(), Error> {
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
let channel = Channel::read_from(BufReader::new(file))?;
let episode = channel.items().iter().nth(14).unwrap();
let ep = NewEpisode::new(&episode, 42).unwrap();
let ep = NewEpisode::new(&episode, 42)?;
assert_eq!(ep, *EXPECTED_INTERCEPTED_1);
let episode = channel.items().iter().nth(15).unwrap();
let ep = NewEpisode::new(&episode, 42).unwrap();
let ep = NewEpisode::new(&episode, 42)?;
assert_eq!(ep, *EXPECTED_INTERCEPTED_2);
Ok(())
}
#[test]
fn test_new_episode_minimal_lup() {
let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();
fn test_new_episode_minimal_lup() -> Result<(), Error> {
let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml")?;
let channel = Channel::read_from(BufReader::new(file))?;
let episode = channel.items().iter().nth(18).unwrap();
let ep = NewEpisodeMinimal::new(&episode, 42).unwrap();
let ep = NewEpisodeMinimal::new(&episode, 42)?;
assert_eq!(ep, *EXPECTED_MINIMAL_LUP_1);
let episode = channel.items().iter().nth(19).unwrap();
let ep = NewEpisodeMinimal::new(&episode, 42).unwrap();
let ep = NewEpisodeMinimal::new(&episode, 42)?;
assert_eq!(ep, *EXPECTED_MINIMAL_LUP_2);
Ok(())
}
#[test]
fn test_new_episode_lup() {
let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();
fn test_new_episode_lup() -> Result<(), Error> {
let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml")?;
let channel = Channel::read_from(BufReader::new(file))?;
let episode = channel.items().iter().nth(18).unwrap();
let ep = NewEpisode::new(&episode, 42).unwrap();
let ep = NewEpisode::new(&episode, 42)?;
assert_eq!(ep, *EXPECTED_LUP_1);
let episode = channel.items().iter().nth(19).unwrap();
let ep = NewEpisode::new(&episode, 42).unwrap();
let ep = NewEpisode::new(&episode, 42)?;
assert_eq!(ep, *EXPECTED_LUP_2);
Ok(())
}
#[test]
fn test_minimal_into_new_episode() {
truncate_db().unwrap();
fn test_minimal_into_new_episode() -> Result<(), Error> {
truncate_db()?;
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
let channel = Channel::read_from(BufReader::new(file))?;
let item = channel.items().iter().nth(14).unwrap();
let ep = EXPECTED_MINIMAL_INTERCEPTED_1
@ -555,42 +560,44 @@ mod tests {
.clone()
.into_new_episode(&item);
assert_eq!(ep, *EXPECTED_INTERCEPTED_2);
Ok(())
}
#[test]
fn test_new_episode_insert() {
truncate_db().unwrap();
fn test_new_episode_insert() -> Result<(), Error> {
truncate_db()?;
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
let channel = Channel::read_from(BufReader::new(file))?;
let episode = channel.items().iter().nth(14).unwrap();
let new_ep = NewEpisode::new(&episode, 42).unwrap();
new_ep.insert().unwrap();
let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.show_id()).unwrap();
let new_ep = NewEpisode::new(&episode, 42)?;
new_ep.insert()?;
let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.show_id())?;
assert_eq!(new_ep, ep);
assert_eq!(&new_ep, &*EXPECTED_INTERCEPTED_1);
assert_eq!(&*EXPECTED_INTERCEPTED_1, &ep);
let episode = channel.items().iter().nth(15).unwrap();
let new_ep = NewEpisode::new(&episode, 42).unwrap();
new_ep.insert().unwrap();
let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.show_id()).unwrap();
let new_ep = NewEpisode::new(&episode, 42)?;
new_ep.insert()?;
let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.show_id())?;
assert_eq!(new_ep, ep);
assert_eq!(&new_ep, &*EXPECTED_INTERCEPTED_2);
assert_eq!(&*EXPECTED_INTERCEPTED_2, &ep);
Ok(())
}
#[test]
fn test_new_episode_update() {
truncate_db().unwrap();
let old = EXPECTED_INTERCEPTED_1.clone().to_episode().unwrap();
fn test_new_episode_update() -> Result<(), Error> {
truncate_db()?;
let old = EXPECTED_INTERCEPTED_1.clone().to_episode()?;
let updated = &*UPDATED_DURATION_INTERCEPTED_1;
updated.update(old.rowid()).unwrap();
let new = dbqueries::get_episode_from_pk(old.title(), old.show_id()).unwrap();
updated.update(old.rowid())?;
let new = dbqueries::get_episode_from_pk(old.title(), old.show_id())?;
// Assert that updating does not change the rowid and show_id
assert_ne!(old, new);
@ -599,11 +606,12 @@ mod tests {
assert_eq!(updated, &new);
assert_ne!(updated, &old);
Ok(())
}
#[test]
fn test_new_episode_index() {
truncate_db().unwrap();
fn test_new_episode_index() -> Result<(), Error> {
truncate_db()?;
let expected = &*EXPECTED_INTERCEPTED_1;
// First insert
@ -611,7 +619,7 @@ mod tests {
// Second identical, This should take the early return path
assert!(expected.index().is_ok());
// Get the episode
let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap();
let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id())?;
// Assert that NewPodcast is equal to the Indexed one
assert_eq!(*expected, old);
@ -620,31 +628,33 @@ mod tests {
// Update the podcast
assert!(updated.index().is_ok());
// Get the new Podcast
let new = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap();
let new = dbqueries::get_episode_from_pk(expected.title(), expected.show_id())?;
// Assert it's diff from the old one.
assert_ne!(new, old);
assert_eq!(*updated, new);
assert_eq!(new.rowid(), old.rowid());
assert_eq!(new.show_id(), old.show_id());
Ok(())
}
#[test]
fn test_new_episode_to_episode() {
fn test_new_episode_to_episode() -> Result<(), Error> {
let expected = &*EXPECTED_INTERCEPTED_1;
// Assert insert() produces the same result that you would get with to_podcast()
truncate_db().unwrap();
expected.insert().unwrap();
let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap();
let ep = expected.to_episode().unwrap();
truncate_db()?;
expected.insert()?;
let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id())?;
let ep = expected.to_episode()?;
assert_eq!(old, ep);
// Same as above, diff order
truncate_db().unwrap();
let ep = expected.to_episode().unwrap();
truncate_db()?;
let ep = expected.to_episode()?;
// This should error as a unique constraint violation
assert!(expected.insert().is_err());
let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap();
let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id())?;
assert_eq!(old, ep);
Ok(())
}
}

View File

@ -157,6 +157,7 @@ mod tests {
use super::*;
// use tokio_core::reactor::Core;
use failure::Error;
use rss::Channel;
use database::truncate_db;
@ -286,73 +287,80 @@ mod tests {
}
#[test]
fn test_new_podcast_intercepted() {
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();
fn test_new_podcast_intercepted() -> Result<(), Error> {
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
let channel = Channel::read_from(BufReader::new(file))?;
let pd = NewShow::new(&channel, 42);
assert_eq!(*EXPECTED_INTERCEPTED, pd);
Ok(())
}
#[test]
fn test_new_podcast_lup() {
let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();
fn test_new_podcast_lup() -> Result<(), Error> {
let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml")?;
let channel = Channel::read_from(BufReader::new(file))?;
let pd = NewShow::new(&channel, 42);
assert_eq!(*EXPECTED_LUP, pd);
Ok(())
}
#[test]
fn test_new_podcast_thetipoff() {
let file = File::open("tests/feeds/2018-01-20-TheTipOff.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();
fn test_new_podcast_thetipoff() -> Result<(), Error> {
let file = File::open("tests/feeds/2018-01-20-TheTipOff.xml")?;
let channel = Channel::read_from(BufReader::new(file))?;
let pd = NewShow::new(&channel, 42);
assert_eq!(*EXPECTED_TIPOFF, pd);
Ok(())
}
#[test]
fn test_new_podcast_steal_the_stars() {
let file = File::open("tests/feeds/2018-01-20-StealTheStars.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();
fn test_new_podcast_steal_the_stars() -> Result<(), Error> {
let file = File::open("tests/feeds/2018-01-20-StealTheStars.xml")?;
let channel = Channel::read_from(BufReader::new(file))?;
let pd = NewShow::new(&channel, 42);
assert_eq!(*EXPECTED_STARS, pd);
Ok(())
}
#[test]
fn test_new_podcast_greater_than_code() {
let file = File::open("tests/feeds/2018-01-20-GreaterThanCode.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();
fn test_new_podcast_greater_than_code() -> Result<(), Error> {
let file = File::open("tests/feeds/2018-01-20-GreaterThanCode.xml")?;
let channel = Channel::read_from(BufReader::new(file))?;
let pd = NewShow::new(&channel, 42);
assert_eq!(*EXPECTED_CODE, pd);
Ok(())
}
#[test]
fn test_new_podcast_ellinofreneia() {
let file = File::open("tests/feeds/2018-03-28-Ellinofreneia.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();
fn test_new_podcast_ellinofreneia() -> Result<(), Error> {
let file = File::open("tests/feeds/2018-03-28-Ellinofreneia.xml")?;
let channel = Channel::read_from(BufReader::new(file))?;
let pd = NewShow::new(&channel, 42);
assert_eq!(*EXPECTED_ELLINOFRENEIA, pd);
Ok(())
}
#[test]
// This maybe could be a doc test on insert.
fn test_new_podcast_insert() {
truncate_db().unwrap();
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();
fn test_new_podcast_insert() -> Result<(), Error> {
truncate_db()?;
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
let channel = Channel::read_from(BufReader::new(file))?;
let npd = NewShow::new(&channel, 42);
npd.insert().unwrap();
let pd = dbqueries::get_podcast_from_source_id(42).unwrap();
npd.insert()?;
let pd = dbqueries::get_podcast_from_source_id(42)?;
assert_eq!(npd, pd);
assert_eq!(*EXPECTED_INTERCEPTED, npd);
assert_eq!(&*EXPECTED_INTERCEPTED, &pd);
Ok(())
}
#[test]
@ -360,31 +368,32 @@ mod tests {
// Currently there's a test that only checks new description or title.
// If you have time and want to help, implement the test for the other fields
// too.
fn test_new_podcast_update() {
truncate_db().unwrap();
let old = EXPECTED_INTERCEPTED.to_podcast().unwrap();
fn test_new_podcast_update() -> Result<(), Error> {
truncate_db()?;
let old = EXPECTED_INTERCEPTED.to_podcast()?;
let updated = &*UPDATED_DESC_INTERCEPTED;
updated.update(old.id()).unwrap();
let new = dbqueries::get_podcast_from_source_id(42).unwrap();
updated.update(old.id())?;
let new = dbqueries::get_podcast_from_source_id(42)?;
assert_ne!(old, new);
assert_eq!(old.id(), new.id());
assert_eq!(old.source_id(), new.source_id());
assert_eq!(updated, &new);
assert_ne!(updated, &old);
Ok(())
}
#[test]
fn test_new_podcast_index() {
truncate_db().unwrap();
fn test_new_podcast_index() -> Result<(), Error> {
truncate_db()?;
// First insert
assert!(EXPECTED_INTERCEPTED.index().is_ok());
// Second identical, This should take the early return path
assert!(EXPECTED_INTERCEPTED.index().is_ok());
// Get the podcast
let old = dbqueries::get_podcast_from_source_id(42).unwrap();
let old = dbqueries::get_podcast_from_source_id(42)?;
// Assert that NewShow is equal to the Indexed one
assert_eq!(&*EXPECTED_INTERCEPTED, &old);
@ -393,28 +402,30 @@ mod tests {
// Update the podcast
assert!(updated.index().is_ok());
// Get the new Show
let new = dbqueries::get_podcast_from_source_id(42).unwrap();
let new = dbqueries::get_podcast_from_source_id(42)?;
// Assert it's diff from the old one.
assert_ne!(new, old);
assert_eq!(new.id(), old.id());
assert_eq!(new.source_id(), old.source_id());
Ok(())
}
#[test]
fn test_to_podcast() {
fn test_to_podcast() -> Result<(), Error> {
// Assert insert() produces the same result that you would get with to_podcast()
truncate_db().unwrap();
EXPECTED_INTERCEPTED.insert().unwrap();
let old = dbqueries::get_podcast_from_source_id(42).unwrap();
let pd = EXPECTED_INTERCEPTED.to_podcast().unwrap();
truncate_db()?;
EXPECTED_INTERCEPTED.insert()?;
let old = dbqueries::get_podcast_from_source_id(42)?;
let pd = EXPECTED_INTERCEPTED.to_podcast()?;
assert_eq!(old, pd);
// Same as above, diff order
truncate_db().unwrap();
let pd = EXPECTED_INTERCEPTED.to_podcast().unwrap();
truncate_db()?;
let pd = EXPECTED_INTERCEPTED.to_podcast()?;
// This should error as a unique constraint violation
assert!(EXPECTED_INTERCEPTED.insert().is_err());
let old = dbqueries::get_podcast_from_source_id(42).unwrap();
let old = dbqueries::get_podcast_from_source_id(42)?;
assert_eq!(old, pd);
Ok(())
}
}

View File

@ -275,29 +275,31 @@ fn response_to_channel(res: Response) -> impl Future<Item = Channel, Error = Dat
#[cfg(test)]
mod tests {
use super::*;
use failure::Error;
use tokio_core::reactor::Core;
use database::truncate_db;
use utils::get_feed;
#[test]
fn test_into_feed() {
truncate_db().unwrap();
fn test_into_feed() -> Result<(), Error> {
truncate_db()?;
let mut core = Core::new().unwrap();
let mut core = Core::new()?;
let client = Client::configure()
.connector(HttpsConnector::new(4, &core.handle()).unwrap())
.connector(HttpsConnector::new(4, &core.handle())?)
.build(&core.handle());
let url = "https://web.archive.org/web/20180120083840if_/https://feeds.feedburner.\
com/InterceptedWithJeremyScahill";
let source = Source::from_url(url).unwrap();
let source = Source::from_url(url)?;
let id = source.id();
let feed = source.into_feed(client);
let feed = core.run(feed).unwrap();
let feed = core.run(feed)?;
let expected = get_feed("tests/feeds/2018-01-20-Intercepted.xml", id);
assert_eq!(expected, feed);
Ok(())
}
}

View File

@ -100,9 +100,10 @@ pub fn extract_sources<R: Read>(reader: R) -> Result<HashSet<Opml>, reader::Erro
mod tests {
use super::*;
use chrono::Local;
use failure::Error;
#[test]
fn test_extract() {
fn test_extract() -> Result<(), Error> {
let int_title = String::from("Intercepted with Jeremy Scahill");
let int_url = String::from("https://feeds.feedburner.com/InterceptedWithJeremyScahill");
let int_desc =
@ -160,6 +161,7 @@ mod tests {
url: dec_url
},
];
assert_eq!(extract_sources(sample1.as_bytes()).unwrap(), map);
assert_eq!(extract_sources(sample1.as_bytes())?, map);
Ok(())
}
}

View File

@ -85,6 +85,7 @@ mod tests {
use super::*;
use database::truncate_db;
use dbqueries;
use failure::Error;
use Source;
// (path, url) tuples.
@ -100,28 +101,29 @@ mod tests {
#[test]
/// Insert feeds and update/index them.
fn test_pipeline() {
truncate_db().unwrap();
fn test_pipeline() -> Result<(), Error> {
truncate_db()?;
let bad_url = "https://gitlab.gnome.org/World/podcasts.atom";
// if a stream returns error/None it stops,
// but we want to parse all feeds regardless of whether one fails
Source::from_url(bad_url).unwrap();
Source::from_url(bad_url)?;
URLS.iter().for_each(|url| {
// Index the urls into the source table.
Source::from_url(url).unwrap();
});
let sources = dbqueries::get_sources().unwrap();
run(sources).unwrap();
let sources = dbqueries::get_sources()?;
run(sources)?;
let sources = dbqueries::get_sources().unwrap();
let sources = dbqueries::get_sources()?;
// Run again to cover unique constraint errors.
run(sources).unwrap();
run(sources)?;
// Assert the index rows equal the controlled results
assert_eq!(dbqueries::get_sources().unwrap().len(), 6);
assert_eq!(dbqueries::get_podcasts().unwrap().len(), 5);
assert_eq!(dbqueries::get_episodes().unwrap().len(), 354);
assert_eq!(dbqueries::get_sources()?.len(), 6);
assert_eq!(dbqueries::get_podcasts()?.len(), 5);
assert_eq!(dbqueries::get_episodes()?.len(), 354);
Ok(())
}
}

View File

@ -162,6 +162,7 @@ mod tests {
use self::tempdir::TempDir;
use super::*;
use chrono::Duration;
use failure::Error;
use database::truncate_db;
use models::NewEpisodeBuilder;
@ -169,15 +170,15 @@ mod tests {
use std::fs::File;
use std::io::Write;
fn helper_db() -> TempDir {
fn helper_db() -> Result<TempDir, Error> {
// Clean the db
truncate_db().unwrap();
truncate_db()?;
// Setup tmp file stuff
let tmp_dir = TempDir::new("podcasts_test").unwrap();
let tmp_dir = TempDir::new("podcasts_test")?;
let valid_path = tmp_dir.path().join("virtual_dl.mp3");
let bad_path = tmp_dir.path().join("invalid_thing.mp3");
let mut tmp_file = File::create(&valid_path).unwrap();
writeln!(tmp_file, "Foooo").unwrap();
let mut tmp_file = File::create(&valid_path)?;
writeln!(tmp_file, "Foooo")?;
// Setup episodes
let n1 = NewEpisodeBuilder::default()
@ -185,33 +186,31 @@ mod tests {
.show_id(0)
.build()
.unwrap()
.to_episode()
.unwrap();
.to_episode()?;
let n2 = NewEpisodeBuilder::default()
.title("bar_baz".to_string())
.show_id(1)
.build()
.unwrap()
.to_episode()
.unwrap();
.to_episode()?;
let mut ep1 = dbqueries::get_episode_cleaner_from_pk(n1.title(), n1.show_id()).unwrap();
let mut ep2 = dbqueries::get_episode_cleaner_from_pk(n2.title(), n2.show_id()).unwrap();
let mut ep1 = dbqueries::get_episode_cleaner_from_pk(n1.title(), n1.show_id())?;
let mut ep2 = dbqueries::get_episode_cleaner_from_pk(n2.title(), n2.show_id())?;
ep1.set_local_uri(Some(valid_path.to_str().unwrap()));
ep2.set_local_uri(Some(bad_path.to_str().unwrap()));
ep1.save().unwrap();
ep2.save().unwrap();
ep1.save()?;
ep2.save()?;
tmp_dir
Ok(tmp_dir)
}
#[test]
fn test_download_checker() {
let tmp_dir = helper_db();
download_checker().unwrap();
let episodes = dbqueries::get_downloaded_episodes().unwrap();
fn test_download_checker() -> Result<(), Error> {
let tmp_dir = helper_db()?;
download_checker()?;
let episodes = dbqueries::get_downloaded_episodes()?;
let valid_path = tmp_dir.path().join("virtual_dl.mp3");
assert_eq!(episodes.len(), 1);
@ -220,70 +219,75 @@ mod tests {
episodes.first().unwrap().local_uri()
);
let _tmp_dir = helper_db();
download_checker().unwrap();
let episode = dbqueries::get_episode_cleaner_from_pk("bar_baz", 1).unwrap();
let _tmp_dir = helper_db()?;
download_checker()?;
let episode = dbqueries::get_episode_cleaner_from_pk("bar_baz", 1)?;
assert!(episode.local_uri().is_none());
Ok(())
}
#[test]
fn test_download_cleaner() {
let _tmp_dir = helper_db();
let mut episode: EpisodeCleanerModel = dbqueries::get_episode_cleaner_from_pk("foo_bar", 0)
.unwrap()
.into();
fn test_download_cleaner() -> Result<(), Error> {
let _tmp_dir = helper_db()?;
let mut episode: EpisodeCleanerModel =
dbqueries::get_episode_cleaner_from_pk("foo_bar", 0)?.into();
let valid_path = episode.local_uri().unwrap().to_owned();
delete_local_content(&mut episode).unwrap();
delete_local_content(&mut episode)?;
assert_eq!(Path::new(&valid_path).exists(), false);
Ok(())
}
#[test]
fn test_played_cleaner_expired() {
let _tmp_dir = helper_db();
let mut episode = dbqueries::get_episode_cleaner_from_pk("foo_bar", 0).unwrap();
fn test_played_cleaner_expired() -> Result<(), Error> {
let _tmp_dir = helper_db()?;
let mut episode = dbqueries::get_episode_cleaner_from_pk("foo_bar", 0)?;
let cleanup_date = Utc::now() - Duration::seconds(1000);
let epoch = cleanup_date.timestamp() as i32 - 1;
episode.set_played(Some(epoch));
episode.save().unwrap();
episode.save()?;
let valid_path = episode.local_uri().unwrap().to_owned();
// This should delete the file
played_cleaner(cleanup_date).unwrap();
played_cleaner(cleanup_date)?;
assert_eq!(Path::new(&valid_path).exists(), false);
Ok(())
}
#[test]
fn test_played_cleaner_none() {
let _tmp_dir = helper_db();
let mut episode = dbqueries::get_episode_cleaner_from_pk("foo_bar", 0).unwrap();
fn test_played_cleaner_none() -> Result<(), Error> {
let _tmp_dir = helper_db()?;
let mut episode = dbqueries::get_episode_cleaner_from_pk("foo_bar", 0)?;
let cleanup_date = Utc::now() - Duration::seconds(1000);
let epoch = cleanup_date.timestamp() as i32 + 1;
episode.set_played(Some(epoch));
episode.save().unwrap();
episode.save()?;
let valid_path = episode.local_uri().unwrap().to_owned();
// This should not delete the file
played_cleaner(cleanup_date).unwrap();
played_cleaner(cleanup_date)?;
assert_eq!(Path::new(&valid_path).exists(), true);
Ok(())
}
#[test]
fn test_url_cleaner() {
fn test_url_cleaner() -> Result<(), Error> {
let good_url = "http://traffic.megaphone.fm/FL8608731318.mp3";
let bad_url = "http://traffic.megaphone.fm/FL8608731318.mp3?updated=1484685184";
assert_eq!(url_cleaner(bad_url), good_url);
assert_eq!(url_cleaner(good_url), good_url);
assert_eq!(url_cleaner(&format!(" {}\t\n", bad_url)), good_url);
Ok(())
}
#[test]
// This test needs access to local system so we ignore it by default.
#[ignore]
fn test_get_dl_folder() {
fn test_get_dl_folder() -> Result<(), Error> {
let foo_ = format!("{}/{}", DL_DIR.to_str().unwrap(), "foo");
assert_eq!(get_download_folder("foo").unwrap(), foo_);
assert_eq!(get_download_folder("foo")?, foo_);
let _ = fs::remove_dir_all(foo_);
Ok(())
}
}

View File

@ -236,6 +236,7 @@ pub fn cache_image(pd: &ShowCoverModel) -> Result<String, DownloadError> {
#[cfg(test)]
mod tests {
use super::*;
use failure::Error;
use podcasts_data::dbqueries;
use podcasts_data::pipeline;
use podcasts_data::Source;
@ -246,17 +247,17 @@ mod tests {
// This test inserts an rss feed to your `XDG_DATA/podcasts/podcasts.db` so we make it explicit
// to run it.
#[ignore]
fn test_cache_image() {
fn test_cache_image() -> Result<(), Error> {
let url = "https://web.archive.org/web/20180120110727if_/https://rss.acast.com/thetipoff";
// Create and index a source
let source = Source::from_url(url).unwrap();
let source = Source::from_url(url)?;
// Copy it's id
let sid = source.id();
// Convert Source it into a future Feed and index it
pipeline::run(vec![source]).unwrap();
pipeline::run(vec![source])?;
// Get the Podcast
let pd = dbqueries::get_podcast_from_source_id(sid).unwrap().into();
let pd = dbqueries::get_podcast_from_source_id(sid)?.into();
let img_path = cache_image(&pd);
let foo_ = format!(
@ -264,7 +265,8 @@ mod tests {
PODCASTS_CACHE.to_str().unwrap(),
pd.title()
);
assert_eq!(img_path.unwrap(), foo_);
fs::remove_file(foo_).unwrap();
assert_eq!(img_path?, foo_);
fs::remove_file(foo_)?;
Ok(())
}
}

View File

@ -127,26 +127,26 @@ mod tests {
#[ignore]
// THIS IS NOT A RELIABLE TEST
// Just quick sanity check
fn test_start_dl() {
fn test_start_dl() -> Result<(), Error> {
let url = "https://web.archive.org/web/20180120110727if_/https://rss.acast.com/thetipoff";
// Create and index a source
let mut source = Source::from_url(url).unwrap();
let mut source = Source::from_url(url)?;
// Copy its id
let sid = source.id();
source.set_http_etag(None);
source.set_last_modified(None);
source.save().unwrap();
pipeline::run(vec![source]).unwrap();
source.save()?;
pipeline::run(vec![source])?;
// Get the podcast
let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();
let pd = dbqueries::get_podcast_from_source_id(sid)?;
let title = "Coming soon... The Tip Off";
// Get an episode
let episode: Episode = dbqueries::get_episode_from_pk(title, pd.id()).unwrap();
let episode: Episode = dbqueries::get_episode_from_pk(title, pd.id())?;
let download_fold = get_download_folder(&pd.title()).unwrap();
let download_fold = get_download_folder(&pd.title())?;
let fold2 = download_fold.clone();
add(episode.rowid(), download_fold).unwrap();
add(episode.rowid(), download_fold)?;
assert_eq!(ACTIVE_DOWNLOADS.read().unwrap().len(), 1);
// Give it some time to download the file
@ -155,37 +155,37 @@ mod tests {
let final_path = format!("{}/{}.mp3", &fold2, episode.rowid());
assert_eq!(ACTIVE_DOWNLOADS.read().unwrap().len(), 0);
assert!(Path::new(&final_path).exists());
fs::remove_file(final_path).unwrap();
fs::remove_file(final_path)?;
Ok(())
}
#[test]
// This test needs access to local system so we ignore it by default.
#[ignore]
fn test_dl_steal_the_stars() {
fn test_dl_steal_the_stars() -> Result<(), Error> {
let url =
"https://web.archive.org/web/20180120104957if_/https://rss.art19.com/steal-the-stars";
// Create and index a source
let mut source = Source::from_url(url).unwrap();
let mut source = Source::from_url(url)?;
// Copy its id
let sid = source.id();
source.set_http_etag(None);
source.set_last_modified(None);
source.save().unwrap();
pipeline::run(vec![source]).unwrap();
source.save()?;
pipeline::run(vec![source])?;
// Get the podcast
let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();
let pd = dbqueries::get_podcast_from_source_id(sid)?;
let title = "Introducing Steal the Stars";
// Get an episode
let mut episode = dbqueries::get_episode_from_pk(title, pd.id())
.unwrap()
.into();
let download_fold = get_download_folder(&pd.title()).unwrap();
let mut episode = dbqueries::get_episode_from_pk(title, pd.id())?.into();
let download_fold = get_download_folder(&pd.title())?;
get_episode(&mut episode, &download_fold, None).unwrap();
get_episode(&mut episode, &download_fold, None)?;
let final_path = format!("{}/{}.mp3", &download_fold, episode.rowid());
assert!(Path::new(&final_path).exists());
fs::remove_file(final_path).unwrap();
fs::remove_file(final_path)?;
Ok(())
}
}

View File

@ -395,6 +395,7 @@ pub(crate) fn on_import_clicked(window: &gtk::ApplicationWindow, sender: &Sender
#[cfg(test)]
mod tests {
use super::*;
use failure::Error;
// use podcasts_data::Source;
// use podcasts_data::dbqueries;
@ -419,29 +420,32 @@ mod tests {
// }
#[test]
fn test_itunes_to_rss() {
fn test_itunes_to_rss() -> Result<(), Error> {
let itunes_url = "https://itunes.apple.com/podcast/id1195206601";
let rss_url = String::from("http://feeds.feedburner.com/InterceptedWithJeremyScahill");
assert_eq!(rss_url, itunes_to_rss(itunes_url).unwrap());
assert_eq!(rss_url, itunes_to_rss(itunes_url)?);
let itunes_url = "https://itunes.apple.com/podcast/id000000000000000";
assert!(itunes_to_rss(itunes_url).is_err());
Ok(())
}
#[test]
fn test_itunes_id() {
fn test_itunes_id() -> Result<(), Error> {
let id = 1195206601;
let itunes_url = "https://itunes.apple.com/podcast/id1195206601";
assert_eq!(id, itunes_id_from_url(itunes_url).unwrap());
Ok(())
}
#[test]
fn test_itunes_lookup_id() {
fn test_itunes_lookup_id() -> Result<(), Error> {
let id = 1195206601;
let rss_url = "http://feeds.feedburner.com/InterceptedWithJeremyScahill";
assert_eq!(rss_url, lookup_id(id).unwrap());
assert_eq!(rss_url, lookup_id(id)?);
let id = 000000000;
assert!(lookup_id(id).is_err());
Ok(())
}
}