diff --git a/podcasts-data/src/dbqueries.rs b/podcasts-data/src/dbqueries.rs index fce5f88..63f5fbd 100644 --- a/podcasts-data/src/dbqueries.rs +++ b/podcasts-data/src/dbqueries.rs @@ -427,7 +427,7 @@ mod tests { com/InterceptedWithJeremyScahill"; let source = Source::from_url(url).unwrap(); let id = source.id(); - pipeline::run(vec![source], true).unwrap(); + pipeline::run(vec![source]).unwrap(); let pd = get_podcast_from_source_id(id).unwrap(); let eps_num = get_pd_unplayed_episodes(&pd).unwrap().len(); diff --git a/podcasts-data/src/models/source.rs b/podcasts-data/src/models/source.rs index 1749f6c..ea8aa6d 100644 --- a/podcasts-data/src/models/source.rs +++ b/podcasts-data/src/models/source.rs @@ -177,12 +177,11 @@ impl Source { pub fn into_feed( self, client: Client<HttpsConnector<HttpConnector>>, - ignore_etags: bool, ) -> impl Future<Item = Feed, Error = DataError> { let id = self.id(); let response = loop_fn(self, move |source| { source - .request_constructor(&client.clone(), ignore_etags) + .request_constructor(&client.clone()) .then(|res| match res { Ok(response) => Ok(Loop::Break(response)), Err(err) => match err { @@ -206,12 +205,9 @@ }) } - // TODO: make ignore_etags an Enum for better ergonomics. - // #bools_are_just_2variant_enmus fn request_constructor( self, client: &Client<HttpsConnector<HttpConnector>>, - ignore_etags: bool, ) -> impl Future<Item = Response, Error = DataError> { // FIXME: remove unwrap somehow let uri = Uri::from_str(self.uri()).unwrap(); @@ -220,16 +216,14 @@ // Set the UserAgent cause ppl still seem to check it for some reason... 
req.headers_mut().set(UserAgent::new(USER_AGENT)); - if !ignore_etags { - if let Some(etag) = self.http_etag() { - let tag = vec![EntityTag::new(true, etag.to_owned())]; - req.headers_mut().set(IfNoneMatch::Items(tag)); - } + if let Some(etag) = self.http_etag() { + let tag = vec![EntityTag::new(true, etag.to_owned())]; + req.headers_mut().set(IfNoneMatch::Items(tag)); + } - if let Some(lmod) = self.last_modified() { - if let Ok(date) = lmod.parse::<HttpDate>() { - req.headers_mut().set(IfModifiedSince(date)); - } + if let Some(lmod) = self.last_modified() { + if let Ok(date) = lmod.parse::<HttpDate>() { + req.headers_mut().set(IfModifiedSince(date)); } } @@ -273,7 +267,7 @@ mod tests { let source = Source::from_url(url).unwrap(); let id = source.id(); - let feed = source.into_feed(client, true); + let feed = source.into_feed(client); let feed = core.run(feed).unwrap(); let expected = get_feed("tests/feeds/2018-01-20-Intercepted.xml", id); diff --git a/podcasts-data/src/pipeline.rs b/podcasts-data/src/pipeline.rs index 7275ac8..c9a8749 100644 --- a/podcasts-data/src/pipeline.rs +++ b/podcasts-data/src/pipeline.rs @@ -48,14 +48,13 @@ type HttpsClient = Client<HttpsConnector<HttpConnector>>; /// Convert `rss::Channel` into `Feed` -> Index Podcast -> Index Episodes. pub fn pipeline<'a, S>( sources: S, - ignore_etags: bool, client: &HttpsClient, ) -> impl Future<Item = Vec<()>, Error = DataError> + 'a where S: Stream<Item = Source, Error = DataError> + 'a, { sources - .and_then(clone!(client => move |s| s.into_feed(client.clone(), ignore_etags))) + .and_then(clone!(client => move |s| s.into_feed(client.clone()))) .and_then(|feed| rayon::scope(|s| s.spawn_future(feed.index()))) // the stream will stop at the first error so // we ensure that everything will succeded regardless. @@ -66,7 +65,7 @@ where /// Creates a tokio `reactor::Core`, and a `hyper::Client` and /// runs the pipeline to completion. The `reactor::Core` is dropped afterwards. 
-pub fn run<S>(sources: S, ignore_etags: bool) -> Result<(), DataError> +pub fn run<S>(sources: S) -> Result<(), DataError> where S: IntoIterator<Item = Source>, { @@ -77,7 +76,7 @@ where .build(&handle); let stream = iter_ok::<_, DataError>(sources); - let p = pipeline(stream, ignore_etags, &client); + let p = pipeline(stream, &client); core.run(p).map(|_| ()) } @@ -114,11 +113,11 @@ mod tests { }); let sources = dbqueries::get_sources().unwrap(); - run(sources, true).unwrap(); + run(sources).unwrap(); let sources = dbqueries::get_sources().unwrap(); // Run again to cover Unique constrains erros. - run(sources, true).unwrap(); + run(sources).unwrap(); // Assert the index rows equal the controlled results assert_eq!(dbqueries::get_sources().unwrap().len(), 6); diff --git a/podcasts-downloader/src/downloader.rs b/podcasts-downloader/src/downloader.rs index f566324..5dedf57 100644 --- a/podcasts-downloader/src/downloader.rs +++ b/podcasts-downloader/src/downloader.rs @@ -253,7 +253,7 @@ mod tests { // Copy it's id let sid = source.id(); // Convert Source it into a future Feed and index it - pipeline::run(vec![source], true).unwrap(); + pipeline::run(vec![source]).unwrap(); // Get the Podcast let pd = dbqueries::get_podcast_from_source_id(sid).unwrap().into(); diff --git a/podcasts-gtk/src/manager.rs b/podcasts-gtk/src/manager.rs index c6be72e..e3804e2 100644 --- a/podcasts-gtk/src/manager.rs +++ b/podcasts-gtk/src/manager.rs @@ -133,7 +133,7 @@ mod tests { let source = Source::from_url(url).unwrap(); // Copy its id let sid = source.id(); - pipeline::run(vec![source], true).unwrap(); + pipeline::run(vec![source]).unwrap(); // Get the podcast let pd = dbqueries::get_podcast_from_source_id(sid).unwrap(); @@ -164,7 +164,7 @@ mod tests { let source = Source::from_url(url).unwrap(); // Copy its id let sid = source.id(); - pipeline::run(vec![source], true).unwrap(); + pipeline::run(vec![source]).unwrap(); // Get the podcast let pd = dbqueries::get_podcast_from_source_id(sid).unwrap(); diff 
--git a/podcasts-gtk/src/utils.rs b/podcasts-gtk/src/utils.rs index c00ad8c..c78411d 100644 --- a/podcasts-gtk/src/utils.rs +++ b/podcasts-gtk/src/utils.rs @@ -205,7 +205,7 @@ where rayon::spawn(move || { if let Some(s) = source { // Refresh only specified feeds - pipeline::run(s, false) + pipeline::run(s) .map_err(|err| error!("Error: {}", err)) .map_err(|_| error!("Error while trying to update the database.")) .ok(); @@ -213,7 +213,7 @@ where // Refresh all the feeds dbqueries::get_sources() .map(|s| s.into_iter()) - .and_then(|s| pipeline::run(s, false)) + .and_then(pipeline::run) .map_err(|err| error!("Error: {}", err)) .ok(); };