Source: Remove ignore_etags option

This is never used anywhere apart from the testsuite. Instead of
ignoring etags, we should simply not save them if the feed does
not return 200 or 304. See #64.
Jordan Petridis 2018-08-14 11:54:10 +03:00
parent c53ad56a6d
commit 471f6ff93b
6 changed files with 20 additions and 27 deletions
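
The follow-up described in the message (see #64) would persist the caching validators only when the server actually confirms them. A minimal, hypothetical sketch of that rule; `should_save_etag` is an invented name, not code from this commit:

// Hedged sketch of the follow-up in the commit message (see #64):
// keep the ETag/Last-Modified validators only when the feed answered
// 200 (fresh body) or 304 (stored validators still valid).
fn should_save_etag(status: u16) -> bool {
    status == 200 || status == 304
}

fn main() {
    assert!(should_save_etag(200));
    assert!(should_save_etag(304));
    assert!(!should_save_etag(500)); // e.g. server error: drop the validators
}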

View File

@@ -427,7 +427,7 @@ mod tests {
         com/InterceptedWithJeremyScahill";
         let source = Source::from_url(url).unwrap();
         let id = source.id();
-        pipeline::run(vec![source], true).unwrap();
+        pipeline::run(vec![source]).unwrap();
         let pd = get_podcast_from_source_id(id).unwrap();
         let eps_num = get_pd_unplayed_episodes(&pd).unwrap().len();

View File

@@ -177,12 +177,11 @@ impl Source {
     pub fn into_feed(
         self,
         client: Client<HttpsConnector<HttpConnector>>,
-        ignore_etags: bool,
     ) -> impl Future<Item = Feed, Error = DataError> {
         let id = self.id();
         let response = loop_fn(self, move |source| {
             source
-                .request_constructor(&client.clone(), ignore_etags)
+                .request_constructor(&client.clone())
                 .then(|res| match res {
                     Ok(response) => Ok(Loop::Break(response)),
                     Err(err) => match err {
@@ -206,12 +205,9 @@ impl Source {
         })
     }
-    // TODO: make ignore_etags an Enum for better ergonomics.
-    // #bools_are_just_2variant_enmus
     fn request_constructor(
         self,
         client: &Client<HttpsConnector<HttpConnector>>,
-        ignore_etags: bool,
     ) -> impl Future<Item = Response, Error = DataError> {
         // FIXME: remove unwrap somehow
         let uri = Uri::from_str(self.uri()).unwrap();
@@ -220,16 +216,14 @@ impl Source {
         // Set the UserAgent cause ppl still seem to check it for some reason...
         req.headers_mut().set(UserAgent::new(USER_AGENT));
-        if !ignore_etags {
-            if let Some(etag) = self.http_etag() {
-                let tag = vec![EntityTag::new(true, etag.to_owned())];
-                req.headers_mut().set(IfNoneMatch::Items(tag));
-            }
-            if let Some(lmod) = self.last_modified() {
-                if let Ok(date) = lmod.parse::<HttpDate>() {
-                    req.headers_mut().set(IfModifiedSince(date));
-                }
-            }
-        }
+        if let Some(etag) = self.http_etag() {
+            let tag = vec![EntityTag::new(true, etag.to_owned())];
+            req.headers_mut().set(IfNoneMatch::Items(tag));
+        }
+        if let Some(lmod) = self.last_modified() {
+            if let Ok(date) = lmod.parse::<HttpDate>() {
+                req.headers_mut().set(IfModifiedSince(date));
+            }
+        }
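
For context, the two headers kept above implement HTTP conditional requests: `If-None-Match` replays the stored ETag and `If-Modified-Since` the stored date, so an unchanged feed can be answered with 304 Not Modified instead of a full body. A hedged sketch of the validator check a server performs; the names here are illustrative, not from this codebase:

// Illustrative sketch of the conditional-GET contract, not project code.
fn respond(stored_etag: Option<&str>, current_etag: &str) -> u16 {
    match stored_etag {
        // The client's validator still matches: skip the body, answer 304.
        Some(tag) if tag == current_etag => 304,
        // Missing or stale validator: send the full feed with 200.
        _ => 200,
    }
}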
@@ -273,7 +267,7 @@ mod tests {
         let source = Source::from_url(url).unwrap();
         let id = source.id();
-        let feed = source.into_feed(client, true);
+        let feed = source.into_feed(client);
         let feed = core.run(feed).unwrap();
         let expected = get_feed("tests/feeds/2018-01-20-Intercepted.xml", id);

View File

@@ -48,14 +48,13 @@ type HttpsClient = Client<HttpsConnector<HttpConnector>>;
 /// Convert `rss::Channel` into `Feed` -> Index Podcast -> Index Episodes.
 pub fn pipeline<'a, S>(
     sources: S,
-    ignore_etags: bool,
     client: &HttpsClient,
 ) -> impl Future<Item = Vec<()>, Error = DataError> + 'a
 where
     S: Stream<Item = Source, Error = DataError> + 'a,
 {
     sources
-        .and_then(clone!(client => move |s| s.into_feed(client.clone(), ignore_etags)))
+        .and_then(clone!(client => move |s| s.into_feed(client.clone())))
         .and_then(|feed| rayon::scope(|s| s.spawn_future(feed.index())))
         // the stream will stop at the first error so
         // we ensure that everything will succeed regardless.
@@ -66,7 +65,7 @@ where
 /// Creates a tokio `reactor::Core` and a `hyper::Client`, and
 /// runs the pipeline to completion. The `reactor::Core` is dropped afterwards.
-pub fn run<S>(sources: S, ignore_etags: bool) -> Result<(), DataError>
+pub fn run<S>(sources: S) -> Result<(), DataError>
 where
     S: IntoIterator<Item = Source>,
 {
@@ -77,7 +76,7 @@ where
         .build(&handle);
     let stream = iter_ok::<_, DataError>(sources);
-    let p = pipeline(stream, ignore_etags, &client);
+    let p = pipeline(stream, &client);
     core.run(p).map(|_| ())
 }
@@ -114,11 +113,11 @@ mod tests {
         });
         let sources = dbqueries::get_sources().unwrap();
-        run(sources, true).unwrap();
+        run(sources).unwrap();
         let sources = dbqueries::get_sources().unwrap();
         // Run again to cover Unique constraint errors.
-        run(sources, true).unwrap();
+        run(sources).unwrap();
         // Assert the index rows equal the controlled results
         assert_eq!(dbqueries::get_sources().unwrap().len(), 6);

View File

@@ -253,7 +253,7 @@ mod tests {
         // Copy its id
         let sid = source.id();
         // Convert the Source into a future Feed and index it
-        pipeline::run(vec![source], true).unwrap();
+        pipeline::run(vec![source]).unwrap();
         // Get the Podcast
         let pd = dbqueries::get_podcast_from_source_id(sid).unwrap().into();

View File

@@ -133,7 +133,7 @@ mod tests {
         let source = Source::from_url(url).unwrap();
         // Copy its id
         let sid = source.id();
-        pipeline::run(vec![source], true).unwrap();
+        pipeline::run(vec![source]).unwrap();
         // Get the podcast
         let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();
@@ -164,7 +164,7 @@ mod tests {
         let source = Source::from_url(url).unwrap();
         // Copy its id
         let sid = source.id();
-        pipeline::run(vec![source], true).unwrap();
+        pipeline::run(vec![source]).unwrap();
         // Get the podcast
         let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();

View File

@@ -205,7 +205,7 @@ where
     rayon::spawn(move || {
         if let Some(s) = source {
             // Refresh only specified feeds
-            pipeline::run(s, false)
+            pipeline::run(s)
                 .map_err(|err| error!("Error: {}", err))
                 .map_err(|_| error!("Error while trying to update the database."))
                 .ok();
@@ -213,7 +213,7 @@ where
             // Refresh all the feeds
             dbqueries::get_sources()
                 .map(|s| s.into_iter())
-                .and_then(|s| pipeline::run(s, false))
+                .and_then(pipeline::run)
                 .map_err(|err| error!("Error: {}", err))
                 .ok();
         };