Source: Remove ignore_etags option
This is never used anywhere else apart from the testsuite. Instead of ignoring etags, we should simply not save them if the feed does not return 200 or 304. See #64.
This commit is contained in:
parent
c53ad56a6d
commit
471f6ff93b
@ -427,7 +427,7 @@ mod tests {
|
|||||||
com/InterceptedWithJeremyScahill";
|
com/InterceptedWithJeremyScahill";
|
||||||
let source = Source::from_url(url).unwrap();
|
let source = Source::from_url(url).unwrap();
|
||||||
let id = source.id();
|
let id = source.id();
|
||||||
pipeline::run(vec![source], true).unwrap();
|
pipeline::run(vec![source]).unwrap();
|
||||||
let pd = get_podcast_from_source_id(id).unwrap();
|
let pd = get_podcast_from_source_id(id).unwrap();
|
||||||
|
|
||||||
let eps_num = get_pd_unplayed_episodes(&pd).unwrap().len();
|
let eps_num = get_pd_unplayed_episodes(&pd).unwrap().len();
|
||||||
|
|||||||
@ -177,12 +177,11 @@ impl Source {
|
|||||||
pub fn into_feed(
|
pub fn into_feed(
|
||||||
self,
|
self,
|
||||||
client: Client<HttpsConnector<HttpConnector>>,
|
client: Client<HttpsConnector<HttpConnector>>,
|
||||||
ignore_etags: bool,
|
|
||||||
) -> impl Future<Item = Feed, Error = DataError> {
|
) -> impl Future<Item = Feed, Error = DataError> {
|
||||||
let id = self.id();
|
let id = self.id();
|
||||||
let response = loop_fn(self, move |source| {
|
let response = loop_fn(self, move |source| {
|
||||||
source
|
source
|
||||||
.request_constructor(&client.clone(), ignore_etags)
|
.request_constructor(&client.clone())
|
||||||
.then(|res| match res {
|
.then(|res| match res {
|
||||||
Ok(response) => Ok(Loop::Break(response)),
|
Ok(response) => Ok(Loop::Break(response)),
|
||||||
Err(err) => match err {
|
Err(err) => match err {
|
||||||
@ -206,12 +205,9 @@ impl Source {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: make ignore_etags an Enum for better ergonomics.
|
|
||||||
// #bools_are_just_2variant_enmus
|
|
||||||
fn request_constructor(
|
fn request_constructor(
|
||||||
self,
|
self,
|
||||||
client: &Client<HttpsConnector<HttpConnector>>,
|
client: &Client<HttpsConnector<HttpConnector>>,
|
||||||
ignore_etags: bool,
|
|
||||||
) -> impl Future<Item = Response, Error = DataError> {
|
) -> impl Future<Item = Response, Error = DataError> {
|
||||||
// FIXME: remove unwrap somehow
|
// FIXME: remove unwrap somehow
|
||||||
let uri = Uri::from_str(self.uri()).unwrap();
|
let uri = Uri::from_str(self.uri()).unwrap();
|
||||||
@ -220,7 +216,6 @@ impl Source {
|
|||||||
// Set the UserAgent cause ppl still seem to check it for some reason...
|
// Set the UserAgent cause ppl still seem to check it for some reason...
|
||||||
req.headers_mut().set(UserAgent::new(USER_AGENT));
|
req.headers_mut().set(UserAgent::new(USER_AGENT));
|
||||||
|
|
||||||
if !ignore_etags {
|
|
||||||
if let Some(etag) = self.http_etag() {
|
if let Some(etag) = self.http_etag() {
|
||||||
let tag = vec![EntityTag::new(true, etag.to_owned())];
|
let tag = vec![EntityTag::new(true, etag.to_owned())];
|
||||||
req.headers_mut().set(IfNoneMatch::Items(tag));
|
req.headers_mut().set(IfNoneMatch::Items(tag));
|
||||||
@ -231,7 +226,6 @@ impl Source {
|
|||||||
req.headers_mut().set(IfModifiedSince(date));
|
req.headers_mut().set(IfModifiedSince(date));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
client
|
client
|
||||||
.request(req)
|
.request(req)
|
||||||
@ -273,7 +267,7 @@ mod tests {
|
|||||||
let source = Source::from_url(url).unwrap();
|
let source = Source::from_url(url).unwrap();
|
||||||
let id = source.id();
|
let id = source.id();
|
||||||
|
|
||||||
let feed = source.into_feed(client, true);
|
let feed = source.into_feed(client);
|
||||||
let feed = core.run(feed).unwrap();
|
let feed = core.run(feed).unwrap();
|
||||||
|
|
||||||
let expected = get_feed("tests/feeds/2018-01-20-Intercepted.xml", id);
|
let expected = get_feed("tests/feeds/2018-01-20-Intercepted.xml", id);
|
||||||
|
|||||||
@ -48,14 +48,13 @@ type HttpsClient = Client<HttpsConnector<HttpConnector>>;
|
|||||||
/// Convert `rss::Channel` into `Feed` -> Index Podcast -> Index Episodes.
|
/// Convert `rss::Channel` into `Feed` -> Index Podcast -> Index Episodes.
|
||||||
pub fn pipeline<'a, S>(
|
pub fn pipeline<'a, S>(
|
||||||
sources: S,
|
sources: S,
|
||||||
ignore_etags: bool,
|
|
||||||
client: &HttpsClient,
|
client: &HttpsClient,
|
||||||
) -> impl Future<Item = Vec<()>, Error = DataError> + 'a
|
) -> impl Future<Item = Vec<()>, Error = DataError> + 'a
|
||||||
where
|
where
|
||||||
S: Stream<Item = Source, Error = DataError> + 'a,
|
S: Stream<Item = Source, Error = DataError> + 'a,
|
||||||
{
|
{
|
||||||
sources
|
sources
|
||||||
.and_then(clone!(client => move |s| s.into_feed(client.clone(), ignore_etags)))
|
.and_then(clone!(client => move |s| s.into_feed(client.clone())))
|
||||||
.and_then(|feed| rayon::scope(|s| s.spawn_future(feed.index())))
|
.and_then(|feed| rayon::scope(|s| s.spawn_future(feed.index())))
|
||||||
// the stream will stop at the first error so
|
// the stream will stop at the first error so
|
||||||
// we ensure that everything will succeded regardless.
|
// we ensure that everything will succeded regardless.
|
||||||
@ -66,7 +65,7 @@ where
|
|||||||
|
|
||||||
/// Creates a tokio `reactor::Core`, and a `hyper::Client` and
|
/// Creates a tokio `reactor::Core`, and a `hyper::Client` and
|
||||||
/// runs the pipeline to completion. The `reactor::Core` is dropped afterwards.
|
/// runs the pipeline to completion. The `reactor::Core` is dropped afterwards.
|
||||||
pub fn run<S>(sources: S, ignore_etags: bool) -> Result<(), DataError>
|
pub fn run<S>(sources: S) -> Result<(), DataError>
|
||||||
where
|
where
|
||||||
S: IntoIterator<Item = Source>,
|
S: IntoIterator<Item = Source>,
|
||||||
{
|
{
|
||||||
@ -77,7 +76,7 @@ where
|
|||||||
.build(&handle);
|
.build(&handle);
|
||||||
|
|
||||||
let stream = iter_ok::<_, DataError>(sources);
|
let stream = iter_ok::<_, DataError>(sources);
|
||||||
let p = pipeline(stream, ignore_etags, &client);
|
let p = pipeline(stream, &client);
|
||||||
core.run(p).map(|_| ())
|
core.run(p).map(|_| ())
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -114,11 +113,11 @@ mod tests {
|
|||||||
});
|
});
|
||||||
|
|
||||||
let sources = dbqueries::get_sources().unwrap();
|
let sources = dbqueries::get_sources().unwrap();
|
||||||
run(sources, true).unwrap();
|
run(sources).unwrap();
|
||||||
|
|
||||||
let sources = dbqueries::get_sources().unwrap();
|
let sources = dbqueries::get_sources().unwrap();
|
||||||
// Run again to cover Unique constrains erros.
|
// Run again to cover Unique constrains erros.
|
||||||
run(sources, true).unwrap();
|
run(sources).unwrap();
|
||||||
|
|
||||||
// Assert the index rows equal the controlled results
|
// Assert the index rows equal the controlled results
|
||||||
assert_eq!(dbqueries::get_sources().unwrap().len(), 6);
|
assert_eq!(dbqueries::get_sources().unwrap().len(), 6);
|
||||||
|
|||||||
@ -253,7 +253,7 @@ mod tests {
|
|||||||
// Copy it's id
|
// Copy it's id
|
||||||
let sid = source.id();
|
let sid = source.id();
|
||||||
// Convert Source it into a future Feed and index it
|
// Convert Source it into a future Feed and index it
|
||||||
pipeline::run(vec![source], true).unwrap();
|
pipeline::run(vec![source]).unwrap();
|
||||||
|
|
||||||
// Get the Podcast
|
// Get the Podcast
|
||||||
let pd = dbqueries::get_podcast_from_source_id(sid).unwrap().into();
|
let pd = dbqueries::get_podcast_from_source_id(sid).unwrap().into();
|
||||||
|
|||||||
@ -133,7 +133,7 @@ mod tests {
|
|||||||
let source = Source::from_url(url).unwrap();
|
let source = Source::from_url(url).unwrap();
|
||||||
// Copy its id
|
// Copy its id
|
||||||
let sid = source.id();
|
let sid = source.id();
|
||||||
pipeline::run(vec![source], true).unwrap();
|
pipeline::run(vec![source]).unwrap();
|
||||||
|
|
||||||
// Get the podcast
|
// Get the podcast
|
||||||
let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();
|
let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();
|
||||||
@ -164,7 +164,7 @@ mod tests {
|
|||||||
let source = Source::from_url(url).unwrap();
|
let source = Source::from_url(url).unwrap();
|
||||||
// Copy its id
|
// Copy its id
|
||||||
let sid = source.id();
|
let sid = source.id();
|
||||||
pipeline::run(vec![source], true).unwrap();
|
pipeline::run(vec![source]).unwrap();
|
||||||
|
|
||||||
// Get the podcast
|
// Get the podcast
|
||||||
let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();
|
let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();
|
||||||
|
|||||||
@ -205,7 +205,7 @@ where
|
|||||||
rayon::spawn(move || {
|
rayon::spawn(move || {
|
||||||
if let Some(s) = source {
|
if let Some(s) = source {
|
||||||
// Refresh only specified feeds
|
// Refresh only specified feeds
|
||||||
pipeline::run(s, false)
|
pipeline::run(s)
|
||||||
.map_err(|err| error!("Error: {}", err))
|
.map_err(|err| error!("Error: {}", err))
|
||||||
.map_err(|_| error!("Error while trying to update the database."))
|
.map_err(|_| error!("Error while trying to update the database."))
|
||||||
.ok();
|
.ok();
|
||||||
@ -213,7 +213,7 @@ where
|
|||||||
// Refresh all the feeds
|
// Refresh all the feeds
|
||||||
dbqueries::get_sources()
|
dbqueries::get_sources()
|
||||||
.map(|s| s.into_iter())
|
.map(|s| s.into_iter())
|
||||||
.and_then(|s| pipeline::run(s, false))
|
.and_then(pipeline::run)
|
||||||
.map_err(|err| error!("Error: {}", err))
|
.map_err(|err| error!("Error: {}", err))
|
||||||
.ok();
|
.ok();
|
||||||
};
|
};
|
||||||
|
|||||||
Loading…
Reference in New Issue
Block a user