Closes #2. Kudos to @jwykeham for the fix!

This commit is contained in:
Jordan Petridis 2017-12-29 02:45:02 +02:00
parent 55b6fccefd
commit ca06a16bd9
No known key found for this signature in database
GPG Key ID: CEABAD9F5683B9A6
5 changed files with 33 additions and 29 deletions

View File

@ -31,6 +31,3 @@
- [ ] Make Podcast cover fetching and loading not block the execution of the program at startup.
- [ ] Lazy evaluate episode loading based on the show_widget's scrolling.
**FIXME:**
- [ ] Fix Etag/Last-modified implementation. [#2](https://gitlab.gnome.org/alatiera/Hammond/issues/2)

View File

@ -26,7 +26,7 @@ pub struct Feed {
impl Feed {
/// Constructor that consumes a `Source` and returns the corresponding `Feed` struct.
pub fn from_source(s: Source) -> Result<Feed> {
s.into_feed()
s.into_feed(false)
}
/// Constructor that consumes a `Source` and a `rss::Channel` returns a `Feed` struct.
@ -112,19 +112,22 @@ pub fn index(feed: &Feed) {
/// Consume a `Source` and return a `Feed`.
fn fetch(source: Source) -> Result<Feed> {
let uri = source.uri().to_owned();
let feed = Feed::from_source(source);
if feed.is_err() {
error!("Error While trying to fetch from source url: {}.", uri);
}
feed
Feed::from_source(source)
}
/// Index a "list" of `Source`s.
pub fn index_loop<S: IntoParallelIterator<Item = Source>>(sources: S) {
sources
.into_par_iter()
.filter_map(|x| fetch(x).ok())
.filter_map(|x| {
let foo = fetch(x);
if let Err(err) = foo {
error!("Error: {}", err);
None
} else {
foo.ok()
}
})
.for_each(|x| index(&x));
info!("Indexing done.");
@ -223,8 +226,8 @@ mod tests {
assert_eq!(s1, s2);
assert_eq!(s1.id(), s2.id());
let f1 = s1.into_feed().unwrap();
let f2 = s2.into_feed().unwrap();
let f1 = s1.into_feed(false).unwrap();
let f2 = s2.into_feed(false).unwrap();
let p1 = f1.get_podcast().unwrap();
let p2 = {

View File

@ -640,18 +640,22 @@ impl<'a> Source {
///
/// Consumes `self` and Returns the corresponding `Feed` Object.
// TODO: Refactor into TryInto once it lands on stable.
pub fn into_feed(mut self) -> Result<Feed> {
use reqwest::header::{ETag, EntityTag, Headers, HttpDate, LastModified};
pub fn into_feed(mut self, ignore_etags: bool) -> Result<Feed> {
use reqwest::header::{EntityTag, Headers, HttpDate, IfModifiedSince, IfNoneMatch};
let mut headers = Headers::new();
if let Some(foo) = self.http_etag() {
headers.set(ETag(EntityTag::new(true, foo.to_owned())));
}
if !ignore_etags {
if let Some(foo) = self.http_etag() {
headers.set(IfNoneMatch::Items(vec![
EntityTag::new(true, foo.to_owned()),
]));
}
if let Some(foo) = self.last_modified() {
if let Ok(x) = foo.parse::<HttpDate>() {
headers.set(LastModified(x));
if let Some(foo) = self.last_modified() {
if let Ok(x) = foo.parse::<HttpDate>() {
headers.set(IfModifiedSince(x));
}
}
}
@ -663,17 +667,17 @@ impl<'a> Source {
info!("GET to {} , returned: {}", self.uri(), req.status());
self.update_etag(&req)?;
// TODO match on more stuff
// 301: Permanent redirect of the url
// 302: Temporary redirect of the url
// 304: Up to date Feed, checked with the Etag
// 410: Feed deleted
// match req.status() {
// reqwest::StatusCode::NotModified => (),
// _ => (),
// };
self.update_etag(&req)?;
match req.status() {
reqwest::StatusCode::NotModified => bail!("304, skipping.."),
_ => (),
};
let mut buf = String::new();
req.read_to_string(&mut buf)?;

View File

@ -214,7 +214,7 @@ mod tests {
let sid = source.id().clone();
// Convert Source it into a Feed and index it
let feed = source.into_feed().unwrap();
let feed = source.into_feed(true).unwrap();
index(&feed);
// Get the Podcast

View File

@ -113,7 +113,7 @@ mod tests {
let sid = source.id().clone();
// Convert Source it into a Feed and index it
let feed = source.into_feed().unwrap();
let feed = source.into_feed(true).unwrap();
index(&feed);
// Get the Podcast