From ee9cede921dd575f0cc491d61e6124a34363358e Mon Sep 17 00:00:00 2001
From: Jordan Petridis
Date: Sat, 13 Jan 2018 06:08:51 +0200
Subject: [PATCH] hammond_data: Remove Source dependency from Feed struct.

---
 hammond-data/src/feed.rs              | 24 ++++++++++++------------
 hammond-data/src/models/queryables.rs |  4 ++--
 hammond-data/src/pipeline.rs          |  2 +-
 hammond-downloader/src/downloader.rs  |  2 +-
 hammond-gtk/src/manager.rs            |  2 +-
 hammond-gtk/src/utils.rs              |  2 +-
 6 files changed, 18 insertions(+), 18 deletions(-)

diff --git a/hammond-data/src/feed.rs b/hammond-data/src/feed.rs
index 750c4a0..8fa4fc9 100644
--- a/hammond-data/src/feed.rs
+++ b/hammond-data/src/feed.rs
@@ -1,4 +1,4 @@
-//! Index and retrieve Feeds.
+//! Index Feeds.
 
 use rayon::prelude::*;
 use diesel::prelude::*;
@@ -20,20 +20,20 @@ use errors::*;
 /// that corresponds to the `Source.uri` field.
 pub struct Feed {
     channel: rss::Channel,
-    source: Source,
+    source_id: i32,
 }
 
 impl Feed {
     /// Constructor that consumes a `Source` and returns the corresponding `Feed` struct.
-    pub fn from_source(s: Source) -> Result<Feed> {
+    pub fn from_source(s: &mut Source) -> Result<Feed> {
         s.into_feed(false)
     }
 
     /// Constructor that consumes a `Source` and a `rss::Channel` returns a `Feed` struct.
-    pub fn from_channel_source(chan: rss::Channel, s: Source) -> Feed {
+    pub fn from_channel_source(chan: rss::Channel, s: i32) -> Feed {
         Feed {
             channel: chan,
-            source: s,
+            source_id: s,
         }
     }
 
@@ -68,7 +68,7 @@ impl Feed {
     }
 
     fn parse_channel(&self) -> NewPodcast {
-        parser::new_podcast(&self.channel, *self.source.id())
+        parser::new_podcast(&self.channel, self.source_id)
     }
 
     fn parse_channel_items(&self, pd: &Podcast) -> Vec<NewEpisode> {
@@ -111,7 +111,7 @@ pub fn index(feed: &Feed) {
 }
 
 /// Consume a `Source` and return a `Feed`.
-fn fetch(source: Source) -> Result<Feed> {
+fn fetch(source: &mut Source) -> Result<Feed> {
     Feed::from_source(source)
 }
 
@@ -119,8 +119,8 @@ fn fetch(source: Source) -> Result<Feed> {
 pub fn index_loop<S: IntoParallelIterator<Item = Source>>(sources: S) {
     sources
         .into_par_iter()
-        .filter_map(|x| {
-            let foo = fetch(x);
+        .filter_map(|mut x| {
+            let foo = fetch(&mut x);
             if let Err(err) = foo {
                 error!("Error: {}", err);
                 None
@@ -203,7 +203,7 @@ mod tests {
             let feed = fs::File::open(path).unwrap();
             // parse it into a channel
             let chan = rss::Channel::read_from(BufReader::new(feed)).unwrap();
-            Feed::from_channel_source(chan, s)
+            Feed::from_channel_source(chan, *s.id())
         })
         .collect();
 
@@ -221,8 +221,8 @@
         truncate_db().unwrap();
         let url = "https://feeds.feedburner.com/InterceptedWithJeremyScahill";
 
-        let s1 = Source::from_url(url).unwrap();
-        let s2 = Source::from_url(url).unwrap();
+        let mut s1 = Source::from_url(url).unwrap();
+        let mut s2 = Source::from_url(url).unwrap();
         assert_eq!(s1, s2);
         assert_eq!(s1.id(), s2.id());
 
diff --git a/hammond-data/src/models/queryables.rs b/hammond-data/src/models/queryables.rs
index b659193..2557e88 100644
--- a/hammond-data/src/models/queryables.rs
+++ b/hammond-data/src/models/queryables.rs
@@ -670,7 +670,7 @@ impl<'a> Source {
     ///
     /// Consumes `self` and Returns the corresponding `Feed` Object.
     // TODO: Refactor into TryInto once it lands on stable.
-    pub fn into_feed(mut self, ignore_etags: bool) -> Result<Feed> {
+    pub fn into_feed(&mut self, ignore_etags: bool) -> Result<Feed> {
         use reqwest::header::{EntityTag, Headers, HttpDate, IfModifiedSince, IfNoneMatch};
         use reqwest::StatusCode;
 
@@ -725,7 +725,7 @@ impl<'a> Source {
         req.read_to_string(&mut buf)?;
         let chan = Channel::from_str(&buf)?;
 
-        Ok(Feed::from_channel_source(chan, self))
+        Ok(Feed::from_channel_source(chan, self.id))
     }
 
     /// Construct a new `Source` with the given `uri` and index it.
diff --git a/hammond-data/src/pipeline.rs b/hammond-data/src/pipeline.rs
index e37897a..ebc6367 100644
--- a/hammond-data/src/pipeline.rs
+++ b/hammond-data/src/pipeline.rs
@@ -11,7 +11,6 @@ use hyper_tls::HttpsConnector;
 
 use futures::{Future, Stream};
 // use futures::future::join_all;
-use tokio_core::reactor::Core;
 
 // use std::io::{self, Write};
 use std::str::FromStr;
@@ -63,6 +62,7 @@ fn res_to_channel(res: hyper::Response) -> Box