Apply clippy suggestions.

Jordan Petridis 2018-01-19 10:32:25 +02:00
parent 5c5faafc72
commit e6b0cfccb5
10 changed files with 67 additions and 69 deletions

View File

@@ -37,7 +37,7 @@ lazy_static! {
     static ref DB_PATH: PathBuf = TEMPDIR.path().join("hammond.db");
 }
-/// Get an r2d2 SqliteConnection.
+/// Get an r2d2 `SqliteConnection`.
 pub(crate) fn connection() -> Pool {
     POOL.clone()
 }

View File

@@ -11,6 +11,8 @@ use models::{IndexState, Update};
 use models::{NewEpisode, NewPodcast, Podcast};
 use pipeline::*;
+type InsertUpdate = (Vec<NewEpisode>, Vec<(NewEpisode, i32)>);
 #[derive(Debug)]
 /// Wrapper struct that hold a `Source` id and the `rss::Channel`
 /// that corresponds to the `Source.uri` field.
@@ -53,8 +55,8 @@ impl Feed {
         let (insert, update): (Vec<_>, Vec<_>) = items
             .into_iter()
             .filter_map(|item| glue(item, pd.id()).ok())
-            .filter(|state| match state {
-                &IndexState::NotChanged => false,
+            .filter(|state| match *state {
+                IndexState::NotChanged => false,
                 _ => true,
             })
             .partition_map(|state| match state {
@@ -79,27 +81,28 @@ impl Feed {
     fn index_channel_items_async(&self, pd: &Podcast) -> Box<Future<Item = (), Error = Error>> {
         let fut = self.get_stuff(pd)
             .and_then(|(insert, update)| {
-                info!("Indexing {} episodes.", insert.len());
-                dbqueries::index_new_episodes(insert.as_slice())?;
+                if !insert.is_empty() {
+                    info!("Indexing {} episodes.", insert.len());
+                    dbqueries::index_new_episodes(insert.as_slice())?;
+                }
                 Ok((insert, update))
             })
             .map(|(_, update)| {
-                info!("Updating {} episodes.", update.len());
-                update.iter().for_each(|&(ref ep, rowid)| {
-                    if let Err(err) = ep.update(rowid) {
-                        error!("Failed to index episode: {:?}.", ep.title());
-                        error!("Error msg: {}", err);
-                    };
-                })
+                if !update.is_empty() {
+                    info!("Updating {} episodes.", update.len());
+                    update.iter().for_each(|&(ref ep, rowid)| {
+                        if let Err(err) = ep.update(rowid) {
+                            error!("Failed to index episode: {:?}.", ep.title());
+                            error!("Error msg: {}", err);
+                        };
+                    })
+                }
             });
         Box::new(fut)
     }
-    fn get_stuff(
-        &self,
-        pd: &Podcast,
-    ) -> Box<Future<Item = (Vec<NewEpisode>, Vec<(NewEpisode, i32)>), Error = Error>> {
+    fn get_stuff(&self, pd: &Podcast) -> Box<Future<Item = InsertUpdate, Error = Error>> {
         let (insert, update): (Vec<_>, Vec<_>) = self.channel
             .items()
             .into_iter()

View File

@@ -79,22 +79,20 @@ impl Index for NewEpisode {
     fn index(&self) -> Result<()> {
         let exists = dbqueries::episode_exists(self.title(), self.podcast_id())?;
-        match exists {
-            false => self.insert(),
-            true => {
-                let old = dbqueries::get_episode_minimal_from_pk(self.title(), self.podcast_id())?;
+        if exists {
+            let old = dbqueries::get_episode_minimal_from_pk(self.title(), self.podcast_id())?;
-                // This is messy
-                if (self.title() != old.title()) || (self.uri() != old.uri())
-                    || (self.duration() != old.duration())
-                    || (self.epoch() != old.epoch())
-                    || (self.guid() != old.guid())
-                {
-                    self.update(old.rowid())
-                } else {
-                    Ok(())
-                }
-            }
-        }
+            // This is messy
+            if (self.title() != old.title()) || (self.uri() != old.uri())
+                || (self.duration() != old.duration())
+                || (self.epoch() != old.epoch()) || (self.guid() != old.guid())
+            {
+                self.update(old.rowid())
+            } else {
+                Ok(())
+            }
+        } else {
+            self.insert()
+        }
     }
 }

View File

@@ -61,21 +61,20 @@ impl Index for NewPodcast {
     fn index(&self) -> Result<()> {
         let exists = dbqueries::podcast_exists(self.source_id)?;
-        match exists {
-            false => self.insert(),
-            true => {
-                let old = dbqueries::get_podcast_from_source_id(self.source_id)?;
+        if exists {
+            let old = dbqueries::get_podcast_from_source_id(self.source_id)?;
-                // This is messy
-                if (self.link() != old.link()) || (self.title() != old.title())
-                    || (self.image_uri() != old.image_uri())
-                    || (self.description() != old.description())
-                {
-                    self.update(old.id())
-                } else {
-                    Ok(())
-                }
-            }
-        }
+            // This is messy
+            if (self.link() != old.link()) || (self.title() != old.title())
+                || (self.image_uri() != old.image_uri())
+                || (self.description() != old.description())
+            {
+                self.update(old.id())
+            } else {
+                Ok(())
+            }
+        } else {
+            self.insert()
+        }
     }
 }

View File

@@ -71,9 +71,9 @@ impl Source {
     /// Helper method to easily save/"sync" current state of self to the Database.
     pub fn save(&self) -> Result<Source> {
         let db = connection();
-        let tempdb = db.get()?;
+        let con = db.get()?;
-        Ok(self.save_changes::<Source>(&*tempdb)?)
+        Ok(self.save_changes::<Source>(&*con)?)
     }
     /// Extract Etag and LastModifier from res, and update self and the
@@ -81,14 +81,12 @@
     fn update_etag(&mut self, res: &Response) -> Result<()> {
         let headers = res.headers();
-        let etag = headers.get::<ETag>();
-        let lmod = headers.get::<LastModified>();
+        let etag = headers.get::<ETag>().map(|x| x.tag());
+        let lmod = headers.get::<LastModified>().map(|x| format!("{}", x));
-        if self.http_etag() != etag.map(|x| x.tag()) || self.last_modified != lmod.map(|x| {
-            format!("{}", x)
-        }) {
-            self.set_http_etag(etag.map(|x| x.tag()));
-            self.set_last_modified(lmod.map(|x| format!("{}", x)));
+        if (self.http_etag() != etag) || (self.last_modified != lmod) {
+            self.set_http_etag(etag);
+            self.set_last_modified(lmod);
             self.save()?;
         }
@@ -110,7 +108,7 @@
     ///
     /// Consumes `self` and Returns the corresponding `Feed` Object.
     // TODO: Refactor into TryInto once it lands on stable.
-    pub fn to_feed(
+    pub fn into_feed(
         mut self,
         client: &Client<HttpsConnector<HttpConnector>>,
         ignore_etags: bool,
@@ -118,7 +116,7 @@
         let id = self.id();
         let feed = self.request_constructor(client, ignore_etags)
             .map_err(From::from)
-            .and_then(move |res| {
+            .and_then(move |res| -> Result<Response> {
                 self.update_etag(&res)?;
                 Ok(res)
             })
@@ -126,7 +124,7 @@
                 match_status(res.status())?;
                 Ok(res)
             })
-            .and_then(|res| response_to_channel(res))
+            .and_then(response_to_channel)
             .map(move |chan| Feed::from_channel_source(chan, id));
         Box::new(feed)
@@ -170,9 +168,9 @@ fn response_to_channel(res: Response) -> Box<Future<Item = Channel, Error = Erro
         .and_then(|iter| {
             let utf_8_bytes = iter.collect::<Vec<u8>>();
             let buf = String::from_utf8_lossy(&utf_8_bytes).into_owned();
-            let chan = Channel::from_str(&buf).map_err(From::from);
-            chan
+            Channel::from_str(&buf).map_err(From::from)
         });
     Box::new(chan)
 }
@@ -223,7 +221,7 @@ mod tests {
         let url = "http://www.newrustacean.com/feed.xml";
         let source = Source::from_url(url).unwrap();
-        let feed = source.to_feed(&client, true);
+        let feed = source.into_feed(&client, true);
         assert!(core.run(feed).is_ok());
     }

View File

@@ -24,7 +24,7 @@ use std;
 ///
 /// Messy temp diagram:
 /// Source -> GET Request -> Update Etags -> Check Status -> Parse xml/Rss ->
-/// Convert rss::Channel into Feed -> Index Podcast -> Index Episodes.
+/// Convert `rss::Channel` into Feed -> Index Podcast -> Index Episodes.
 pub fn pipeline<S: IntoIterator<Item = Source>>(sources: S, ignore_etags: bool) -> Result<()> {
     let mut core = Core::new()?;
     let handle = core.handle();
@@ -35,7 +35,7 @@ pub fn pipeline<S: IntoIterator<Item = Source>>(sources: S, ignore_etags: bool)
     let list = sources
         .into_iter()
-        .map(|s| s.to_feed(&client, ignore_etags))
+        .map(|s| s.into_feed(&client, ignore_etags))
         .map(|fut| fut.and_then(|feed| feed.index_async()))
         .collect();
@@ -53,15 +53,15 @@ fn determine_ep_state(ep: NewEpisodeMinimal, item: &rss::Item) -> Result<IndexSt
     let exists = dbqueries::episode_exists(ep.title(), ep.podcast_id())?;
     if !exists {
-        return Ok(IndexState::Index(ep.into_new_episode(item)));
+        Ok(IndexState::Index(ep.into_new_episode(item)))
     } else {
         let old = dbqueries::get_episode_minimal_from_pk(ep.title(), ep.podcast_id())?;
         let rowid = old.rowid();
         if ep != old.into() {
-            return Ok(IndexState::Update((ep.into_new_episode(item), rowid)));
+            Ok(IndexState::Update((ep.into_new_episode(item), rowid)))
         } else {
-            return Ok(IndexState::NotChanged);
+            Ok(IndexState::NotChanged)
         }
     }
 }
@@ -69,7 +69,7 @@ fn determine_ep_state(ep: NewEpisodeMinimal, item: &rss::Item) -> Result<IndexSt
 #[allow(dead_code)]
 pub(crate) fn glue(item: &rss::Item, id: i32) -> Result<IndexState<NewEpisode>> {
     let e = NewEpisodeMinimal::new(item, id)?;
-    determine_ep_state(e, &item)
+    determine_ep_state(e, item)
 }
 #[allow(dead_code)]
@@ -100,7 +100,7 @@
             Err((r, _, rest)) => (Err(r), rest),
         };
         done.push(r);
-        if rest.len() == 0 {
+        if rest.is_empty() {
             Ok(Loop::Break(done))
         } else {
             Ok(Loop::Continue((rest, done)))

View File

@@ -138,7 +138,7 @@ pub fn get_download_folder(pd_title: &str) -> Result<String> {
 /// and deletes all of the downloaded content.
 /// TODO: Write Tests
 pub fn delete_show(pd: &Podcast) -> Result<()> {
-    dbqueries::remove_feed(&pd)?;
+    dbqueries::remove_feed(pd)?;
     info!("{} was removed succesfully.", pd.title());
     let fold = get_download_folder(pd.title())?;

View File

@@ -239,7 +239,7 @@ mod tests {
         // Copy it's id
         let sid = source.id();
         // Convert Source it into a future Feed and index it
-        let future = source.to_feed(&client, true);
+        let future = source.into_feed(&client, true);
         let feed = core.run(future).unwrap();
         feed.index().unwrap();

View File

@@ -147,7 +147,7 @@ mod tests {
         // Copy it's id
         let sid = source.id();
         // Convert Source it into a future Feed and index it
-        let future = source.to_feed(&client, true);
+        let future = source.into_feed(&client, true);
         let feed = core.run(future).unwrap();
         feed.index().unwrap();

View File

@@ -104,7 +104,7 @@ mod tests {
         // Copy it's id
         let sid = source.id();
         // Convert Source it into a future Feed and index it
-        let future = source.to_feed(&client, true);
+        let future = source.into_feed(&client, true);
         let feed = core.run(future).unwrap();
         feed.index().unwrap();