Applied some clippy suggestions.

Jordan Petridis 2017-10-09 16:49:00 +03:00
parent 2c1b55c2fb
commit f1d3cd5e25
6 changed files with 21 additions and 18 deletions

View File

@@ -20,4 +20,4 @@ dotenv = "*"
 [dev-dependencies]
 tempdir = "0.3.5"
 rand = "0.3.16"

View File

@@ -1,3 +1,5 @@
+#![cfg_attr(feature = "cargo-clippy", allow(let_and_return))]
+
 use diesel::prelude::*;
 use models::{Episode, Podcast, Source};
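
The new crate-level attribute opts out of clippy's `let_and_return` lint rather than rewriting the flagged queries. A minimal sketch of the pattern that lint complains about; `squared` is a hypothetical example, not code from this crate:

```rust
// Flagged form: a value is bound to a local and then returned on the
// next line; clippy suggests making the expression the tail directly.
fn squared(x: i32) -> i32 {
    let result = x * x;
    result // clippy: `let_and_return`; could just be `x * x`
}

fn main() {
    assert_eq!(squared(3), 9);
}
```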

View File

@@ -20,7 +20,7 @@ pub fn parse_podcast(chan: &Channel, source_id: i32) -> Result<models::NewPodcast> {
     Ok(foo)
 }
 
-pub fn parse_episode<'a>(item: &'a Item, parent_id: i32) -> Result<models::NewEpisode<'a>> {
+pub fn parse_episode(item: &Item, parent_id: i32) -> Result<models::NewEpisode> {
     let title = item.title();
     let description = item.description();
     let guid = item.guid().map(|x| x.value());
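
Dropping the explicit lifetimes matches what clippy's `needless_lifetimes` lint suggests when elision can infer them (presumably the lint behind this change; the commit does not name it). A standalone sketch with hypothetical functions:

```rust
// Flagged form: the annotations repeat what lifetime elision already infers.
fn first_word<'a>(s: &'a str) -> &'a str {
    s.split_whitespace().next().unwrap_or("")
}

// Elided form clippy suggests: with a single input reference, the output
// lifetime is unambiguous without naming it.
fn first_word_elided(s: &str) -> &str {
    s.split_whitespace().next().unwrap_or("")
}

fn main() {
    assert_eq!(first_word("hello world"), first_word_elided("hello world"));
}
```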

View File

@@ -1,4 +1,5 @@
 #![allow(dead_code)]
+#![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
 
 use diesel::prelude::*;
 use diesel;
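
This attribute silences clippy's `clone_on_ref_ptr` restriction lint for the `mutex.clone()` calls later in this file rather than rewriting them as `Arc::clone(&mutex)`. A minimal sketch of what the lint objects to:

```rust
use std::sync::Arc;

fn main() {
    let shared = Arc::new(vec![1, 2, 3]);
    // Flagged form: ambiguous whether the pointer or the inner Vec is cloned.
    let a = shared.clone();
    // Form the lint suggests: explicitly just a reference-count bump.
    let b = Arc::clone(&shared);
    assert_eq!(a, b);
}
```
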
@@ -39,7 +40,7 @@ fn index_podcast(con: &SqliteConnection, pd: &NewPodcast) -> Result<()> {
 }
 
 fn index_episode(con: &SqliteConnection, ep: &NewEpisode) -> Result<()> {
-    match dbqueries::load_episode(con, &ep.uri.unwrap()) {
+    match dbqueries::load_episode(con, ep.uri.unwrap()) {
         Ok(mut foo) => if foo.title() != ep.title
             || foo.published_date() != ep.published_date.as_ref().map(|x| x.as_str())
         {
@@ -74,7 +75,7 @@ fn insert_return_podcast(con: &SqliteConnection, pd: &NewPodcast) -> Result<Podcast> {
 fn insert_return_episode(con: &SqliteConnection, ep: &NewEpisode) -> Result<Episode> {
     index_episode(con, ep)?;
-    Ok(dbqueries::load_episode(con, &ep.uri.unwrap())?)
+    Ok(dbqueries::load_episode(con, ep.uri.unwrap())?)
 }
 
 pub fn index_loop(db: SqliteConnection, force: bool) -> Result<()> {
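
Both `load_episode` call sites drop a redundant `&`: assuming `ep.uri` is an `Option<&str>`, `&ep.uri.unwrap()` built a `&&str` that the compiler immediately auto-dereferenced, which is what clippy's `needless_borrow` lint flags. A standalone reproduction with hypothetical names:

```rust
fn takes_str(s: &str) -> usize {
    s.len()
}

fn main() {
    let uri: Option<&str> = Some("https://example.com/feed.xml");
    // Flagged form: `uri.unwrap()` is already a `&str`, so the extra `&`
    // produces a `&&str` that is auto-derefed straight back.
    let a = takes_str(&uri.unwrap());
    // Suggested form.
    let b = takes_str(uri.unwrap());
    assert_eq!(a, b);
}
```
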
@@ -105,18 +106,18 @@ fn complete_index_from_source(
     req.read_to_string(&mut buf)?;
     let chan = rss::Channel::from_str(&buf)?;
 
-    complete_index(mutex, chan, &source)?;
+    complete_index(mutex, &chan, source)?;
     Ok(())
 }
 
 fn complete_index(
     mutex: Arc<Mutex<SqliteConnection>>,
-    chan: rss::Channel,
+    chan: &rss::Channel,
     parent: &Source,
 ) -> Result<()> {
     let tempdb = mutex.lock().unwrap();
-    let pd = index_channel(&tempdb, &chan, parent)?;
+    let pd = index_channel(&tempdb, chan, parent)?;
     drop(tempdb);
 
     index_channel_items(mutex.clone(), chan.items(), &pd)?;
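
`complete_index` now borrows the channel instead of consuming it, the fix clippy's `needless_pass_by_value` lint usually proposes for read-only arguments (assuming that is the lint involved here). A minimal sketch with a hypothetical `Channel` type:

```rust
struct Channel {
    items: Vec<String>,
}

// Flagged form: takes ownership although the value is only read.
fn count_items_owned(chan: Channel) -> usize {
    chan.items.len()
}

// Suggested form: borrow, so callers keep the value.
fn count_items(chan: &Channel) -> usize {
    chan.items.len()
}

fn main() {
    let chan = Channel { items: vec!["ep1".into(), "ep2".into()] };
    assert_eq!(count_items(&chan), 2);
    assert_eq!(count_items_owned(chan), 2); // `chan` is moved here
}
```
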
@@ -125,7 +126,7 @@ fn complete_index(
 }
 
 fn index_channel(db: &SqliteConnection, chan: &rss::Channel, parent: &Source) -> Result<Podcast> {
-    let pd = feedparser::parse_podcast(&chan, parent.id())?;
+    let pd = feedparser::parse_podcast(chan, parent.id())?;
     // Convert NewPodcast to Podcast
     let pd = insert_return_podcast(db, &pd)?;
     Ok(pd)
@@ -138,13 +139,13 @@ fn index_channel_items(
     pd: &Podcast,
 ) -> Result<()> {
     let foo: Vec<_> = i.par_iter()
-        .map(|x| feedparser::parse_episode(&x, pd.id()).unwrap())
+        .map(|x| feedparser::parse_episode(x, pd.id()).unwrap())
         .collect();
 
     foo.par_iter().for_each(|x| {
         let dbmutex = mutex.clone();
         let db = dbmutex.lock().unwrap();
-        index_episode(&db, &x).unwrap();
+        index_episode(&db, x).unwrap();
     });
     Ok(())
 }
@@ -315,7 +316,7 @@ mod tests {
         let chan = rss::Channel::read_from(BufReader::new(feed)).unwrap();
 
         // Index the channel
-        complete_index(m.clone(), chan, &s).unwrap();
+        complete_index(m.clone(), &chan, &s).unwrap();
     })
     .fold((), |(), _| ());

View File

@@ -1,4 +1,5 @@
 #![recursion_limit = "1024"]
+#![cfg_attr(feature = "cargo-clippy", allow(blacklisted_name))]
 
 #[macro_use]
 extern crate error_chain;
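
`blacklisted_name` is the clippy lint that rejects placeholder identifiers such as `foo`; the crate keeps its `foo` bindings (visible in the hunks above) and allows the lint crate-wide instead. A one-line illustration:

```rust
fn main() {
    let foo = 42; // clippy: `blacklisted_name` flags `foo`, `bar`, `baz`
    println!("{}", foo);
}
```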

View File

@@ -62,12 +62,11 @@ pub fn latest_dl(connection: &SqliteConnection, limit: u32) -> Result<()> {
         // TODO when for_each reaches stable:
         // Remove all the ugly folds(_) and replace map() with for_each().
         .map(|x| -> Result<()> {
-            let mut eps;
-            if limit == 0 {
-                eps = dbqueries::get_pd_episodes(connection, &x)?;
+            let mut eps = if limit == 0 {
+                dbqueries::get_pd_episodes(connection, x)?
             } else {
-                eps = dbqueries::get_pd_episodes_limit(connection, &x, limit)?;
-            }
+                dbqueries::get_pd_episodes_limit(connection, x, limit)?
+            };
 
             // It might be better to make it a hash of the title
             let dl_fold = format!("{}/{}", DL_DIR.to_str().unwrap(), x.title());
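
Collapsing the declare-then-assign sequence into a single `let ... = if` expression is what clippy's `useless_let_if_seq` lint suggests (presumably the lint behind this hunk). A standalone sketch:

```rust
fn main() {
    let limit = 0;

    // Flagged form: declare first, assign in every branch.
    let a;
    if limit == 0 {
        a = "all";
    } else {
        a = "limited";
    }

    // Suggested form: bind the `if` expression directly.
    let b = if limit == 0 { "all" } else { "limited" };
    assert_eq!(a, b);
}
```
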
@@ -79,7 +78,7 @@ pub fn latest_dl(connection: &SqliteConnection, limit: u32) -> Result<()> {
             eps.iter_mut()
                 .map(|y| -> Result<()> {
                     // Check if its alrdy downloaded
-                    if let Some(_) = y.local_uri() {
+                    if y.local_uri().is_some() {
                         // Not idiomatic but I am still fighting the borrow-checker.
                         if Path::new(y.local_uri().unwrap().to_owned().as_str()).exists() {
                             return Ok(());
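
Matching `Some(_)` only to test for presence is what clippy's `redundant_pattern_matching` lint flags; `Option::is_some` says the same thing directly. A standalone sketch with a hypothetical value:

```rust
fn main() {
    let local_uri: Option<&str> = Some("/tmp/episode.mp3");

    // Flagged form: the pattern binds nothing, it only tests for Some.
    if let Some(_) = local_uri {
        println!("already downloaded");
    }

    // Suggested form.
    if local_uri.is_some() {
        println!("already downloaded");
    }
}
```
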
@@ -90,7 +89,7 @@ pub fn latest_dl(connection: &SqliteConnection, limit: u32) -> Result<()> {
             };
 
             // Unreliable and hacky way to extract the file extension from the url.
-            let ext = y.uri().split(".").last().unwrap().to_owned();
+            let ext = y.uri().split('.').last().unwrap().to_owned();
 
             // Construct the download path.
             let dlpath = format!("{}/{}.{}", dl_fold, y.title().unwrap().to_owned(), ext);
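
Switching `split(".")` to `split('.')` follows clippy's `single_char_pattern` lint: a `char` pattern expresses a one-character separator more directly and matches slightly faster than a one-byte `&str`. A standalone check with a hypothetical URL:

```rust
fn main() {
    let uri = "https://example.com/episode.mp3";
    // Same result as `split(".")`, but with the `char` pattern clippy prefers.
    let ext = uri.split('.').last().unwrap();
    assert_eq!(ext, "mp3");
}
```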