Setup of Rustfmt and rustfmt.toml config file.

Jordan Petridis 2017-09-23 09:06:15 +03:00
parent 2c1f66f85f
commit 874d33c1f4
No known key found for this signature in database
GPG Key ID: CEABAD9F5683B9A6
8 changed files with 66 additions and 67 deletions

rustfmt.toml (new file, 27 additions)

@@ -0,0 +1,27 @@
verbose = false
disable_all_formatting = false
skip_children = false
max_width = 100
# error_on_line_overflow = true
error_on_line_overflow = false
wrap_comments = true
error_on_line_overflow_comments = true
tab_spaces = 4
fn_call_width = 60
newline_style = "Unix"
fn_call_style = "Block"
report_todo = "Never"
report_fixme = "Never"
reorder_extern_crates = true
reorder_extern_crates_in_group = true
reorder_imports = false
hard_tabs = false
spaces_within_parens = false
use_try_shorthand = false
write_mode = "Overwrite"
merge_derives = true
array_horizontal_layout_threshold = 5
condense_wildcard_suffixes = true
# format_strings = true
chain_one_line_max = 60
chain_split_single_child = false
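
As a rough illustration of what a few of these settings produce (a minimal sketch; FeedItem, describe_item, the URL and the values are made up for this example and not taken from the repository): merge_derives = true folds stacked #[derive(...)] attributes into one, and with max_width = 100 plus fn_call_style = "Block" a call whose arguments no longer fit on one line is broken into a block-indented argument list, which is the kind of reflow visible in the diffs below.

// Hypothetical example, laid out the way the settings above would format it.
#[derive(Debug, Clone, PartialEq)] // merge_derives folds stacked derives into one attribute
struct FeedItem {
    title: String,
    uri: String,
    length: u64,
}

fn describe_item(title: &str, uri: &str, length: u64, published_date: &str) -> String {
    format!("{} <{}> ({} bytes, published {})", title, uri, length, published_date)
}

fn main() {
    // fn_call_style = "Block": arguments that overflow max_width go one per line,
    // indented by tab_spaces = 4, with a trailing comma.
    let summary = describe_item(
        "Some sufficiently long episode title that pushes this call past one hundred columns",
        "http://example.com/episode.mp3",
        12_345_678,
        "Sat, 23 Sep 2017 09:06:15 +0300",
    );
    println!("{}", summary);
}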

@@ -23,4 +23,4 @@ pub fn run() -> Result<()> {
::index_feed::foo();
Ok(())
}
}

@@ -1,5 +1,5 @@
use diesel::prelude::*;
use models::{Podcast, Episode, Source};
use models::{Episode, Podcast, Source};
pub fn get_sources(con: &SqliteConnection) -> QueryResult<Vec<Source>> {
use schema::source::dsl::*;
@@ -38,4 +38,4 @@ pub fn load_episode(con: &SqliteConnection, uri_: &str) -> QueryResult<Episode>
let ep = episode.filter(uri.eq(uri_)).get_result::<Episode>(con);
ep
}
}

@@ -4,7 +4,6 @@ use models;
use errors::*;
pub fn parse_podcast(chan: &Channel, source_id: i32) -> Result<models::NewPodcast> {
let title = chan.title().to_owned();
let link = chan.link().to_owned();
@@ -28,7 +27,6 @@ pub fn parse_podcast(chan: &Channel, source_id: i32) -> Result<models::NewPodcas
}
pub fn parse_episode<'a>(item: &'a Item, parent_id: i32) -> Result<models::NewEpisode<'a>> {
let title = item.title();
let description = item.description();
@@ -72,9 +70,8 @@ pub fn parse_episode<'a>(item: &'a Item, parent_id: i32) -> Result<models::NewEp
_ => 0,
};
let length = item.enclosure().map(
|x| x.length().parse().unwrap_or_default(),
);
let length = item.enclosure()
.map(|x| x.length().parse().unwrap_or_default());
let foo = models::NewEpisode {
title,
@@ -140,9 +137,7 @@ mod tests {
assert_eq!(pd.description, descr.to_string());
assert_eq!(
pd.image_uri,
Some(
"http://michaeltunnell.com/images/linux-unplugged.jpg".to_string(),
)
Some("http://michaeltunnell.com/images/linux-unplugged.jpg".to_string(),)
);
}
@@ -162,9 +157,7 @@ mod tests {
assert_eq!(pd.description, descr.to_string());
assert_eq!(
pd.image_uri,
Some(
"https://request-for-explanation.github.io/podcast/podcast.png".to_string(),
)
Some("https://request-for-explanation.github.io/podcast/podcast.png".to_string(),)
);
}
@@ -191,9 +184,7 @@ mod tests {
let descr2 = "This week on Intercepted: Jeremy gives an update on the aftermath of Blackwaters 2007 massacre of Iraqi civilians. Intercept reporter Lee Fang lays out how a network of libertarian think tanks called the Atlas Network is insidiously shaping political infrastructure in Latin America. We speak with attorney and former Hugo Chavez adviser Eva Golinger about the Venezuela\'s political turmoil.And we hear Claudia Lizardo of the Caracas-based band, La Pequeña Revancha, talk about her music and hopes for Venezuela.";
assert_eq!(
i2.title,
Some(
"Atlas Golfed — U.S.-Backed Think Tanks Target Latin America",
)
Some("Atlas Golfed — U.S.-Backed Think Tanks Target Latin America",)
);
assert_eq!(i2.uri, Some("http://traffic.megaphone.fm/FL5331443769.mp3"));
assert_eq!(i2.description, Some(descr2));
@@ -217,15 +208,11 @@ mod tests {
assert_eq!(
i.title,
Some(
"The Breakthrough: Hopelessness and Exploitation Inside Homes for Mentally Ill",
)
Some("The Breakthrough: Hopelessness and Exploitation Inside Homes for Mentally Ill",)
);
assert_eq!(
i.uri,
Some(
"http://tracking.feedpress.it/link/10581/6726758/20170908-cliff-levy.mp3",
)
Some("http://tracking.feedpress.it/link/10581/6726758/20170908-cliff-levy.mp3",)
);
assert_eq!(i.description, Some(descr));
assert_eq!(i.length, Some(33396551));
@@ -253,9 +240,7 @@ mod tests {
);
assert_eq!(
i2.uri,
Some(
"http://tracking.feedpress.it/link/10581/6726759/16_JohnAllen-CRAFT.mp3",
)
Some("http://tracking.feedpress.it/link/10581/6726759/16_JohnAllen-CRAFT.mp3",)
);
assert_eq!(i2.description, Some(descr2));
assert_eq!(i2.length, Some(17964071));
@@ -281,9 +266,7 @@ mod tests {
assert_eq!(i.title, Some("Hacking Devices with Kali Linux | LUP 214"));
assert_eq!(
i.uri,
Some(
"http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0214.mp3",
)
Some("http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0214.mp3",)
);
assert_eq!(i.description, Some(descr));
assert_eq!(i.length, Some(46479789));
@@ -300,9 +283,7 @@ mod tests {
assert_eq!(i2.title, Some("Gnome Does it Again | LUP 213"));
assert_eq!(
i2.uri,
Some(
"http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0213.mp3",
)
Some("http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0213.mp3",)
);
assert_eq!(i2.description, Some(descr2));
assert_eq!(i2.length, Some(36544272));
@@ -331,9 +312,7 @@ mod tests {
assert_eq!(i.length, Some(15077388));
assert_eq!(
i.guid,
Some(
"https://request-for-explanation.github.io/podcast/ep9-a-once-in-a-lifetime-rfc/",
)
Some("https://request-for-explanation.github.io/podcast/ep9-a-once-in-a-lifetime-rfc/",)
);
assert_eq!(i.published_date, Some("Mon, 28 Aug 2017 15:00:00 PDT"));
// Need to fix datetime parser first
@@ -354,12 +333,10 @@ mod tests {
assert_eq!(i2.length, Some(13713219));
assert_eq!(
i2.guid,
Some(
"https://request-for-explanation.github.io/podcast/ep8-an-existential-crisis/",
)
Some("https://request-for-explanation.github.io/podcast/ep8-an-existential-crisis/",)
);
assert_eq!(i2.published_date, Some("Tue, 15 Aug 2017 17:00:00 PDT"));
// Need to fix datetime parser first
// assert_eq!(i.epoch, );
}
}
}

@@ -6,7 +6,7 @@ use schema;
use dbqueries;
use feedparser;
use errors::*;
use models::{NewSource, Source, Podcast, Episode};
use models::{Episode, NewSource, Podcast, Source};
pub fn foo() {
let inpt = vec![
@@ -42,9 +42,9 @@ fn insert_source(con: &SqliteConnection, url: &str) -> Result<Source> {
// bar.save_changes::<Source>(con)?;
}
Err(_) => {
diesel::insert(&foo).into(schema::source::table).execute(
con,
)?;
diesel::insert(&foo)
.into(schema::source::table)
.execute(con)?;
}
}
@@ -67,9 +67,9 @@ fn index_podcast(
foo.save_changes::<Podcast>(con)?;
}
Err(_) => {
diesel::insert(&pd).into(schema::podcast::table).execute(
con,
)?;
diesel::insert(&pd)
.into(schema::podcast::table)
.execute(con)?;
}
}
@@ -89,11 +89,11 @@ fn index_episode(con: &SqliteConnection, item: &rss::Item, parent: &Podcast) ->
foo.set_length(ep.length);
foo.set_epoch(ep.epoch);
foo.save_changes::<Episode>(con)?;
}
}
Err(_) => {
diesel::insert(&ep).into(schema::episode::table).execute(
con,
)?;
diesel::insert(&ep)
.into(schema::episode::table)
.execute(con)?;
}
}
@@ -122,7 +122,6 @@ pub fn index_loop(db: SqliteConnection) -> Result<()> {
info!("{:#?}", pd);
// info!("{:#?}", episodes);
// info!("{:?}", chan);
}
Ok(())
}
}

@@ -19,10 +19,10 @@ extern crate diesel;
#[macro_use]
extern crate diesel_codegen;
extern crate xdg;
extern crate chrono;
extern crate reqwest;
extern crate rss;
extern crate chrono;
extern crate xdg;
pub mod cli;
pub mod schema;
@@ -68,7 +68,7 @@ lazy_static!{
static ref HAMMOND_DATA: PathBuf = HAMMOND_XDG.create_data_directory(HAMMOND_XDG.get_data_home()).unwrap();
static ref HAMMOND_CONFIG: PathBuf = HAMMOND_XDG.create_config_directory(HAMMOND_XDG.get_config_home()).unwrap();
static ref HAMMOND_CACHE: PathBuf = HAMMOND_XDG.create_cache_directory(HAMMOND_XDG.get_cache_home()).unwrap();
static ref DB_PATH: PathBuf = {
// Ensure that xdg_data is created.
&HAMMOND_DATA;
@@ -88,10 +88,8 @@ pub fn init() -> Result<()> {
pub fn establish_connection() -> SqliteConnection {
let database_url = DB_PATH.to_str().unwrap();
// let database_url = &String::from(".random/foo.db");
SqliteConnection::establish(database_url).expect(&format!(
"Error connecting to {}",
database_url
))
SqliteConnection::establish(database_url)
.expect(&format!("Error connecting to {}", database_url))
}
#[cfg(test)]

@@ -7,4 +7,4 @@ extern crate hammond;
use hammond::cli::run;
quick_main!(run);
quick_main!(run);

@@ -7,8 +7,7 @@ use reqwest::header::{ETag, LastModified};
use schema::{episode, podcast, source};
use errors::*;
#[derive(Queryable, Identifiable, AsChangeset)]
#[derive(Associations)]
#[derive(Queryable, Identifiable, AsChangeset, Associations)]
#[table_name = "episode"]
#[belongs_to(Podcast, foreign_key = "podcast_id")]
#[derive(Debug, Clone)]
@@ -95,8 +94,7 @@ impl Episode {
}
}
#[derive(Queryable, Identifiable, AsChangeset)]
#[derive(Associations)]
#[derive(Queryable, Identifiable, AsChangeset, Associations)]
#[belongs_to(Source, foreign_key = "source_id")]
#[table_name = "podcast"]
#[derive(Debug, Clone)]
@@ -195,9 +193,9 @@ impl<'a> Source {
Ok(chan)
}
/// Extract Etag and LastModifier from req, and update self and the corresponding db row.
/// Extract Etag and LastModifier from req, and update self and the
/// corresponding db row.
fn update_etag(&mut self, con: &SqliteConnection, req: &reqwest::Response) -> Result<()> {
let headers = req.headers();
debug!("{:#?}", headers);
@@ -267,4 +265,4 @@ impl NewPodcast {
let foo = ::feedparser::parse_podcast(&chan, parent.id())?;
Ok(foo)
}
}
}