diff --git a/hammond-data/benches/bench.rs b/hammond-data/benches/bench.rs index a990f31..0bbeaea 100644 --- a/hammond-data/benches/bench.rs +++ b/hammond-data/benches/bench.rs @@ -15,9 +15,10 @@ use rand::Rng; use test::Bencher; use hammond_data::run_migration_on; -use hammond_data::index_feed::{index_feeds, insert_return_source, Database, Feed}; +use hammond_data::models::NewSource; +use hammond_data::index_feed::{index_feeds, Database, Feed}; -// use std::io::BufRead; +use std::io::BufReader; use std::path::PathBuf; use std::sync::{Arc, Mutex}; @@ -55,20 +56,17 @@ fn get_temp_db() -> TempDB { } fn index_urls(m: &Database) { - let mut feeds: Vec<_> = URLS.par_iter() + URLS.par_iter() .map(|&(buff, url)| { // Create and insert a Source into db - let s = { - let temp = m.lock().unwrap(); - insert_return_source(&temp, url).unwrap() - }; + let s = NewSource::new_with_uri(url).into_source(m).unwrap(); // parse it into a channel - let chan = rss::Channel::read_from(buff).unwrap(); + let chan = rss::Channel::read_from(BufReader::new(buff)).unwrap(); Feed::new_from_channel_source(chan, s) }) - .collect(); - - index_feeds(m, &mut feeds); + .for_each(|feed| { + index_feeds(m, &mut [feed]); + }); } #[bench] diff --git a/hammond-data/src/index_feed.rs b/hammond-data/src/index_feed.rs index 778411e..d3fdbb3 100644 --- a/hammond-data/src/index_feed.rs +++ b/hammond-data/src/index_feed.rs @@ -1,12 +1,14 @@ use diesel::prelude::*; -use diesel; -use rss; use rayon::prelude::*; +use diesel; +use rss; + use dbqueries; +use feedparser; + use models::*; use errors::*; -use feedparser; use std::sync::{Arc, Mutex}; @@ -31,18 +33,16 @@ impl Feed { } fn index(&self, db: &Database) -> Result<()> { - let tempdb = db.lock().unwrap(); - let pd = self.index_channel(&tempdb)?; - drop(tempdb); + let pd = self.index_channel(db)?; self.index_channel_items(db, &pd)?; Ok(()) } - fn index_channel(&self, con: &SqliteConnection) -> Result<Podcast> { + fn index_channel(&self, db: &Database) -> Result<Podcast> { let pd =
feedparser::parse_podcast(&self.channel, self.source.id()); // Convert NewPodcast to Podcast - insert_return_podcast(con, &pd) + pd.into_podcast(db) } fn index_channel_items(&self, db: &Database, pd: &Podcast) -> Result<()> { @@ -76,7 +76,7 @@ pub fn index_source(con: &SqliteConnection, foo: &NewSource) { let _ = diesel::insert_into(source).values(foo).execute(con); } -fn index_podcast(con: &SqliteConnection, pd: &NewPodcast) -> Result<()> { +pub fn index_podcast(con: &SqliteConnection, pd: &NewPodcast) -> Result<()> { use schema::podcast::dsl::*; match dbqueries::get_podcast_from_title(con, &pd.title) { @@ -108,25 +108,6 @@ fn index_episode(con: &SqliteConnection, ep: &NewEpisode) -> QueryResult<()> { Ok(()) } -pub fn insert_return_source(con: &SqliteConnection, url: &str) -> Result<Source> { - let foo = NewSource::new_with_uri(url); - index_source(con, &foo); - - Ok(dbqueries::get_source_from_uri(con, foo.uri)?) -} - -fn insert_return_podcast(con: &SqliteConnection, pd: &NewPodcast) -> Result<Podcast> { - index_podcast(con, pd)?; - - Ok(dbqueries::get_podcast_from_title(con, &pd.title)?) -} - -// fn insert_return_episode(con: &SqliteConnection, ep: &NewEpisode) -> Result<Episode> { -// index_episode(con, ep)?; - -// Ok(dbqueries::get_episode_from_uri(con, ep.uri.unwrap())?)
-// } - pub fn full_index_loop(db: &Database) -> Result<()> { let mut f = fetch_all_feeds(db)?; @@ -137,7 +118,7 @@ pub fn full_index_loop(db: &Database) -> Result<()> { pub fn index_feeds(db: &Database, f: &mut [Feed]) { f.into_par_iter().for_each(|x| { - let e = x.index(db); + let e = x.index(&Arc::clone(db)); if e.is_err() { error!("Error While trying to update the database."); error!("Error msg: {}", e.unwrap_err()); @@ -160,7 +141,7 @@ pub fn fetch_feeds(db: &Database, feeds: Vec<Source>) -> Vec<Feed> { .into_par_iter() .filter_map(|x| { let uri = x.uri().to_owned(); - let l = Feed::new_from_source(db, x); + let l = Feed::new_from_source(&Arc::clone(db), x); if l.is_ok() { l.ok() } else { @@ -260,10 +241,10 @@ mod tests { let mut feeds: Vec<_> = urls.iter() .map(|&(path, url)| { - let tempdb = m.lock().unwrap(); // Create and insert a Source into db - let s = insert_return_source(&tempdb, url).unwrap(); - drop(tempdb); + let s = NewSource::new_with_uri(url) .into_source(&m.clone()) .unwrap(); // open the xml file let feed = fs::File::open(path).unwrap(); diff --git a/hammond-data/src/models/insertables.rs b/hammond-data/src/models/insertables.rs index ec90134..4de40e9 100644 --- a/hammond-data/src/models/insertables.rs +++ b/hammond-data/src/models/insertables.rs @@ -1,9 +1,10 @@ use diesel::prelude::*; use schema::{episode, podcast, source}; - -use models::Source; +use models::{Podcast, Source}; use index_feed::Database; +use errors::*; + use index_feed; use dbqueries; @@ -11,7 +12,7 @@ #[table_name = "source"] #[derive(Debug, Clone)] pub struct NewSource<'a> { - pub uri: &'a str, + uri: &'a str, last_modified: Option<&'a str>, http_etag: Option<&'a str>, } @@ -25,6 +26,7 @@ impl<'a> NewSource<'a> { } } + // Look out for when tryinto lands into stable.
pub fn into_source(self, db: &Database) -> QueryResult<Source> { let tempdb = db.lock().unwrap(); index_feed::index_source(&tempdb, &self); @@ -56,3 +58,13 @@ pub struct NewPodcast { pub image_uri: Option<String>, pub source_id: i32, } + +impl NewPodcast { + // Look out for when tryinto lands into stable. + pub fn into_podcast(self, db: &Database) -> Result<Podcast> { + let tempdb = db.lock().unwrap(); + index_feed::index_podcast(&tempdb, &self)?; + + Ok(dbqueries::get_podcast_from_title(&tempdb, &self.title)?) + } +} diff --git a/hammond-gtk/src/headerbar.rs b/hammond-gtk/src/headerbar.rs index f8837a4..70d3604 100644 --- a/hammond-gtk/src/headerbar.rs +++ b/hammond-gtk/src/headerbar.rs @@ -1,7 +1,7 @@ use gtk; use gtk::prelude::*; -use hammond_data::index_feed; +use hammond_data::models::NewSource; use hammond_data::index_feed::Database; use podcasts_view::update_podcasts_view; @@ -53,17 +53,14 @@ pub fn get_headerbar(db: &Database, stack: &gtk::Stack) -> gtk::HeaderBar { } fn on_add_bttn_clicked(db: &Database, stack: &gtk::Stack, url: &str) { - let source = { - let tempdb = db.lock().unwrap(); - index_feed::insert_return_source(&tempdb, url) - }; + let source = NewSource::new_with_uri(url).into_source(db); info!("{:?} feed added", url); if let Ok(s) = source { // update the db utils::refresh_feed(db, stack, Some(vec![s]), None); } else { - error!("Expected Error, feed probably already exists."); + error!("Feed probably already exists."); error!("Error: {:?}", source.unwrap_err()); } }