From ce29602431b8fca61666b7319cd10857a7a30cbc Mon Sep 17 00:00:00 2001
From: Jordan Petridis
Date: Mon, 20 Nov 2017 16:57:27 +0200
Subject: [PATCH] I hate everything.

---
 hammond-data/src/dbqueries.rs          | 42 +++++++++++-----------
 hammond-data/src/feed.rs               |  4 +--
 hammond-data/src/lib.rs                | 49 ++++++++++++++++----------
 hammond-data/src/models/insertables.rs |  8 ++---
 hammond-data/src/models/queryables.rs  |  8 ++---
 hammond-data/src/utils.rs              | 23 ------------
 hammond-gtk/src/main.rs                |  1 -
 7 files changed, 61 insertions(+), 74 deletions(-)

diff --git a/hammond-data/src/dbqueries.rs b/hammond-data/src/dbqueries.rs
index 18bdbc1..6e86d13 100644
--- a/hammond-data/src/dbqueries.rs
+++ b/hammond-data/src/dbqueries.rs
@@ -7,33 +7,33 @@ use chrono::prelude::*;
 
 /// Random db querries helper functions.
 /// Probably needs cleanup.
-use POOL;
+use connection;
 
 pub fn get_sources() -> QueryResult<Vec<Source>> {
     use schema::source::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     source.load::<Source>(&*con)
 }
 
 pub fn get_podcasts() -> QueryResult<Vec<Podcast>> {
     use schema::podcast::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     podcast.load::<Podcast>(&*con)
 }
 
 pub fn get_episodes() -> QueryResult<Vec<Episode>> {
     use schema::episode::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     episode.order(epoch.desc()).load::<Episode>(&*con)
 }
 
 pub fn get_downloaded_episodes() -> QueryResult<Vec<Episode>> {
     use schema::episode::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     episode
         .filter(local_uri.is_not_null())
         .load::<Episode>(&*con)
@@ -42,21 +42,21 @@ pub fn get_downloaded_episodes() -> QueryResult<Vec<Episode>> {
 pub fn get_played_episodes() -> QueryResult<Vec<Episode>> {
     use schema::episode::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     episode.filter(played.is_not_null()).load::<Episode>(&*con)
 }
 
 pub fn get_episode_from_id(ep_id: i32) -> QueryResult<Episode> {
     use schema::episode::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     episode.filter(id.eq(ep_id)).get_result::<Episode>(&*con)
 }
 
 pub fn get_episode_local_uri_from_id(ep_id: i32) -> QueryResult<Option<String>> {
     use schema::episode::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     episode
         .filter(id.eq(ep_id))
@@ -67,7 +67,7 @@ pub fn get_episode_local_uri_from_id(ep_id: i32) -> QueryResult<Option<String>>
 
 pub fn get_episodes_with_limit(limit: u32) -> QueryResult<Vec<Episode>> {
     use schema::episode::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     episode
         .order(epoch.desc())
@@ -78,14 +78,14 @@ pub fn get_episodes_with_limit(limit: u32) -> QueryResult<Vec<Episode>> {
 
 pub fn get_podcast_from_id(pid: i32) -> QueryResult<Podcast> {
     use schema::podcast::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     podcast.filter(id.eq(pid)).get_result::<Podcast>(&*con)
 }
 
 pub fn get_pd_episodes(parent: &Podcast) -> QueryResult<Vec<Episode>> {
     use schema::episode::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     Episode::belonging_to(parent)
         .order(epoch.desc())
@@ -95,7 +95,7 @@ pub fn get_pd_unplayed_episodes(parent: &Podcast) -> QueryResult<Vec<Episode>> {
     use schema::episode::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     Episode::belonging_to(parent)
         .filter(played.is_null())
@@ -106,7 +106,7 @@ pub fn get_pd_unplayed_episodes(parent: &Podcast) -> QueryResult<Vec<Episode>> {
 
 pub fn get_pd_episodes_limit(parent: &Podcast, limit: u32) -> QueryResult<Vec<Episode>> {
     use schema::episode::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     Episode::belonging_to(parent)
         .order(epoch.desc())
@@ -117,14 +117,14 @@ pub fn get_pd_episodes_limit(parent: &Podcast, limit: u32) -> QueryResult<Vec<Episode>> {
 pub fn get_source_from_uri(uri_: &str) -> QueryResult<Source> {
     use schema::source::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     source.filter(uri.eq(uri_)).get_result::<Source>(&*con)
 }
 
 pub fn get_podcast_from_title(title_: &str) -> QueryResult<Podcast> {
     use schema::podcast::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     podcast
         .filter(title.eq(title_))
         .get_result::<Podcast>(&*con)
@@ -133,12 +133,12 @@ pub fn get_podcast_from_title(title_: &str) -> QueryResult<Podcast> {
 pub fn get_episode_from_uri(uri_: &str) -> QueryResult<Episode> {
     use schema::episode::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     episode.filter(uri.eq(uri_)).get_result::<Episode>(&*con)
 }
 
 pub fn remove_feed(pd: &Podcast) -> QueryResult<()> {
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     con.transaction(|| -> QueryResult<()> {
         delete_source(pd.source_id())?;
@@ -150,28 +150,28 @@ pub fn remove_feed(pd: &Podcast) -> QueryResult<()> {
 
 pub fn delete_source(source_id: i32) -> QueryResult<usize> {
     use schema::source::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     diesel::delete(source.filter(id.eq(source_id))).execute(&*con)
 }
 
 pub fn delete_podcast(podcast_id: i32) -> QueryResult<usize> {
     use schema::podcast::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     diesel::delete(podcast.filter(id.eq(podcast_id))).execute(&*con)
 }
 
 pub fn delete_podcast_episodes(parent_id: i32) -> QueryResult<usize> {
     use schema::episode::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     diesel::delete(episode.filter(podcast_id.eq(parent_id))).execute(&*con)
 }
 
 pub fn update_none_to_played_now(parent: &Podcast) -> QueryResult<usize> {
     use schema::episode::dsl::*;
-    let con = POOL.clone().get().unwrap();
+    let con = connection().get().unwrap();
 
     let epoch_now = Utc::now().timestamp() as i32;
     con.transaction(|| -> QueryResult<usize> {
diff --git a/hammond-data/src/feed.rs b/hammond-data/src/feed.rs
index 093a63c..05bdb0b 100644
--- a/hammond-data/src/feed.rs
+++ b/hammond-data/src/feed.rs
@@ -6,7 +6,7 @@ use rss;
 
 use dbqueries;
 use parser;
-use POOL;
+use connection;
 use models::{Podcast, Source};
 use errors::*;
 
@@ -51,7 +51,7 @@ impl Feed {
             .map(|item| parser::new_episode(item, *pd.id()))
             .collect();
 
-        let tempdb = POOL.clone().get().unwrap();
+        let tempdb = connection().get().unwrap();
         let _ = tempdb.transaction::<(), Error, _>(|| {
             episodes.into_iter().for_each(|x| {
                 let e = x.index(&*tempdb);
diff --git a/hammond-data/src/lib.rs b/hammond-data/src/lib.rs
index d3039e6..776df89 100644
--- a/hammond-data/src/lib.rs
+++ b/hammond-data/src/lib.rs
@@ -32,7 +32,13 @@ pub mod errors;
 mod parser;
 mod schema;
 
+use r2d2_diesel::ConnectionManager;
+use diesel::SqliteConnection;
+
 use std::path::PathBuf;
+// use std::time::Duration;
+
+type Pool = r2d2::Pool<ConnectionManager<SqliteConnection>>;
 
 lazy_static!{
     #[allow(dead_code)]
@@ -56,39 +62,44 @@ lazy_static!{
         HAMMOND_XDG.create_data_directory("Downloads").unwrap()
     };
 
-    pub static ref DB_PATH: PathBuf = HAMMOND_XDG.place_data_file("hammond.db").unwrap();
+    static ref POOL: Pool = init_pool(DB_PATH.to_str().unwrap());
 }
 
 #[cfg(not(test))]
 lazy_static! {
-    pub static ref POOL: utils::Pool = utils::init_pool(DB_PATH.to_str().unwrap());
+    static ref DB_PATH: PathBuf = HAMMOND_XDG.place_data_file("hammond.db").unwrap();
 }
 
-#[cfg(test)]
-lazy_static! {
-    static ref TEMPDB: TempDB = get_temp_db();
-
-    pub static ref POOL: &'static utils::Pool = &TEMPDB.2;
-}
-
-#[cfg(test)]
-struct TempDB(tempdir::TempDir, PathBuf, utils::Pool);
-
 #[cfg(test)]
 extern crate tempdir;
 
 #[cfg(test)]
-/// Create and return a Temporary DB.
-/// Will be destroed once the returned variable(s) is dropped.
-fn get_temp_db() -> TempDB {
-    let tmp_dir = tempdir::TempDir::new("hammond_unit_test").unwrap();
-    let db_path = tmp_dir.path().join("test.db");
+lazy_static! {
+    static ref TEMPDIR: tempdir::TempDir = {
+        tempdir::TempDir::new("hammond_unit_test").unwrap()
+    };
+
+    static ref DB_PATH: PathBuf = TEMPDIR.path().join("hammond.db");
+}
+
+pub fn connection() -> Pool {
+    POOL.clone()
+}
+
+fn init_pool(db_path: &str) -> Pool {
+    let config = r2d2::Config::builder()
+        // .pool_size(60)
+        // .min_idle(Some(60))
+        // .connection_timeout(Duration::from_secs(60))
+        .build();
+    let manager = ConnectionManager::<SqliteConnection>::new(db_path);
+    let pool = r2d2::Pool::new(config, manager).expect("Failed to create pool.");
+    info!("Database pool initialized.");
 
-    let pool = utils::init_pool(db_path.to_str().unwrap());
     {
         let db = pool.clone().get().unwrap();
         utils::run_migration_on(&*db).unwrap();
     }
 
-    TempDB(tmp_dir, db_path, pool)
+    pool
 }
diff --git a/hammond-data/src/models/insertables.rs b/hammond-data/src/models/insertables.rs
index 69f7be6..0bbfe10 100644
--- a/hammond-data/src/models/insertables.rs
+++ b/hammond-data/src/models/insertables.rs
@@ -3,7 +3,7 @@ use diesel;
 use schema::{episode, podcast, source};
 use models::{Podcast, Source};
 
-use POOL;
+use connection;
 use errors::*;
 use dbqueries;
 
@@ -29,7 +29,7 @@ impl<'a> NewSource<'a> {
     fn index(&self) {
         use schema::source::dsl::*;
 
-        let tempdb = POOL.clone().get().unwrap();
+        let tempdb = connection().get().unwrap();
         // Throw away the result like `insert or ignore`
         // Diesel deos not support `insert or ignore` yet.
         let _ = diesel::insert_into(source).values(self).execute(&*tempdb);
@@ -112,13 +112,13 @@ impl NewPodcast {
 
         match pd {
             Ok(foo) => if foo.link() != self.link {
-                let tempdb = POOL.clone().get().unwrap();
+                let tempdb = connection().get().unwrap();
                 diesel::replace_into(podcast)
                     .values(self)
                     .execute(&*tempdb)?;
             },
             Err(_) => {
-                let tempdb = POOL.clone().get().unwrap();
+                let tempdb = connection().get().unwrap();
                 diesel::insert_into(podcast).values(self).execute(&*tempdb)?;
             }
         }
diff --git a/hammond-data/src/models/queryables.rs b/hammond-data/src/models/queryables.rs
index da1c2f0..14502f2 100644
--- a/hammond-data/src/models/queryables.rs
+++ b/hammond-data/src/models/queryables.rs
@@ -10,7 +10,7 @@ use feed::Feed;
 use errors::*;
 use models::insertables::NewPodcast;
 
-use POOL;
+use connection;
 
 use std::io::Read;
 use std::str::FromStr;
@@ -129,7 +129,7 @@ impl Episode {
     }
 
     pub fn save(&self) -> QueryResult<Episode> {
-        let tempdb = POOL.clone().get().unwrap();
+        let tempdb = connection().get().unwrap();
         self.save_changes::<Episode>(&*tempdb)
     }
 }
@@ -226,7 +226,7 @@ impl Podcast {
     }
 
     pub fn save(&self) -> QueryResult<Podcast> {
-        let tempdb = POOL.clone().get().unwrap();
+        let tempdb = connection().get().unwrap();
         self.save_changes::<Podcast>(&*tempdb)
     }
 }
@@ -285,7 +285,7 @@ impl<'a> Source {
     }
 
     pub fn save(&self) -> QueryResult<Source> {
-        let tempdb = POOL.clone().get().unwrap();
+        let tempdb = connection().get().unwrap();
         self.save_changes::<Source>(&*tempdb)
     }
 
diff --git a/hammond-data/src/utils.rs b/hammond-data/src/utils.rs
index c54cdfd..6b3c362 100644
--- a/hammond-data/src/utils.rs
+++ b/hammond-data/src/utils.rs
@@ -1,9 +1,7 @@
 use rayon::prelude::*;
 use chrono::prelude::*;
 
-use r2d2;
 use diesel::sqlite::SqliteConnection;
-use r2d2_diesel::ConnectionManager;
 
 use errors::*;
 use dbqueries;
@@ -11,30 +9,9 @@ use models::Episode;
 
 use std::path::Path;
 use std::fs;
-use std::sync::Arc;
-use std::time::Duration;
-
-use POOL;
 
 embed_migrations!("migrations/");
 
-pub type Pool = Arc<r2d2::Pool<ConnectionManager<SqliteConnection>>>;
-
-pub fn init() -> Result<()> {
-    let con = POOL.clone().get().unwrap();
-    run_migration_on(&*con)
-}
-
-pub fn init_pool(db_path: &str) -> Pool {
-    let config = r2d2::Config::builder()
-        .connection_timeout(Duration::from_secs(60))
-        .build();
-    let manager = ConnectionManager::<SqliteConnection>::new(db_path);
-    let pool = r2d2::Pool::new(config, manager).expect("Failed to create pool.");
-    info!("Database pool initialized.");
-    Arc::new(pool)
-}
-
 pub fn run_migration_on(connection: &SqliteConnection) -> Result<()> {
     info!("Running DB Migrations...");
     embedded_migrations::run(connection)?;
diff --git a/hammond-gtk/src/main.rs b/hammond-gtk/src/main.rs
index ba4a7d2..63b4860 100644
--- a/hammond-gtk/src/main.rs
+++ b/hammond-gtk/src/main.rs
@@ -105,7 +105,6 @@ fn main() {
     // TODO: make the the logger a cli -vv option
     loggerv::init_with_level(LogLevel::Info).unwrap();
     static_resource::init().expect("Something went wrong with the resource file initialization.");
-    hammond_data::utils::init().expect("Hammond Initialazation failed.");
 
     let application = gtk::Application::new("org.gnome.Hammond", gio::ApplicationFlags::empty())
         .expect("Initialization failed...");
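
Reviewer note: the pattern this patch converges on is a private lazy_static
r2d2 pool behind a `connection()` accessor that hands out cheap clones, with
DB_PATH swapped for a tempdir-backed path under #[cfg(test)]. A minimal
self-contained sketch of that pattern follows, assuming the same pre-1.0
diesel, r2d2 0.7 (the `Config::builder()` era), and r2d2-diesel crates the
diff itself uses; the `demo()` function and the "demo.db" path are
illustrative only and not part of the patch.

    #[macro_use]
    extern crate lazy_static;
    extern crate diesel;
    extern crate r2d2;
    extern crate r2d2_diesel;

    use diesel::Connection;
    use diesel::SqliteConnection;
    use r2d2_diesel::ConnectionManager;

    // An r2d2 Pool is reference-counted internally, so cloning it is a
    // cheap handle copy; no extra Arc wrapper is needed (hence the removal
    // of `Arc<...>` from utils.rs above).
    type Pool = r2d2::Pool<ConnectionManager<SqliteConnection>>;

    lazy_static! {
        // Built once, on first access, exactly like POOL in lib.rs.
        static ref POOL: Pool = init_pool("demo.db");
    }

    /// Every caller gets its own clone of the one global pool.
    pub fn connection() -> Pool {
        POOL.clone()
    }

    fn init_pool(db_path: &str) -> Pool {
        let config = r2d2::Config::builder().build();
        let manager = ConnectionManager::<SqliteConnection>::new(db_path);
        r2d2::Pool::new(config, manager).expect("Failed to create pool.")
    }

    fn demo() {
        // Check a connection out of the pool; it is returned on drop.
        let con = connection().get().unwrap();
        // `&*con` derefs r2d2's pooled wrapper to &SqliteConnection,
        // which is what diesel's transaction/query methods expect.
        con.transaction::<(), diesel::result::Error, _>(|| Ok(()))
            .unwrap();
    }

The upshot for call sites is visible throughout the diff: `POOL.clone().get()`
becomes `connection().get()`, and the tests no longer need the TempDB tuple
struct, since a #[cfg(test)] DB_PATH pointing into a lazily created tempdir
gives each test binary its own throwaway database.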