From 47b32ac000b5c43afafba200ca914f72a5c8c1b6 Mon Sep 17 00:00:00 2001
From: Jordan Petridis
Date: Sun, 19 Nov 2017 22:39:51 +0200
Subject: [PATCH] Removed the Database type alias.

---
 hammond-data/src/dbqueries.rs          | 21 +++++++++--------
 hammond-data/src/lib.rs                |  5 -----
 hammond-data/src/models/insertables.rs |  2 --
 hammond-data/src/models/queryables.rs  |  9 ++++----
 hammond-data/src/utils.rs              | 31 +++++++++++++-------------
 hammond-downloader/src/downloader.rs   |  7 +++---
 6 files changed, 32 insertions(+), 43 deletions(-)

diff --git a/hammond-data/src/dbqueries.rs b/hammond-data/src/dbqueries.rs
index 18701b9..101fdba 100644
--- a/hammond-data/src/dbqueries.rs
+++ b/hammond-data/src/dbqueries.rs
@@ -2,7 +2,6 @@ use diesel::prelude::*;
 use diesel;
 
 use models::{Episode, Podcast, Source};
-use Database;
 
 use chrono::prelude::*;
 /// Random db querries helper functions.
@@ -13,7 +12,7 @@ use POOL;
 pub fn get_sources() -> QueryResult<Vec<Source>> {
     use schema::source::dsl::*;
-    let con = POOL.get().unwrap();
+    let con = POOL.clone().get().unwrap();
     let s = source.load::<Source>(&*con);
     // s.iter().for_each(|x| println!("{:#?}", x));
     s
 }
@@ -22,14 +21,14 @@ pub fn get_sources() -> QueryResult<Vec<Source>> {
 pub fn get_podcasts() -> QueryResult<Vec<Podcast>> {
     use schema::podcast::dsl::*;
 
-    let con = POOL.get().unwrap();
+    let con = POOL.clone().get().unwrap();
     podcast.load::<Podcast>(&*con)
 }
 
 pub fn get_episodes() -> QueryResult<Vec<Episode>> {
     use schema::episode::dsl::*;
 
-    let con = POOL.get().unwrap();
+    let con = POOL.clone().get().unwrap();
     episode.order(epoch.desc()).load::<Episode>(&*con)
 }
 
@@ -114,7 +113,7 @@ pub fn get_pd_episodes_limit(
 pub fn get_source_from_uri(uri_: &str) -> QueryResult<Source> {
     use schema::source::dsl::*;
 
-    let con = POOL.get().unwrap();
+    let con = POOL.clone().get().unwrap();
     source.filter(uri.eq(uri_)).get_result::<Source>(&*con)
 }
 
@@ -130,13 +129,13 @@ pub fn get_episode_from_uri(con: &SqliteConnection, uri_: &str) -> QueryResult<Episode> {
     episode.filter(uri.eq(uri_)).get_result::<Episode>(con)
 }
 
-pub fn remove_feed(db: &Database, pd: &Podcast) -> QueryResult<usize> {
-    let tempdb = db.lock().unwrap();
+pub fn remove_feed(pd: &Podcast) -> QueryResult<usize> {
+    let con = POOL.clone().get().unwrap();
 
-    tempdb.transaction(|| -> QueryResult<usize> {
-        delete_source(&tempdb, pd.source_id())?;
-        delete_podcast(&tempdb, *pd.id())?;
-        delete_podcast_episodes(&tempdb, *pd.id())
+    con.transaction(|| -> QueryResult<usize> {
+        delete_source(&*con, pd.source_id())?;
+        delete_podcast(&*con, *pd.id())?;
+        delete_podcast_episodes(&*con, *pd.id())
     })
 }
 
diff --git a/hammond-data/src/lib.rs b/hammond-data/src/lib.rs
index 20206a1..04e1ed0 100644
--- a/hammond-data/src/lib.rs
+++ b/hammond-data/src/lib.rs
@@ -32,12 +32,7 @@ pub mod errors;
 mod parser;
 mod schema;
 
-use diesel::prelude::*;
-
 use std::path::PathBuf;
-use std::sync::{Arc, Mutex};
-
-pub type Database = Arc<Mutex<SqliteConnection>>;
 
 lazy_static!{
     #[allow(dead_code)]
diff --git a/hammond-data/src/models/insertables.rs b/hammond-data/src/models/insertables.rs
index 507b12e..f4c16bc 100644
--- a/hammond-data/src/models/insertables.rs
+++ b/hammond-data/src/models/insertables.rs
@@ -61,8 +61,6 @@ impl<'a> NewEpisode<'a> {
     // TODO: Currently using diesel from master git.
     // Watch out for v0.99.0 beta and change the toml.
     // TODO: Refactor into batch indexes instead.
-    // TODO: Refactor so all index methods take consistent arguments
-    // like NewEpisode.index wants Sqliteconnection where the other take a Database
     pub fn index(&self, con: &SqliteConnection) -> QueryResult<()> {
         use schema::episode::dsl::*;
 
diff --git a/hammond-data/src/models/queryables.rs b/hammond-data/src/models/queryables.rs
index a9eb1b0..da1c2f0 100644
--- a/hammond-data/src/models/queryables.rs
+++ b/hammond-data/src/models/queryables.rs
@@ -10,7 +10,6 @@ use feed::Feed;
 
 use errors::*;
 use models::insertables::NewPodcast;
-use Database;
 use POOL;
 
 use std::io::Read;
@@ -129,8 +128,8 @@ impl Episode {
         self.favorite = b
     }
 
-    pub fn save(&self, db: &Database) -> QueryResult<Episode> {
-        let tempdb = db.lock().unwrap();
+    pub fn save(&self) -> QueryResult<Episode> {
+        let tempdb = POOL.clone().get().unwrap();
         self.save_changes::<Episode>(&*tempdb)
     }
 }
@@ -226,8 +225,8 @@ impl Podcast {
         self.always_dl = b
     }
 
-    pub fn save(&self, db: &Database) -> QueryResult<Podcast> {
-        let tempdb = db.lock().unwrap();
+    pub fn save(&self) -> QueryResult<Podcast> {
+        let tempdb = POOL.clone().get().unwrap();
         self.save_changes::<Podcast>(&*tempdb)
     }
 }
diff --git a/hammond-data/src/utils.rs b/hammond-data/src/utils.rs
index 112ee11..d8715eb 100644
--- a/hammond-data/src/utils.rs
+++ b/hammond-data/src/utils.rs
@@ -8,12 +8,11 @@ use r2d2_diesel::ConnectionManager;
 
 use errors::*;
 use dbqueries;
-use Database;
+use POOL;
 use models::Episode;
 
 use std::path::Path;
 use std::fs;
-use std::sync::Arc;
 
 use DB_PATH;
 
@@ -46,16 +45,16 @@ pub fn establish_connection() -> SqliteConnection {
 }
 
 // TODO: Write unit test.
-fn download_checker(db: &Database) -> Result<()> {
+fn download_checker() -> Result<()> {
     let episodes = {
-        let tempdb = db.lock().unwrap();
+        let tempdb = POOL.clone().get().unwrap();
         dbqueries::get_downloaded_episodes(&tempdb)?
     };
 
     episodes.into_par_iter().for_each(|mut ep| {
         if !Path::new(ep.local_uri().unwrap()).exists() {
             ep.set_local_uri(None);
-            let res = ep.save(&Arc::clone(db));
+            let res = ep.save();
             if let Err(err) = res {
                 error!("Error while trying to update episode: {:#?}", ep);
                 error!("Error: {}", err);
@@ -67,10 +66,10 @@ fn download_checker(db: &Database) -> Result<()> {
 }
 
 // TODO: Write unit test.
-fn played_cleaner(db: &Database) -> Result<()> {
+fn played_cleaner() -> Result<()> {
     let episodes = {
-        let tempdb = db.lock().unwrap();
-        dbqueries::get_played_episodes(&tempdb)?
+        let tempdb = POOL.clone().get().unwrap();
+        dbqueries::get_played_episodes(&*tempdb)?
     };
 
     let now_utc = Utc::now().timestamp() as i32;
@@ -80,7 +79,7 @@ fn played_cleaner(db: &Database) -> Result<()> {
             // TODO: expose a config and a user set option.
             let limit = played + 172_800; // add 2days in seconds
             if now_utc > limit {
-                let e = delete_local_content(&Arc::clone(db), &mut ep);
+                let e = delete_local_content(&mut ep);
                 if let Err(err) = e {
                     error!("Error while trying to delete file: {:?}", ep.local_uri());
                     error!("Error: {}", err);
@@ -94,14 +93,14 @@ fn played_cleaner(db: &Database) -> Result<()> {
 }
 
 // TODO: Write unit test.
-pub fn delete_local_content(db: &Database, ep: &mut Episode) -> Result<()> {
+pub fn delete_local_content(ep: &mut Episode) -> Result<()> {
     if ep.local_uri().is_some() {
         let uri = ep.local_uri().unwrap().to_owned();
         if Path::new(&uri).exists() {
             let res = fs::remove_file(&uri);
             if res.is_ok() {
                 ep.set_local_uri(None);
-                ep.save(db)?;
+                ep.save()?;
             } else {
                 error!("Error while trying to delete file: {}", uri);
                 error!("Error: {}", res.unwrap_err());
@@ -116,15 +115,15 @@ pub fn delete_local_content(db: &Database, ep: &mut Episode) -> Result<()> {
     Ok(())
 }
 
-pub fn set_played_now(db: &Database, ep: &mut Episode) -> Result<()> {
+pub fn set_played_now(ep: &mut Episode) -> Result<()> {
     let epoch = Utc::now().timestamp() as i32;
     ep.set_played(Some(epoch));
-    ep.save(db)?;
+    ep.save()?;
     Ok(())
 }
 
-pub fn checkup(db: &Database) -> Result<()> {
-    download_checker(db)?;
-    played_cleaner(db)?;
+pub fn checkup() -> Result<()> {
+    download_checker()?;
+    played_cleaner()?;
     Ok(())
 }
diff --git a/hammond-downloader/src/downloader.rs b/hammond-downloader/src/downloader.rs
index 49037c8..2f1209a 100644
--- a/hammond-downloader/src/downloader.rs
+++ b/hammond-downloader/src/downloader.rs
@@ -10,7 +10,6 @@ use std::io::{BufWriter, Read, Write};
 use std::path::Path;
 
 use errors::*;
-use hammond_data::Database;
 use hammond_data::models::{Episode, Podcast};
 use hammond_data::{DL_DIR, HAMMOND_CACHE};
 
@@ -118,7 +117,7 @@ pub fn get_download_folder(pd_title: &str) -> Result<String> {
 }
 
 // TODO: Refactor
-pub fn get_episode(connection: &Database, ep: &mut Episode, download_folder: &str) -> Result<()> {
+pub fn get_episode(ep: &mut Episode, download_folder: &str) -> Result<()> {
     // Check if its alrdy downloaded
     if ep.local_uri().is_some() {
         if Path::new(ep.local_uri().unwrap()).exists() {
@@ -127,7 +126,7 @@ pub fn get_episode(connection: &Database, ep: &mut Episode, download_folder: &st
 
         // If the path is not valid, then set it to None.
         ep.set_local_uri(None);
-        ep.save(connection)?;
+        ep.save()?;
     };
 
     let res = download_into(download_folder, ep.title().unwrap(), ep.uri());
@@ -135,7 +134,7 @@ pub fn get_episode(connection: &Database, ep: &mut Episode, download_folder: &st
     if let Ok(path) = res {
         // If download succedes set episode local_uri to dlpath.
        ep.set_local_uri(Some(&path));
-        ep.save(connection)?;
+        ep.save()?;
         Ok(())
     } else {
        error!("Something whent wrong while downloading.");
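
Note on the POOL global used throughout the hunks above: the lib.rs hunk stops right at the lazy_static! block that presumably defines it, so the pool's exact definition is not part of this patch. What follows is only a minimal sketch, assuming the r2d2 0.8 builder API together with r2d2_diesel's ConnectionManager (which utils.rs already imports); the init_pool helper and the literal "hammond.db" path are illustrative placeholders, not the crate's actual code.

    #[macro_use]
    extern crate lazy_static;
    extern crate diesel;
    extern crate r2d2;
    extern crate r2d2_diesel;

    use diesel::sqlite::SqliteConnection;
    use r2d2_diesel::ConnectionManager;

    type Pool = r2d2::Pool<ConnectionManager<SqliteConnection>>;

    lazy_static! {
        // Hypothetical: the real crate derives the database path from DB_PATH.
        static ref POOL: Pool = init_pool("hammond.db");
    }

    fn init_pool(db_url: &str) -> Pool {
        let manager = ConnectionManager::<SqliteConnection>::new(db_url);
        r2d2::Pool::builder()
            .build(manager)
            .expect("Failed to create the database connection pool.")
    }

    fn main() {
        // After this patch, callers check a connection out of the pool
        // instead of locking a shared Arc<Mutex<SqliteConnection>>.
        let con = POOL.get().unwrap();
        let _ = &*con; // diesel queries run against &*con
    }

Cloning an r2d2 Pool only copies a reference-counted handle, so the POOL.clone().get() pattern at the call sites above is cheap; it is what lets Episode::save(), Podcast::save(), dbqueries::remove_feed(), utils::checkup() and downloader::get_episode() drop their &Database parameters and grab a pooled connection internally.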