From beaeeffba827aa1921685b488450e7e0b2f4808a Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Sun, 19 Nov 2017 23:08:36 +0200 Subject: [PATCH] Refactored dbqueries. Refactored so the dbqueries.rs module *should* be the only piece of code that interacts with a database connection. Insert stuff will be moved too. --- hammond-data/src/dbqueries.rs | 113 +++++++++++++------------ hammond-data/src/models/insertables.rs | 10 +-- hammond-data/src/utils.rs | 11 +-- 3 files changed, 65 insertions(+), 69 deletions(-) diff --git a/hammond-data/src/dbqueries.rs b/hammond-data/src/dbqueries.rs index 101fdba..18bdbc1 100644 --- a/hammond-data/src/dbqueries.rs +++ b/hammond-data/src/dbqueries.rs @@ -13,9 +13,7 @@ pub fn get_sources() -> QueryResult> { use schema::source::dsl::*; let con = POOL.clone().get().unwrap(); - let s = source.load::(&*con); - // s.iter().for_each(|x| println!("{:#?}", x)); - s + source.load::(&*con) } pub fn get_podcasts() -> QueryResult> { @@ -32,82 +30,88 @@ pub fn get_episodes() -> QueryResult> { episode.order(epoch.desc()).load::(&*con) } -pub fn get_downloaded_episodes(con: &SqliteConnection) -> QueryResult> { +pub fn get_downloaded_episodes() -> QueryResult> { use schema::episode::dsl::*; - episode.filter(local_uri.is_not_null()).load::(con) + let con = POOL.clone().get().unwrap(); + episode + .filter(local_uri.is_not_null()) + .load::(&*con) } -pub fn get_played_episodes(con: &SqliteConnection) -> QueryResult> { +pub fn get_played_episodes() -> QueryResult> { use schema::episode::dsl::*; - episode.filter(played.is_not_null()).load::(con) + let con = POOL.clone().get().unwrap(); + episode.filter(played.is_not_null()).load::(&*con) } -pub fn get_episode_from_id(con: &SqliteConnection, ep_id: i32) -> QueryResult { +pub fn get_episode_from_id(ep_id: i32) -> QueryResult { use schema::episode::dsl::*; - episode.filter(id.eq(ep_id)).get_result::(con) + let con = POOL.clone().get().unwrap(); + episode.filter(id.eq(ep_id)).get_result::(&*con) } -pub fn 
get_episode_local_uri_from_id( - con: &SqliteConnection, - ep_id: i32, -) -> QueryResult> { +pub fn get_episode_local_uri_from_id(ep_id: i32) -> QueryResult> { use schema::episode::dsl::*; + let con = POOL.clone().get().unwrap(); + episode .filter(id.eq(ep_id)) .select(local_uri) - .get_result::>(con) + .get_result::>(&*con) } -pub fn get_episodes_with_limit(con: &SqliteConnection, limit: u32) -> QueryResult> { +pub fn get_episodes_with_limit(limit: u32) -> QueryResult> { use schema::episode::dsl::*; + let con = POOL.clone().get().unwrap(); + episode .order(epoch.desc()) .limit(i64::from(limit)) - .load::(con) + .load::(&*con) } -pub fn get_podcast_from_id(con: &SqliteConnection, pid: i32) -> QueryResult { +pub fn get_podcast_from_id(pid: i32) -> QueryResult { use schema::podcast::dsl::*; - podcast.filter(id.eq(pid)).get_result::(con) + let con = POOL.clone().get().unwrap(); + podcast.filter(id.eq(pid)).get_result::(&*con) } -pub fn get_pd_episodes(con: &SqliteConnection, parent: &Podcast) -> QueryResult> { +pub fn get_pd_episodes(parent: &Podcast) -> QueryResult> { use schema::episode::dsl::*; + let con = POOL.clone().get().unwrap(); + Episode::belonging_to(parent) .order(epoch.desc()) - .load::(con) + .load::(&*con) } -pub fn get_pd_unplayed_episodes( - con: &SqliteConnection, - parent: &Podcast, -) -> QueryResult> { +pub fn get_pd_unplayed_episodes(parent: &Podcast) -> QueryResult> { use schema::episode::dsl::*; + let con = POOL.clone().get().unwrap(); + Episode::belonging_to(parent) .filter(played.is_null()) .order(epoch.desc()) - .load::(con) + .load::(&*con) } -pub fn get_pd_episodes_limit( - con: &SqliteConnection, - parent: &Podcast, - limit: u32, -) -> QueryResult> { +pub fn get_pd_episodes_limit(parent: &Podcast, limit: u32) -> QueryResult> { use schema::episode::dsl::*; + let con = POOL.clone().get().unwrap(); + Episode::belonging_to(parent) .order(epoch.desc()) .limit(i64::from(limit)) - .load::(con) + .load::(&*con) } pub fn get_source_from_uri(uri_: 
&str) -> QueryResult { @@ -117,59 +121,62 @@ pub fn get_source_from_uri(uri_: &str) -> QueryResult { source.filter(uri.eq(uri_)).get_result::(&*con) } -pub fn get_podcast_from_title(con: &SqliteConnection, title_: &str) -> QueryResult { +pub fn get_podcast_from_title(title_: &str) -> QueryResult { use schema::podcast::dsl::*; - podcast.filter(title.eq(title_)).get_result::(con) + let con = POOL.clone().get().unwrap(); + podcast + .filter(title.eq(title_)) + .get_result::(&*con) } -pub fn get_episode_from_uri(con: &SqliteConnection, uri_: &str) -> QueryResult { +pub fn get_episode_from_uri(uri_: &str) -> QueryResult { use schema::episode::dsl::*; - episode.filter(uri.eq(uri_)).get_result::(con) + let con = POOL.clone().get().unwrap(); + episode.filter(uri.eq(uri_)).get_result::(&*con) } pub fn remove_feed(pd: &Podcast) -> QueryResult { let con = POOL.clone().get().unwrap(); con.transaction(|| -> QueryResult { - delete_source(&*con, pd.source_id())?; - delete_podcast(&*con, *pd.id())?; - delete_podcast_episodes(&*con, *pd.id()) + delete_source(pd.source_id())?; + delete_podcast(*pd.id())?; + delete_podcast_episodes(*pd.id()) }) } -pub fn delete_source(connection: &SqliteConnection, source_id: i32) -> QueryResult { +pub fn delete_source(source_id: i32) -> QueryResult { use schema::source::dsl::*; - diesel::delete(source.filter(id.eq(source_id))).execute(connection) + let con = POOL.clone().get().unwrap(); + diesel::delete(source.filter(id.eq(source_id))).execute(&*con) } -pub fn delete_podcast(connection: &SqliteConnection, podcast_id: i32) -> QueryResult { +pub fn delete_podcast(podcast_id: i32) -> QueryResult { use schema::podcast::dsl::*; - diesel::delete(podcast.filter(id.eq(podcast_id))).execute(connection) + let con = POOL.clone().get().unwrap(); + diesel::delete(podcast.filter(id.eq(podcast_id))).execute(&*con) } -pub fn delete_podcast_episodes( - connection: &SqliteConnection, - parent_id: i32, -) -> QueryResult { +pub fn delete_podcast_episodes(parent_id: 
i32) -> QueryResult { use schema::episode::dsl::*; - diesel::delete(episode.filter(podcast_id.eq(parent_id))).execute(connection) + let con = POOL.clone().get().unwrap(); + diesel::delete(episode.filter(podcast_id.eq(parent_id))).execute(&*con) } -pub fn update_none_to_played_now( - connection: &SqliteConnection, - parent: &Podcast, -) -> QueryResult { +pub fn update_none_to_played_now(parent: &Podcast) -> QueryResult { use schema::episode::dsl::*; + let con = POOL.clone().get().unwrap(); + let epoch_now = Utc::now().timestamp() as i32; - connection.transaction(|| -> QueryResult { + con.transaction(|| -> QueryResult { diesel::update(Episode::belonging_to(parent).filter(played.is_null())) .set(played.eq(Some(epoch_now))) - .execute(connection) + .execute(&*con) }) } diff --git a/hammond-data/src/models/insertables.rs b/hammond-data/src/models/insertables.rs index f4c16bc..69f7be6 100644 --- a/hammond-data/src/models/insertables.rs +++ b/hammond-data/src/models/insertables.rs @@ -67,7 +67,7 @@ impl<'a> NewEpisode<'a> { let ep = { // let tempdb = db.lock().unwrap(); // dbqueries::get_episode_from_uri(&tempdb, self.uri.unwrap()) - dbqueries::get_episode_from_uri(con, self.uri.unwrap()) + dbqueries::get_episode_from_uri(self.uri.unwrap()) }; match ep { @@ -103,16 +103,12 @@ impl NewPodcast { // Look out for when tryinto lands into stable. pub fn into_podcast(self) -> Result { self.index()?; - let tempdb = POOL.clone().get().unwrap(); - Ok(dbqueries::get_podcast_from_title(&*tempdb, &self.title)?) + Ok(dbqueries::get_podcast_from_title(&self.title)?) 
} fn index(&self) -> QueryResult<()> { use schema::podcast::dsl::*; - let pd = { - let tempdb = POOL.clone().get().unwrap(); - dbqueries::get_podcast_from_title(&*tempdb, &self.title) - }; + let pd = dbqueries::get_podcast_from_title(&self.title); match pd { Ok(foo) => if foo.link() != self.link { diff --git a/hammond-data/src/utils.rs b/hammond-data/src/utils.rs index d8715eb..324fee4 100644 --- a/hammond-data/src/utils.rs +++ b/hammond-data/src/utils.rs @@ -8,7 +8,6 @@ use r2d2_diesel::ConnectionManager; use errors::*; use dbqueries; -use POOL; use models::Episode; use std::path::Path; @@ -46,10 +45,7 @@ pub fn establish_connection() -> SqliteConnection { // TODO: Write unit test. fn download_checker() -> Result<()> { - let episodes = { - let tempdb = POOL.clone().get().unwrap(); - dbqueries::get_downloaded_episodes(&tempdb)? - }; + let episodes = dbqueries::get_downloaded_episodes()?; episodes.into_par_iter().for_each(|mut ep| { if !Path::new(ep.local_uri().unwrap()).exists() { @@ -67,10 +63,7 @@ fn download_checker() -> Result<()> { // TODO: Write unit test. fn played_cleaner() -> Result<()> { - let episodes = { - let tempdb = POOL.clone().get().unwrap(); - dbqueries::get_played_episodes(&*tempdb)? - }; + let episodes = dbqueries::get_played_episodes()?; let now_utc = Utc::now().timestamp() as i32; episodes.into_par_iter().for_each(|mut ep| {