Removed the Database type alias.

This commit is contained in:
Jordan Petridis 2017-11-19 22:39:51 +02:00
parent 55442529a8
commit 47b32ac000
No known key found for this signature in database
GPG Key ID: CEABAD9F5683B9A6
6 changed files with 32 additions and 43 deletions

View File

@ -2,7 +2,6 @@
use diesel::prelude::*;
use diesel;
use models::{Episode, Podcast, Source};
use Database;
use chrono::prelude::*;
/// Random db queries helper functions.
@ -13,7 +12,7 @@ use POOL;
pub fn get_sources() -> QueryResult<Vec<Source>> {
use schema::source::dsl::*;
let con = POOL.get().unwrap();
let con = POOL.clone().get().unwrap();
let s = source.load::<Source>(&*con);
// s.iter().for_each(|x| println!("{:#?}", x));
s
@ -22,14 +21,14 @@ pub fn get_sources() -> QueryResult<Vec<Source>> {
pub fn get_podcasts() -> QueryResult<Vec<Podcast>> {
use schema::podcast::dsl::*;
let con = POOL.get().unwrap();
let con = POOL.clone().get().unwrap();
podcast.load::<Podcast>(&*con)
}
pub fn get_episodes() -> QueryResult<Vec<Episode>> {
use schema::episode::dsl::*;
let con = POOL.get().unwrap();
let con = POOL.clone().get().unwrap();
episode.order(epoch.desc()).load::<Episode>(&*con)
}
@ -114,7 +113,7 @@ pub fn get_pd_episodes_limit(
pub fn get_source_from_uri(uri_: &str) -> QueryResult<Source> {
use schema::source::dsl::*;
let con = POOL.get().unwrap();
let con = POOL.clone().get().unwrap();
source.filter(uri.eq(uri_)).get_result::<Source>(&*con)
}
@ -130,13 +129,13 @@ pub fn get_episode_from_uri(con: &SqliteConnection, uri_: &str) -> QueryResult<E
episode.filter(uri.eq(uri_)).get_result::<Episode>(con)
}
pub fn remove_feed(db: &Database, pd: &Podcast) -> QueryResult<usize> {
let tempdb = db.lock().unwrap();
pub fn remove_feed(pd: &Podcast) -> QueryResult<usize> {
let con = POOL.clone().get().unwrap();
tempdb.transaction(|| -> QueryResult<usize> {
delete_source(&tempdb, pd.source_id())?;
delete_podcast(&tempdb, *pd.id())?;
delete_podcast_episodes(&tempdb, *pd.id())
con.transaction(|| -> QueryResult<usize> {
delete_source(&*con, pd.source_id())?;
delete_podcast(&*con, *pd.id())?;
delete_podcast_episodes(&*con, *pd.id())
})
}

View File

@ -32,12 +32,7 @@ pub mod errors;
mod parser;
mod schema;
use diesel::prelude::*;
use std::path::PathBuf;
use std::sync::{Arc, Mutex};
pub type Database = Arc<Mutex<SqliteConnection>>;
lazy_static!{
#[allow(dead_code)]

View File

@ -61,8 +61,6 @@ impl<'a> NewEpisode<'a> {
// TODO: Currently using diesel from master git.
// Watch out for v0.99.0 beta and change the toml.
// TODO: Refactor into batch indexes instead.
// TODO: Refactor so all index methods take consistent arguments
// like NewEpisode.index wants Sqliteconnection where the other take a Database
pub fn index(&self, con: &SqliteConnection) -> QueryResult<()> {
use schema::episode::dsl::*;

View File

@ -10,7 +10,6 @@ use feed::Feed;
use errors::*;
use models::insertables::NewPodcast;
use Database;
use POOL;
use std::io::Read;
@ -129,8 +128,8 @@ impl Episode {
self.favorite = b
}
pub fn save(&self, db: &Database) -> QueryResult<Episode> {
let tempdb = db.lock().unwrap();
pub fn save(&self) -> QueryResult<Episode> {
let tempdb = POOL.clone().get().unwrap();
self.save_changes::<Episode>(&*tempdb)
}
}
@ -226,8 +225,8 @@ impl Podcast {
self.always_dl = b
}
pub fn save(&self, db: &Database) -> QueryResult<Podcast> {
let tempdb = db.lock().unwrap();
pub fn save(&self) -> QueryResult<Podcast> {
let tempdb = POOL.clone().get().unwrap();
self.save_changes::<Podcast>(&*tempdb)
}
}

View File

@ -8,12 +8,11 @@ use r2d2_diesel::ConnectionManager;
use errors::*;
use dbqueries;
use Database;
use POOL;
use models::Episode;
use std::path::Path;
use std::fs;
use std::sync::Arc;
use DB_PATH;
@ -46,16 +45,16 @@ pub fn establish_connection() -> SqliteConnection {
}
// TODO: Write unit test.
fn download_checker(db: &Database) -> Result<()> {
fn download_checker() -> Result<()> {
let episodes = {
let tempdb = db.lock().unwrap();
let tempdb = POOL.clone().get().unwrap();
dbqueries::get_downloaded_episodes(&tempdb)?
};
episodes.into_par_iter().for_each(|mut ep| {
if !Path::new(ep.local_uri().unwrap()).exists() {
ep.set_local_uri(None);
let res = ep.save(&Arc::clone(db));
let res = ep.save();
if let Err(err) = res {
error!("Error while trying to update episode: {:#?}", ep);
error!("Error: {}", err);
@ -67,10 +66,10 @@ fn download_checker(db: &Database) -> Result<()> {
}
// TODO: Write unit test.
fn played_cleaner(db: &Database) -> Result<()> {
fn played_cleaner() -> Result<()> {
let episodes = {
let tempdb = db.lock().unwrap();
dbqueries::get_played_episodes(&tempdb)?
let tempdb = POOL.clone().get().unwrap();
dbqueries::get_played_episodes(&*tempdb)?
};
let now_utc = Utc::now().timestamp() as i32;
@ -80,7 +79,7 @@ fn played_cleaner(db: &Database) -> Result<()> {
// TODO: expose a config and a user set option.
let limit = played + 172_800; // add 2days in seconds
if now_utc > limit {
let e = delete_local_content(&Arc::clone(db), &mut ep);
let e = delete_local_content(&mut ep);
if let Err(err) = e {
error!("Error while trying to delete file: {:?}", ep.local_uri());
error!("Error: {}", err);
@ -94,14 +93,14 @@ fn played_cleaner(db: &Database) -> Result<()> {
}
// TODO: Write unit test.
pub fn delete_local_content(db: &Database, ep: &mut Episode) -> Result<()> {
pub fn delete_local_content(ep: &mut Episode) -> Result<()> {
if ep.local_uri().is_some() {
let uri = ep.local_uri().unwrap().to_owned();
if Path::new(&uri).exists() {
let res = fs::remove_file(&uri);
if res.is_ok() {
ep.set_local_uri(None);
ep.save(db)?;
ep.save()?;
} else {
error!("Error while trying to delete file: {}", uri);
error!("Error: {}", res.unwrap_err());
@ -116,15 +115,15 @@ pub fn delete_local_content(db: &Database, ep: &mut Episode) -> Result<()> {
Ok(())
}
pub fn set_played_now(db: &Database, ep: &mut Episode) -> Result<()> {
pub fn set_played_now(ep: &mut Episode) -> Result<()> {
let epoch = Utc::now().timestamp() as i32;
ep.set_played(Some(epoch));
ep.save(db)?;
ep.save()?;
Ok(())
}
pub fn checkup(db: &Database) -> Result<()> {
download_checker(db)?;
played_cleaner(db)?;
pub fn checkup() -> Result<()> {
download_checker()?;
played_cleaner()?;
Ok(())
}

View File

@ -10,7 +10,6 @@ use std::io::{BufWriter, Read, Write};
use std::path::Path;
use errors::*;
use hammond_data::Database;
use hammond_data::models::{Episode, Podcast};
use hammond_data::{DL_DIR, HAMMOND_CACHE};
@ -118,7 +117,7 @@ pub fn get_download_folder(pd_title: &str) -> Result<String> {
}
// TODO: Refactor
pub fn get_episode(connection: &Database, ep: &mut Episode, download_folder: &str) -> Result<()> {
pub fn get_episode(ep: &mut Episode, download_folder: &str) -> Result<()> {
// Check if it's already downloaded
if ep.local_uri().is_some() {
if Path::new(ep.local_uri().unwrap()).exists() {
@ -127,7 +126,7 @@ pub fn get_episode(connection: &Database, ep: &mut Episode, download_folder: &st
// If the path is not valid, then set it to None.
ep.set_local_uri(None);
ep.save(connection)?;
ep.save()?;
};
let res = download_into(download_folder, ep.title().unwrap(), ep.uri());
@ -135,7 +134,7 @@ pub fn get_episode(connection: &Database, ep: &mut Episode, download_folder: &st
if let Ok(path) = res {
// If download succeeds, set episode local_uri to dlpath.
ep.set_local_uri(Some(&path));
ep.save(connection)?;
ep.save()?;
Ok(())
} else {
error!("Something went wrong while downloading.");