Partial cleanup of dbqueries.

Jordan Petridis 2017-11-05 14:56:24 +02:00
parent 27c73e3d30
commit 7b5bca2162
No known key found for this signature in database
GPG Key ID: CEABAD9F5683B9A6
6 changed files with 73 additions and 100 deletions

View File

@@ -49,6 +49,8 @@ fn played_cleaner(db: &Database) -> Result<()> {
             if let Err(err) = e {
                 error!("Error while trying to delete file: {:?}", ep.local_uri());
                 error!("Error: {}", err);
+            } else {
+                info!("Episode {:?} was deleted succesfully.", ep.title());
             };
         }
     }
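
The added `else` branch above means deletions are now logged on success as well as on failure. A minimal sketch of the same branch shape, with `std::fs` and plain prints standing in for the crate's delete helper and the `error!`/`info!` macros (the `Episode` struct here is a hypothetical stand-in for the real model):

use std::fs;

// Hypothetical stand-in for the crate's Episode model; the real type
// exposes local_uri() and title() accessors instead of public fields.
struct Episode {
    title: Option<String>,
    local_uri: Option<String>,
}

fn delete_local_content(ep: &Episode) {
    if let Some(path) = ep.local_uri.clone() {
        // fs::remove_file plays the role of the real delete routine.
        let e = fs::remove_file(&path);
        if let Err(err) = e {
            eprintln!("Error while trying to delete file: {:?}", ep.local_uri);
            eprintln!("Error: {}", err);
        } else {
            println!("Episode {:?} was deleted successfully.", ep.title);
        };
    }
}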

View File

@@ -1,91 +1,82 @@
-#![cfg_attr(feature = "cargo-clippy", allow(let_and_return))]
-
 use diesel::prelude::*;
 use diesel;
 
 use models::{Episode, Podcast, Source};
 use index_feed::Database;
-use errors::*;
 
 use chrono::prelude::*;
 
 /// Random db querries helper functions.
 /// Probably needs cleanup.
 pub fn get_sources(con: &SqliteConnection) -> QueryResult<Vec<Source>> {
     use schema::source::dsl::*;
 
-    let s = source.load::<Source>(con);
-    s
+    source.load::<Source>(con)
 }
 
 pub fn get_podcasts(con: &SqliteConnection) -> QueryResult<Vec<Podcast>> {
     use schema::podcast::dsl::*;
 
-    let pds = podcast.load::<Podcast>(con);
-    pds
+    podcast.load::<Podcast>(con)
 }
 
 pub fn get_episodes(con: &SqliteConnection) -> QueryResult<Vec<Episode>> {
     use schema::episode::dsl::*;
 
-    let eps = episode.order(epoch.desc()).load::<Episode>(con);
-    eps
+    episode.order(epoch.desc()).load::<Episode>(con)
 }
 
 pub fn get_downloaded_episodes(con: &SqliteConnection) -> QueryResult<Vec<Episode>> {
     use schema::episode::dsl::*;
 
-    let eps = episode.filter(local_uri.is_not_null()).load::<Episode>(con);
-    eps
+    episode.filter(local_uri.is_not_null()).load::<Episode>(con)
 }
 
 pub fn get_played_episodes(con: &SqliteConnection) -> QueryResult<Vec<Episode>> {
     use schema::episode::dsl::*;
 
-    let eps = episode.filter(played.is_not_null()).load::<Episode>(con);
-    eps
+    episode.filter(played.is_not_null()).load::<Episode>(con)
 }
 
-pub fn get_episode(con: &SqliteConnection, ep_id: i32) -> QueryResult<Episode> {
+pub fn get_episode_from_id(con: &SqliteConnection, ep_id: i32) -> QueryResult<Episode> {
     use schema::episode::dsl::*;
 
-    let ep = episode.filter(id.eq(ep_id)).get_result::<Episode>(con);
-    ep
+    episode.filter(id.eq(ep_id)).get_result::<Episode>(con)
 }
 
-pub fn get_episode_local_uri(con: &SqliteConnection, ep_id: i32) -> QueryResult<Option<String>> {
+pub fn get_episode_local_uri_from_id(
+    con: &SqliteConnection,
+    ep_id: i32,
+) -> QueryResult<Option<String>> {
     use schema::episode::dsl::*;
 
-    let ep = episode
+    episode
         .filter(id.eq(ep_id))
         .select(local_uri)
-        .get_result::<Option<String>>(con);
-    ep
+        .get_result::<Option<String>>(con)
 }
 
 pub fn get_episodes_with_limit(con: &SqliteConnection, limit: u32) -> QueryResult<Vec<Episode>> {
     use schema::episode::dsl::*;
 
-    let eps = episode
+    episode
         .order(epoch.desc())
         .limit(i64::from(limit))
-        .load::<Episode>(con);
-    eps
+        .load::<Episode>(con)
 }
 
 pub fn get_podcast_from_id(con: &SqliteConnection, pid: i32) -> QueryResult<Podcast> {
     use schema::podcast::dsl::*;
 
-    let pd = podcast.filter(id.eq(pid)).get_result::<Podcast>(con);
-    pd
+    podcast.filter(id.eq(pid)).get_result::<Podcast>(con)
 }
 
 pub fn get_pd_episodes(con: &SqliteConnection, parent: &Podcast) -> QueryResult<Vec<Episode>> {
     use schema::episode::dsl::*;
 
-    let eps = Episode::belonging_to(parent)
+    Episode::belonging_to(parent)
         .order(epoch.desc())
-        .load::<Episode>(con);
-    eps
+        .load::<Episode>(con)
 }
 
 pub fn get_pd_unplayed_episodes(
@@ -94,11 +85,10 @@ pub fn get_pd_unplayed_episodes(
 ) -> QueryResult<Vec<Episode>> {
     use schema::episode::dsl::*;
 
-    let eps = Episode::belonging_to(parent)
+    Episode::belonging_to(parent)
         .filter(played.is_null())
         .order(epoch.desc())
-        .load::<Episode>(con);
-    eps
+        .load::<Episode>(con)
 }
 
 pub fn get_pd_episodes_limit(
@@ -108,79 +98,71 @@ pub fn get_pd_episodes_limit(
 ) -> QueryResult<Vec<Episode>> {
     use schema::episode::dsl::*;
 
-    let eps = Episode::belonging_to(parent)
+    Episode::belonging_to(parent)
         .order(epoch.desc())
         .limit(i64::from(limit))
-        .load::<Episode>(con);
-    eps
+        .load::<Episode>(con)
 }
 
-pub fn load_source_from_uri(con: &SqliteConnection, uri_: &str) -> QueryResult<Source> {
+pub fn get_source_from_uri(con: &SqliteConnection, uri_: &str) -> QueryResult<Source> {
     use schema::source::dsl::*;
 
-    let s = source.filter(uri.eq(uri_)).get_result::<Source>(con);
-    s
+    source.filter(uri.eq(uri_)).get_result::<Source>(con)
 }
 
-pub fn load_podcast_from_title(con: &SqliteConnection, title_: &str) -> QueryResult<Podcast> {
+pub fn get_podcast_from_title(con: &SqliteConnection, title_: &str) -> QueryResult<Podcast> {
     use schema::podcast::dsl::*;
 
-    let pd = podcast.filter(title.eq(title_)).get_result::<Podcast>(con);
-    pd
+    podcast.filter(title.eq(title_)).get_result::<Podcast>(con)
 }
 
-pub fn load_episode_from_uri(con: &SqliteConnection, uri_: &str) -> QueryResult<Episode> {
+pub fn get_episode_from_uri(con: &SqliteConnection, uri_: &str) -> QueryResult<Episode> {
     use schema::episode::dsl::*;
 
-    let ep = episode.filter(uri.eq(uri_)).get_result::<Episode>(con);
-    ep
+    episode.filter(uri.eq(uri_)).get_result::<Episode>(con)
 }
 
-pub fn remove_feed(db: &Database, pd: &Podcast) -> Result<()> {
-    let s_id = pd.source_id();
-    let pd_id = pd.id();
-
+pub fn remove_feed(db: &Database, pd: &Podcast) -> QueryResult<usize> {
     let tempdb = db.lock().unwrap();
-    tempdb.transaction(|| -> Result<()> {
-        delete_source(&tempdb, s_id)?;
-        delete_podcast(&tempdb, pd_id)?;
-        delete_podcast_episodes(&tempdb, pd_id)?;
-        Ok(())
-    })?;
-    Ok(())
+    tempdb.transaction(|| -> QueryResult<usize> {
+        delete_source(&tempdb, pd.source_id())?;
+        delete_podcast(&tempdb, pd.id())?;
+        delete_podcast_episodes(&tempdb, pd.id())
+    })
 }
 
-pub fn delete_source(connection: &SqliteConnection, source_id: i32) -> Result<()> {
+pub fn delete_source(connection: &SqliteConnection, source_id: i32) -> QueryResult<usize> {
     use schema::source::dsl::*;
 
-    diesel::delete(source.filter(id.eq(source_id))).execute(connection)?;
-    Ok(())
+    diesel::delete(source.filter(id.eq(source_id))).execute(connection)
 }
 
-pub fn delete_podcast(connection: &SqliteConnection, podcast_id: i32) -> Result<()> {
+pub fn delete_podcast(connection: &SqliteConnection, podcast_id: i32) -> QueryResult<usize> {
     use schema::podcast::dsl::*;
 
-    diesel::delete(podcast.filter(id.eq(podcast_id))).execute(connection)?;
-    Ok(())
+    diesel::delete(podcast.filter(id.eq(podcast_id))).execute(connection)
 }
 
-pub fn delete_podcast_episodes(connection: &SqliteConnection, parent_id: i32) -> Result<()> {
+pub fn delete_podcast_episodes(
+    connection: &SqliteConnection,
+    parent_id: i32,
+) -> QueryResult<usize> {
     use schema::episode::dsl::*;
 
-    diesel::delete(episode.filter(podcast_id.eq(parent_id))).execute(connection)?;
-    Ok(())
+    diesel::delete(episode.filter(podcast_id.eq(parent_id))).execute(connection)
 }
 
-pub fn update_none_to_played_now(connection: &SqliteConnection, parent: &Podcast) -> Result<()> {
+pub fn update_none_to_played_now(
+    connection: &SqliteConnection,
+    parent: &Podcast,
+) -> QueryResult<usize> {
     use schema::episode::dsl::*;
 
     let epoch_now = Utc::now().timestamp() as i32;
-    connection.transaction(|| -> Result<()> {
+    connection.transaction(|| -> QueryResult<usize> {
         diesel::update(Episode::belonging_to(parent).filter(played.is_null()))
             .set(played.eq(Some(epoch_now)))
-            .execute(connection)?;
-        Ok(())
-    })?;
-    Ok(())
+            .execute(connection)
+    })
 }
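
Two mechanical changes run through this file: each query now ends in a tail expression instead of the `let x = ...; x` shape (which is why the `let_and_return` clippy allow could go), and the mutating helpers return Diesel's `QueryResult<usize>` (rows affected) rather than error_chain's `Result<()>`, letting `remove_feed` chain them with `?` inside a single transaction. A minimal sketch of that return-type plumbing, with `QueryResult` modeled as a plain `Result` so it runs without Diesel:

// Stand-in for diesel's QueryResult<T>; Ok(n) carries the affected row count.
type QueryResult<T> = Result<T, String>;

fn delete_source(_source_id: i32) -> QueryResult<usize> {
    Ok(1) // real body: diesel::delete(...).execute(connection)
}

fn delete_podcast(_podcast_id: i32) -> QueryResult<usize> {
    Ok(1)
}

fn delete_podcast_episodes(_parent_id: i32) -> QueryResult<usize> {
    Ok(42)
}

// `?` propagates the first error; the last call's row count becomes the
// return value, so the trailing Ok(()) of the old version disappears.
fn remove_feed(source_id: i32, podcast_id: i32) -> QueryResult<usize> {
    delete_source(source_id)?;
    delete_podcast(podcast_id)?;
    delete_podcast_episodes(podcast_id)
}

fn main() {
    assert_eq!(remove_feed(1, 2), Ok(42));
}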

View File

@@ -1,5 +1,3 @@
-#![allow(dead_code)]
-
 use diesel::prelude::*;
 use diesel;
 use rss;
@@ -19,20 +17,15 @@ pub struct Feed(pub reqwest::Response, pub Source);
 
 pub type Database = Arc<Mutex<SqliteConnection>>;
 
-fn index_source(con: &SqliteConnection, foo: &NewSource) -> Result<()> {
-    match dbqueries::load_source_from_uri(con, foo.uri) {
-        Ok(_) => Ok(()),
-        Err(_) => {
-            diesel::insert(foo)
-                .into(schema::source::table)
-                .execute(con)?;
-            Ok(())
-        }
+fn index_source(con: &SqliteConnection, foo: &NewSource) -> QueryResult<usize> {
+    match dbqueries::get_source_from_uri(con, foo.uri) {
+        Ok(_) => Ok(1),
+        Err(_) => diesel::insert(foo).into(schema::source::table).execute(con),
     }
 }
 
 fn index_podcast(con: &SqliteConnection, pd: &NewPodcast) -> Result<()> {
-    match dbqueries::load_podcast_from_title(con, &pd.title) {
+    match dbqueries::get_podcast_from_title(con, &pd.title) {
         Ok(mut foo) => if foo.link() != pd.link || foo.description() != pd.description {
             foo.set_link(&pd.link);
             foo.set_description(&pd.description);
@@ -49,7 +42,7 @@ fn index_podcast(con: &SqliteConnection, pd: &NewPodcast) -> Result<()> {
 }
 
 fn index_episode(con: &SqliteConnection, ep: &NewEpisode) -> Result<()> {
-    match dbqueries::load_episode_from_uri(con, ep.uri.unwrap()) {
+    match dbqueries::get_episode_from_uri(con, ep.uri.unwrap()) {
         Ok(mut foo) => if foo.title() != ep.title
             || foo.published_date() != ep.published_date.as_ref().map(|x| x.as_str())
         {
@@ -74,20 +67,20 @@ pub fn insert_return_source(con: &SqliteConnection, url: &str) -> Result<Source>
     let foo = NewSource::new_with_uri(url);
     index_source(con, &foo)?;
 
-    Ok(dbqueries::load_source_from_uri(con, foo.uri)?)
+    Ok(dbqueries::get_source_from_uri(con, foo.uri)?)
 }
 
 fn insert_return_podcast(con: &SqliteConnection, pd: &NewPodcast) -> Result<Podcast> {
     index_podcast(con, pd)?;
 
-    Ok(dbqueries::load_podcast_from_title(con, &pd.title)?)
+    Ok(dbqueries::get_podcast_from_title(con, &pd.title)?)
 }
 
-fn insert_return_episode(con: &SqliteConnection, ep: &NewEpisode) -> Result<Episode> {
-    index_episode(con, ep)?;
-
-    Ok(dbqueries::load_episode_from_uri(con, ep.uri.unwrap())?)
-}
+// fn insert_return_episode(con: &SqliteConnection, ep: &NewEpisode) -> Result<Episode> {
+//     index_episode(con, ep)?;
+//
+//     Ok(dbqueries::get_episode_from_uri(con, ep.uri.unwrap())?)
+// }
 
 pub fn full_index_loop(db: &Database) -> Result<()> {
     let mut f = fetch_all_feeds(db)?;
@@ -120,9 +113,7 @@ pub fn complete_index_from_source(
     req.read_to_string(&mut buf)?;
     let chan = rss::Channel::from_str(&buf)?;
 
-    complete_index(db, &chan, source)?;
-
-    Ok(())
+    complete_index(db, &chan, source)
 }
 
 fn complete_index(db: &Database, chan: &rss::Channel, parent: &Source) -> Result<()> {
@@ -137,8 +128,7 @@ fn complete_index(db: &Database, chan: &rss::Channel, parent: &Source) -> Result<()> {
 fn index_channel(con: &SqliteConnection, chan: &rss::Channel, parent: &Source) -> Result<Podcast> {
     let pd = feedparser::parse_podcast(chan, parent.id());
     // Convert NewPodcast to Podcast
-    let pd = insert_return_podcast(con, &pd)?;
-    Ok(pd)
+    insert_return_podcast(con, &pd)
 }
 
 fn index_channel_items(db: &Database, it: &[rss::Item], pd: &Podcast) {
@@ -280,7 +270,7 @@ mod tests {
 
         inpt.iter().for_each(|feed| {
             let tempdb = db.lock().unwrap();
-            index_source(&tempdb, &NewSource::new_with_uri(feed)).unwrap()
+            index_source(&tempdb, &NewSource::new_with_uri(feed)).unwrap();
         });
 
         full_index_loop(&db).unwrap();
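
`index_source` now collapses the `Err` arm into a single insert expression and returns `Ok(1)` when the source already exists, so both arms satisfy the new `QueryResult<usize>` signature. A rough model of that look-up-then-insert flow over an in-memory table (std types only; the function names mirror the crate's but the bodies are stand-ins):

type QueryResult<T> = Result<T, String>;

// Models dbqueries::get_source_from_uri over a Vec "table".
fn get_source_from_uri(table: &[String], uri: &str) -> QueryResult<usize> {
    table
        .iter()
        .position(|u| u == uri)
        .ok_or_else(|| format!("no source with uri {}", uri))
}

// Models diesel::insert(...).execute(con): returns the rows inserted.
fn insert_source(table: &mut Vec<String>, uri: &str) -> QueryResult<usize> {
    table.push(uri.to_owned());
    Ok(1)
}

fn index_source(table: &mut Vec<String>, uri: &str) -> QueryResult<usize> {
    match get_source_from_uri(table, uri) {
        Ok(_) => Ok(1), // already indexed; report success without inserting
        Err(_) => insert_source(table, uri),
    }
}

fn main() {
    let mut table = vec!["https://example.com/feed.xml".to_owned()];
    index_source(&mut table, "https://example.com/feed.xml").unwrap();
    index_source(&mut table, "https://example.org/other.xml").unwrap();
    assert_eq!(table.len(), 2);
}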

View File

@@ -1,6 +1,5 @@
 #![recursion_limit = "1024"]
 #![cfg_attr(feature = "cargo-clippy", allow(blacklisted_name))]
-#![allow(dead_code)]
 
 #[macro_use]
 extern crate error_chain;
@@ -39,12 +38,12 @@
 embed_migrations!("migrations/");
 
 lazy_static!{
+    #[allow(dead_code)]
     static ref HAMMOND_XDG: xdg::BaseDirectories = {
         xdg::BaseDirectories::with_prefix("Hammond").unwrap()
     };
 
-    static ref HAMMOND_DATA: PathBuf = {
+    static ref _HAMMOND_DATA: PathBuf = {
         HAMMOND_XDG.create_data_directory(HAMMOND_XDG.get_data_home()).unwrap()
     };
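
With the crate-wide `#![allow(dead_code)]` removed, unused items are silenced individually instead: an item-level `#[allow(dead_code)]` attribute, or a leading underscore as in the `_HAMMOND_DATA` rename, both opt a single item out of the lint. A tiny demonstration (both statics and their values are hypothetical, not the crate's):

// Item-level allow: only this static is exempt from the dead_code lint.
#[allow(dead_code)]
static DATA_DIR: &str = "/var/lib/hammond"; // hypothetical value

// Leading underscore: the compiler treats the name as intentionally unused.
static _CACHE_DIR: &str = "/var/cache/hammond"; // hypothetical value

fn main() {
    // Neither static is referenced, yet the build stays warning-free.
}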

View File

@@ -145,5 +145,5 @@ fn init_flowbox(db: &Database, stack: &gtk::Stack, flowbox: &gtk::FlowBox) {
         on_flowbox_child_activate(&db, &stack, &parent);
     }));
     // Populate the flowbox with the Podcasts.
-    populate_flowbox(db, stack, &flowbox);
+    populate_flowbox(db, stack, flowbox);
 }

View File

@@ -152,7 +152,7 @@ fn on_download_clicked(
 fn on_play_bttn_clicked(db: &Database, episode_id: i32) {
     let local_uri = {
         let tempdb = db.lock().unwrap();
-        dbqueries::get_episode_local_uri(&tempdb, episode_id).unwrap()
+        dbqueries::get_episode_local_uri_from_id(&tempdb, episode_id).unwrap()
     };
 
     if let Some(uri) = local_uri {
@@ -175,7 +175,7 @@ fn on_play_bttn_clicked(db: &Database, episode_id: i32) {
 fn on_delete_bttn_clicked(db: &Database, episode_id: i32) {
     let mut ep = {
         let tempdb = db.lock().unwrap();
-        dbqueries::get_episode(&tempdb, episode_id).unwrap()
+        dbqueries::get_episode_from_id(&tempdb, episode_id).unwrap()
     };
 
     let e = delete_local_content(db, &mut ep);
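
Both handlers follow the same shape after the renames: lock the shared connection just long enough to run the query, then act on the returned value once the guard is dropped. A compact sketch of that locking discipline, with a `Vec` standing in for the SQLite connection behind the `Database` alias:

use std::sync::{Arc, Mutex};

// Stand-in for `pub type Database = Arc<Mutex<SqliteConnection>>`.
type Database = Arc<Mutex<Vec<Option<String>>>>;

fn on_play_bttn_clicked(db: &Database, episode_id: i32) {
    // Scope the lock to the query, mirroring the `let local_uri = { ... }`
    // block above: the MutexGuard is dropped at the closing brace.
    let local_uri = {
        let tempdb = db.lock().unwrap();
        tempdb.get(episode_id as usize).cloned().flatten()
    };

    if let Some(uri) = local_uri {
        println!("would play {}", uri); // the real handler starts playback here
    }
}

fn main() {
    let db: Database = Arc::new(Mutex::new(vec![Some("/tmp/ep.mp3".into()), None]));
    on_play_bttn_clicked(&db, 0);
}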