hammond-data: Rework the modules' privacy.

Jordan Petridis 2018-01-20 07:28:30 +02:00
parent 855b1517a7
commit 7d9781052f
No known key found for this signature in database
GPG Key ID: CEABAD9F5683B9A6
7 changed files with 66 additions and 96 deletions

.gitignore vendored
View File

@@ -1,4 +1,4 @@
-/target/
+target/
 **/*.rs.bk
 Cargo.lock
 .vscode

View File

@@ -6,30 +6,29 @@ use diesel::prelude::*;
 use database::connection;
 use errors::*;
-use models::{Episode, EpisodeCleanerQuery, EpisodeMinimal, EpisodeWidgetQuery, NewEpisode,
-             Podcast, PodcastCoverQuery, Source};
+use models::*;
 pub fn get_sources() -> Result<Vec<Source>> {
     use schema::source::dsl::*;
     let db = connection();
     let con = db.get()?;
     source.load::<Source>(&*con).map_err(From::from)
 }
 pub fn get_podcasts() -> Result<Vec<Podcast>> {
     use schema::podcast::dsl::*;
     let db = connection();
     let con = db.get()?;
     podcast.load::<Podcast>(&*con).map_err(From::from)
 }
 pub fn get_episodes() -> Result<Vec<Episode>> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
     episode
         .order(epoch.desc())
         .load::<Episode>(&*con)
@@ -38,9 +37,9 @@ pub fn get_episodes() -> Result<Vec<Episode>> {
 pub(crate) fn get_downloaded_episodes() -> Result<Vec<EpisodeCleanerQuery>> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
     episode
         .select((rowid, local_uri, played))
         .filter(local_uri.is_not_null())
@@ -48,22 +47,22 @@ pub(crate) fn get_downloaded_episodes() -> Result<Vec<EpisodeCleanerQuery>> {
         .map_err(From::from)
 }
-pub fn get_played_episodes() -> Result<Vec<Episode>> {
-    use schema::episode::dsl::*;
+// pub(crate) fn get_played_episodes() -> Result<Vec<Episode>> {
+//     use schema::episode::dsl::*;
+//     let db = connection();
+//     let con = db.get()?;
+//     episode
+//         .filter(played.is_not_null())
+//         .load::<Episode>(&*con)
+//         .map_err(From::from)
+// }
+pub(crate) fn get_played_cleaner_episodes() -> Result<Vec<EpisodeCleanerQuery>> {
+    use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
-    episode
-        .filter(played.is_not_null())
-        .load::<Episode>(&*con)
-        .map_err(From::from)
-}
-pub fn get_played_cleaner_episodes() -> Result<Vec<EpisodeCleanerQuery>> {
-    use schema::episode::dsl::*;
-    let db = connection();
-    let con = db.get()?;
     episode
         .select((rowid, local_uri, played))
         .filter(played.is_not_null())
@@ -73,9 +72,9 @@ pub fn get_played_cleaner_episodes() -> Result<Vec<EpisodeCleanerQuery>> {
 pub fn get_episode_from_rowid(ep_id: i32) -> Result<Episode> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
     episode
         .filter(rowid.eq(ep_id))
         .get_result::<Episode>(&*con)
@@ -84,7 +83,6 @@ pub fn get_episode_from_rowid(ep_id: i32) -> Result<Episode> {
 pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result<Option<String>> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -95,22 +93,8 @@ pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result<Option<String>> {
         .map_err(From::from)
 }
-pub fn get_episodes_with_limit(limit: u32) -> Result<Vec<Episode>> {
-    use schema::episode::dsl::*;
-    let db = connection();
-    let con = db.get()?;
-    episode
-        .order(epoch.desc())
-        .limit(i64::from(limit))
-        .load::<Episode>(&*con)
-        .map_err(From::from)
-}
 pub fn get_episodes_widgets_with_limit(limit: u32) -> Result<Vec<EpisodeWidgetQuery>> {
     use schema::episode;
     let db = connection();
     let con = db.get()?;
@@ -134,9 +118,9 @@ pub fn get_episodes_widgets_with_limit(limit: u32) -> Result<Vec<EpisodeWidgetQuery>> {
 pub fn get_podcast_from_id(pid: i32) -> Result<Podcast> {
     use schema::podcast::dsl::*;
     let db = connection();
     let con = db.get()?;
     podcast
         .filter(id.eq(pid))
         .get_result::<Podcast>(&*con)
@@ -145,9 +129,9 @@ pub fn get_podcast_from_id(pid: i32) -> Result<Podcast> {
 pub fn get_podcast_cover_from_id(pid: i32) -> Result<PodcastCoverQuery> {
     use schema::podcast::dsl::*;
     let db = connection();
     let con = db.get()?;
     podcast
         .select((id, title, image_uri))
         .filter(id.eq(pid))
@@ -157,7 +141,6 @@ pub fn get_podcast_cover_from_id(pid: i32) -> Result<PodcastCoverQuery> {
 pub fn get_pd_episodes(parent: &Podcast) -> Result<Vec<Episode>> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -169,7 +152,6 @@ pub fn get_pd_episodes(parent: &Podcast) -> Result<Vec<Episode>> {
 pub fn get_pd_episodeswidgets(parent: &Podcast) -> Result<Vec<EpisodeWidgetQuery>> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -183,7 +165,6 @@ pub fn get_pd_episodeswidgets(parent: &Podcast) -> Result<Vec<EpisodeWidgetQuery>> {
 pub fn get_pd_unplayed_episodes(parent: &Podcast) -> Result<Vec<Episode>> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -194,45 +175,35 @@ pub fn get_pd_unplayed_episodes(parent: &Podcast) -> Result<Vec<Episode>> {
         .map_err(From::from)
 }
-pub fn get_pd_episodes_limit(parent: &Podcast, limit: u32) -> Result<Vec<Episode>> {
-    use schema::episode::dsl::*;
-    let db = connection();
-    let con = db.get()?;
-    Episode::belonging_to(parent)
-        .order(epoch.desc())
-        .limit(i64::from(limit))
-        .load::<Episode>(&*con)
-        .map_err(From::from)
-}
+// pub(crate) fn get_pd_episodes_limit(parent: &Podcast, limit: u32) -> Result<Vec<Episode>> {
+//     use schema::episode::dsl::*;
+//     let db = connection();
+//     let con = db.get()?;
+//     Episode::belonging_to(parent)
+//         .order(epoch.desc())
+//         .limit(i64::from(limit))
+//         .load::<Episode>(&*con)
+//         .map_err(From::from)
+// }
 pub fn get_source_from_uri(uri_: &str) -> Result<Source> {
     use schema::source::dsl::*;
     let db = connection();
     let con = db.get()?;
     source
         .filter(uri.eq(uri_))
         .get_result::<Source>(&*con)
         .map_err(From::from)
 }
-// pub fn get_podcast_from_title(title_: &str) -> QueryResult<Podcast> {
-//     use schema::podcast::dsl::*;
-//     let db = connection();
-//     let con = db.get()?;
-//     podcast
-//         .filter(title.eq(title_))
-//         .get_result::<Podcast>(&*con)
-// }
 pub fn get_podcast_from_source_id(sid: i32) -> Result<Podcast> {
     use schema::podcast::dsl::*;
     let db = connection();
     let con = db.get()?;
     podcast
         .filter(source_id.eq(sid))
         .get_result::<Podcast>(&*con)
@@ -251,7 +222,7 @@ pub fn get_episode_from_pk(title_: &str, pid: i32) -> Result<Episode> {
         .map_err(From::from)
 }
-pub fn get_episode_minimal_from_pk(title_: &str, pid: i32) -> Result<EpisodeMinimal> {
+pub(crate) fn get_episode_minimal_from_pk(title_: &str, pid: i32) -> Result<EpisodeMinimal> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -264,7 +235,7 @@ pub fn get_episode_minimal_from_pk(title_: &str, pid: i32) -> Result<EpisodeMinimal> {
         .map_err(From::from)
 }
-pub fn remove_feed(pd: &Podcast) -> Result<()> {
+pub(crate) fn remove_feed(pd: &Podcast) -> Result<()> {
     let db = connection();
     let con = db.get()?;
@@ -277,25 +248,25 @@ pub fn remove_feed(pd: &Podcast) -> Result<()> {
     })
 }
-pub fn delete_source(con: &SqliteConnection, source_id: i32) -> QueryResult<usize> {
+fn delete_source(con: &SqliteConnection, source_id: i32) -> QueryResult<usize> {
     use schema::source::dsl::*;
     diesel::delete(source.filter(id.eq(source_id))).execute(&*con)
 }
-pub fn delete_podcast(con: &SqliteConnection, podcast_id: i32) -> QueryResult<usize> {
+fn delete_podcast(con: &SqliteConnection, podcast_id: i32) -> QueryResult<usize> {
     use schema::podcast::dsl::*;
     diesel::delete(podcast.filter(id.eq(podcast_id))).execute(&*con)
 }
-pub fn delete_podcast_episodes(con: &SqliteConnection, parent_id: i32) -> QueryResult<usize> {
+fn delete_podcast_episodes(con: &SqliteConnection, parent_id: i32) -> QueryResult<usize> {
     use schema::episode::dsl::*;
     diesel::delete(episode.filter(podcast_id.eq(parent_id))).execute(&*con)
 }
-pub fn podcast_exists(source_id_: i32) -> Result<bool> {
+pub(crate) fn podcast_exists(source_id_: i32) -> Result<bool> {
     use diesel::dsl::exists;
     use diesel::select;
     use schema::podcast::dsl::*;
@@ -309,7 +280,7 @@ pub fn podcast_exists(source_id_: i32) -> Result<bool> {
 }
 #[cfg_attr(rustfmt, rustfmt_skip)]
-pub fn episode_exists(title_: &str, podcast_id_: i32) -> Result<bool> {
+pub(crate) fn episode_exists(title_: &str, podcast_id_: i32) -> Result<bool> {
     use schema::episode::dsl::*;
     use diesel::select;
     use diesel::dsl::exists;
@@ -336,7 +307,6 @@ pub(crate) fn index_new_episodes(eps: &[NewEpisode]) -> Result<()> {
 pub fn update_none_to_played_now(parent: &Podcast) -> Result<usize> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
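The hunks above move most of the query helpers from `pub` to `pub(crate)`, and the delete helpers to fully private. As a rough guide to what each level means for callers, here is a minimal, self-contained sketch; the item names are made up for illustration and are not part of hammond-data:

    // Standalone sketch of the three Rust visibility levels used above (illustrative names only).
    mod dbqueries {
        // Reachable from other crates that depend on this one: public API surface.
        pub fn public_query() {}

        // Reachable from any module inside this crate, invisible to downstream crates.
        pub(crate) fn crate_only_query() {}

        // Reachable only inside this module, e.g. a delete helper that only a
        // higher-level function in the same file is supposed to call.
        fn private_helper() {}

        pub fn remove_everything() {
            // All three are callable from within the module itself.
            public_query();
            crate_only_query();
            private_helper();
        }
    }

    fn main() {
        dbqueries::public_query();       // fine from anywhere
        dbqueries::crate_only_query();   // fine inside this crate, error E0603 from another crate
        // dbqueries::private_helper();  // error E0603 even from elsewhere in this crate
        dbqueries::remove_everything();
    }

Tightening the free functions this way lets the stricter `unused` lints (enabled in the next file) report dead queries instead of silently keeping them exported.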

View File

@@ -18,28 +18,22 @@
 unconditional_recursion, unions_with_drop_fields, unused_allocation, unused_comparisons,
 unused_parens, while_true)]
 #![deny(missing_debug_implementations, missing_docs, trivial_casts, trivial_numeric_casts)]
-// FIXME: uncomment
-// #![deny(unused_extern_crates, unused)]
+#![deny(unused_extern_crates, unused)]
 // #![feature(conservative_impl_trait)]
 #[macro_use]
-extern crate error_chain;
-#[macro_use]
-extern crate lazy_static;
-#[macro_use]
-extern crate log;
-#[macro_use]
+extern crate derive_builder;
+#[macro_use]
 extern crate diesel;
 #[macro_use]
 extern crate diesel_migrations;
 #[macro_use]
-extern crate derive_builder;
+extern crate error_chain;
+#[macro_use]
+extern crate lazy_static;
+#[macro_use]
+extern crate log;
 extern crate ammonia;
 extern crate chrono;
@@ -63,15 +57,15 @@ pub mod dbqueries;
 #[allow(missing_docs)]
 pub mod errors;
 pub mod utils;
-pub mod feed;
 pub mod database;
 pub mod pipeline;
 pub(crate) mod models;
+mod feed;
 mod parser;
 mod schema;
-pub use feed::Feed;
 pub use models::{Episode, EpisodeWidgetQuery, Podcast, PodcastCoverQuery, Source};
+// pub use feed::Feed;
 /// [XDG Base Direcotory](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html) Paths.
 #[allow(missing_debug_implementations)]
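Seen from a downstream crate, the change in this crate root means the `feed` module path disappears from the public API while the model types re-exported at the root stay available. A rough sketch of that view, assuming the library is consumed as `hammond_data` and using only items visible in this diff (the exact call sites in the real front end may differ):

    // Hypothetical consumer of the library after this commit.
    extern crate hammond_data;

    use hammond_data::Source;           // still public: re-exported from the models facade
    // use hammond_data::feed::Feed;    // no longer compiles: `feed` is now a private module
    // use hammond_data::Feed;          // also unavailable while `pub use feed::Feed` stays commented out

    fn main() {
        // `dbqueries` remains `pub mod`, so the public query helpers are still callable.
        let sources: Vec<Source> = hammond_data::dbqueries::get_sources().unwrap();
        println!("{} sources in the database", sources.len());
    }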

View File

@@ -1,20 +1,25 @@
-pub(crate) mod new_episode;
-pub(crate) mod new_podcast;
-pub(crate) mod new_source;
-pub(crate) mod episode;
-pub(crate) mod podcast;
-pub(crate) mod source;
+mod new_episode;
+mod new_podcast;
+mod new_source;
+mod episode;
+mod podcast;
+mod source;
 // use futures::prelude::*;
 // use futures::future::*;
-pub(crate) use self::episode::EpisodeCleanerQuery;
 pub(crate) use self::new_episode::{NewEpisode, NewEpisodeMinimal};
 pub(crate) use self::new_podcast::NewPodcast;
 pub(crate) use self::new_source::NewSource;
+#[cfg(test)]
+pub(crate) use self::new_episode::NewEpisodeBuilder;
+#[cfg(test)]
+pub(crate) use self::new_podcast::NewPodcastBuilder;
 pub use self::episode::{Episode, EpisodeMinimal, EpisodeWidgetQuery};
+pub(crate) use self::episode::EpisodeCleanerQuery;
 pub use self::podcast::{Podcast, PodcastCoverQuery};
 pub use self::source::Source;
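The pattern here is a small facade: the submodules become private, and mod.rs decides item by item how far each type is visible, with the builder types gated behind `#[cfg(test)]` because only unit tests need them. A trimmed-down sketch of the same idea, reduced to one submodule with placeholder types rather than the real ones:

    // models/mod.rs-style facade, sketched with illustrative types.
    mod new_episode {
        pub struct NewEpisode;          // `pub` inside the submodule; reachability is set below

        #[derive(Default)]
        pub struct NewEpisodeBuilder;   // only unit tests construct episodes this way
    }

    // Usable anywhere inside the crate, never part of the public API.
    pub(crate) use self::new_episode::NewEpisode;

    // Re-exported only when compiling tests, so `cargo build` never sees it
    // and the strict unused lints stay quiet in normal builds.
    #[cfg(test)]
    pub(crate) use self::new_episode::NewEpisodeBuilder;

Keeping the submodules private makes this re-export list the single place that defines what the rest of the crate, and downstream crates, can reach; the test modules updated below now import through it instead of through deep `models::new_episode::...` paths.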

View File

@@ -31,8 +31,8 @@ pub(crate) fn parse_itunes_duration(item: &Item) -> Option<i32> {
 #[cfg(test)]
 mod tests {
-    use models::new_episode::{NewEpisode, NewEpisodeBuilder};
-    use models::new_podcast::{NewPodcast, NewPodcastBuilder};
+    use models::{NewEpisode, NewEpisodeBuilder};
+    use models::{NewPodcast, NewPodcastBuilder};
     use rss::{Channel, ItemBuilder};
     use rss::extension::itunes::ITunesItemExtensionBuilder;

View File

@@ -14,7 +14,6 @@ use Source;
 use dbqueries;
 use errors::*;
 use models::{IndexState, NewEpisode, NewEpisodeMinimal};
-// use models::new_episode::NewEpisodeMinimal;
 // use Feed;
 use std;

View File

@@ -153,8 +153,10 @@ mod tests {
     use self::tempdir::TempDir;
     use super::*;
     use database::truncate_db;
-    use models::new_episode::NewEpisodeBuilder;
+    use models::NewEpisodeBuilder;
     use std::fs::File;
     use std::io::Write;