Merge branch '39-failure-libs-migration' into 'master'
Resolve "Migrate Error-Handling to Failure crate."

Closes #39

See merge request alatiera/Hammond!16
commit c1ecdbda52
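The change is mechanical but broad: the `error_chain!` invocation and its crate-wide `Result<T>` alias are replaced by an explicit `DataError` enum deriving `Fail`, with hand-written `From` impls, and every signature spells out its error type. A minimal sketch of the before/after pattern (the names here are illustrative, not taken from the diff):

```rust
#[macro_use]
extern crate failure_derive;
extern crate failure;

use std::io;

// Before: error-chain generated an opaque `Error` plus a `Result<T>` alias
// from a `foreign_links` list. After: the enum is spelled out, and each
// variant declares its Display message and carries its `#[cause]`.
#[derive(Fail, Debug)]
pub enum MyError {
    #[fail(display = "IO Error: {}", _0)]
    Io(#[cause] io::Error),
}

// error-chain's `foreign_links` used to write this conversion for us; with
// failure it is written by hand, which is what keeps `?` working below.
impl From<io::Error> for MyError {
    fn from(err: io::Error) -> Self {
        MyError::Io(err)
    }
}

// Signatures now name the error type instead of relying on a crate-wide alias.
fn read_line() -> Result<String, MyError> {
    let mut buf = String::new();
    io::stdin().read_line(&mut buf)?;
    Ok(buf)
}
```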
Cargo.lock (generated)

@@ -617,6 +617,8 @@ dependencies = [
  "diesel_migrations 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "dotenv 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "failure_derive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "futures 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)",
  "futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "hyper 0.11.15 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -628,7 +630,6 @@ dependencies = [
  "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "rayon 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "reqwest 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "rfc822_sanitizer 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "rss 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -642,6 +643,8 @@ name = "hammond-downloader"
 version = "0.1.0"
 dependencies = [
  "error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "failure_derive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
  "hammond-data 0.1.0",
  "hyper 0.11.15 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -657,6 +660,8 @@ version = "0.1.0"
 dependencies = [
  "chrono 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "dissolve 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "failure_derive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "gdk 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "gdk-pixbuf 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "gio 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
hammond-data/Cargo.toml

@@ -14,7 +14,6 @@ itertools = "0.7.6"
 lazy_static = "1.0.0"
 log = "0.4.1"
 rayon = "0.9.0"
-reqwest = "0.8.4"
 rfc822_sanitizer = "0.3.3"
 rss = "1.2.1"
 url = "1.6.0"
@@ -26,6 +25,8 @@ hyper-tls = "0.1.2"
 native-tls = "0.1.5"
 futures-cpupool = "0.1.8"
 num_cpus = "1.8.0"
+failure = "0.1.1"
+failure_derive = "0.1.1"

 [dependencies.diesel]
 features = ["sqlite", "r2d2"]
hammond-data/src/database.rs

@@ -7,7 +7,7 @@ use diesel::r2d2::ConnectionManager;
 use std::io;
 use std::path::PathBuf;

-use errors::*;
+use errors::DataError;

 #[cfg(not(test))]
 use xdg_dirs;
@@ -57,7 +57,7 @@ fn init_pool(db_path: &str) -> Pool {
     pool
 }

-fn run_migration_on(connection: &SqliteConnection) -> Result<()> {
+fn run_migration_on(connection: &SqliteConnection) -> Result<(), DataError> {
     info!("Running DB Migrations...");
     // embedded_migrations::run(connection)?;
     embedded_migrations::run_with_output(connection, &mut io::stdout()).map_err(From::from)
@@ -66,7 +66,7 @@ fn run_migration_on(connection: &SqliteConnection) -> Result<()> {
 /// Reset the database into a clean state.
 // Test share a Temp file db.
 #[allow(dead_code)]
-pub fn truncate_db() -> Result<()> {
+pub fn truncate_db() -> Result<(), DataError> {
     let db = connection();
     let con = db.get()?;
     con.execute("DELETE FROM episode")?;
hammond-data/src/dbqueries.rs

@@ -8,10 +8,10 @@ use diesel::dsl::exists;
 use diesel::select;

 use database::connection;
-use errors::*;
+use errors::DataError;
 use models::*;

-pub fn get_sources() -> Result<Vec<Source>> {
+pub fn get_sources() -> Result<Vec<Source>, DataError> {
     use schema::source::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -22,7 +22,7 @@ pub fn get_sources() -> Result<Vec<Source>> {
         .map_err(From::from)
 }

-pub fn get_podcasts() -> Result<Vec<Podcast>> {
+pub fn get_podcasts() -> Result<Vec<Podcast>, DataError> {
     use schema::podcast::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -33,7 +33,7 @@ pub fn get_podcasts() -> Result<Vec<Podcast>> {
         .map_err(From::from)
 }

-pub fn get_episodes() -> Result<Vec<Episode>> {
+pub fn get_episodes() -> Result<Vec<Episode>, DataError> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -44,7 +44,7 @@ pub fn get_episodes() -> Result<Vec<Episode>> {
         .map_err(From::from)
 }

-pub(crate) fn get_downloaded_episodes() -> Result<Vec<EpisodeCleanerQuery>> {
+pub(crate) fn get_downloaded_episodes() -> Result<Vec<EpisodeCleanerQuery>, DataError> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -56,7 +56,7 @@ pub(crate) fn get_downloaded_episodes() -> Result<Vec<EpisodeCleanerQuery>> {
         .map_err(From::from)
 }

-// pub(crate) fn get_played_episodes() -> Result<Vec<Episode>> {
+// pub(crate) fn get_played_episodes() -> Result<Vec<Episode>, DataError> {
 // use schema::episode::dsl::*;

 // let db = connection();
@@ -67,7 +67,7 @@ pub(crate) fn get_downloaded_episodes() -> Result<Vec<EpisodeCleanerQuery>> {
 // .map_err(From::from)
 // }

-pub(crate) fn get_played_cleaner_episodes() -> Result<Vec<EpisodeCleanerQuery>> {
+pub(crate) fn get_played_cleaner_episodes() -> Result<Vec<EpisodeCleanerQuery>, DataError> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -79,7 +79,7 @@ pub(crate) fn get_played_cleaner_episodes() -> Result<Vec<EpisodeCleanerQuery>>
         .map_err(From::from)
 }

-pub fn get_episode_from_rowid(ep_id: i32) -> Result<Episode> {
+pub fn get_episode_from_rowid(ep_id: i32) -> Result<Episode, DataError> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -90,7 +90,7 @@ pub fn get_episode_from_rowid(ep_id: i32) -> Result<Episode> {
         .map_err(From::from)
 }

-pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result<Option<String>> {
+pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result<Option<String>, DataError> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -102,7 +102,7 @@ pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result<Option<String>> {
         .map_err(From::from)
 }

-pub fn get_episodes_widgets_with_limit(limit: u32) -> Result<Vec<EpisodeWidgetQuery>> {
+pub fn get_episodes_widgets_with_limit(limit: u32) -> Result<Vec<EpisodeWidgetQuery>, DataError> {
     use schema::episode;
     let db = connection();
     let con = db.get()?;
@@ -125,7 +125,7 @@ pub fn get_episodes_widgets_with_limit(limit: u32) -> Result<Vec<EpisodeWidgetQu
         .map_err(From::from)
 }

-pub fn get_podcast_from_id(pid: i32) -> Result<Podcast> {
+pub fn get_podcast_from_id(pid: i32) -> Result<Podcast, DataError> {
     use schema::podcast::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -136,7 +136,7 @@ pub fn get_podcast_from_id(pid: i32) -> Result<Podcast> {
         .map_err(From::from)
 }

-pub fn get_podcast_cover_from_id(pid: i32) -> Result<PodcastCoverQuery> {
+pub fn get_podcast_cover_from_id(pid: i32) -> Result<PodcastCoverQuery, DataError> {
     use schema::podcast::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -148,7 +148,7 @@ pub fn get_podcast_cover_from_id(pid: i32) -> Result<PodcastCoverQuery> {
         .map_err(From::from)
 }

-pub fn get_pd_episodes(parent: &Podcast) -> Result<Vec<Episode>> {
+pub fn get_pd_episodes(parent: &Podcast) -> Result<Vec<Episode>, DataError> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -159,7 +159,7 @@ pub fn get_pd_episodes(parent: &Podcast) -> Result<Vec<Episode>> {
         .map_err(From::from)
 }

-pub fn get_pd_episodeswidgets(parent: &Podcast) -> Result<Vec<EpisodeWidgetQuery>> {
+pub fn get_pd_episodeswidgets(parent: &Podcast) -> Result<Vec<EpisodeWidgetQuery>, DataError> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -172,7 +172,7 @@ pub fn get_pd_episodeswidgets(parent: &Podcast) -> Result<Vec<EpisodeWidgetQuery
         .map_err(From::from)
 }

-pub fn get_pd_unplayed_episodes(parent: &Podcast) -> Result<Vec<Episode>> {
+pub fn get_pd_unplayed_episodes(parent: &Podcast) -> Result<Vec<Episode>, DataError> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -184,8 +184,8 @@ pub fn get_pd_unplayed_episodes(parent: &Podcast) -> Result<Vec<Episode>> {
         .map_err(From::from)
 }

-// pub(crate) fn get_pd_episodes_limit(parent: &Podcast, limit: u32) -> Result<Vec<Episode>> {
-// use schema::episode::dsl::*;
+// pub(crate) fn get_pd_episodes_limit(parent: &Podcast, limit: u32) ->
+// Result<Vec<Episode>, DataError> { use schema::episode::dsl::*;

 // let db = connection();
 // let con = db.get()?;
@@ -197,7 +197,7 @@ pub fn get_pd_unplayed_episodes(parent: &Podcast) -> Result<Vec<Episode>> {
 // .map_err(From::from)
 // }

-pub fn get_source_from_uri(uri_: &str) -> Result<Source> {
+pub fn get_source_from_uri(uri_: &str) -> Result<Source, DataError> {
     use schema::source::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -208,7 +208,7 @@ pub fn get_source_from_uri(uri_: &str) -> Result<Source> {
         .map_err(From::from)
 }

-pub fn get_source_from_id(id_: i32) -> Result<Source> {
+pub fn get_source_from_id(id_: i32) -> Result<Source, DataError> {
     use schema::source::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -219,7 +219,7 @@ pub fn get_source_from_id(id_: i32) -> Result<Source> {
         .map_err(From::from)
 }

-pub fn get_podcast_from_source_id(sid: i32) -> Result<Podcast> {
+pub fn get_podcast_from_source_id(sid: i32) -> Result<Podcast, DataError> {
     use schema::podcast::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -230,7 +230,7 @@ pub fn get_podcast_from_source_id(sid: i32) -> Result<Podcast> {
         .map_err(From::from)
 }

-pub fn get_episode_from_pk(title_: &str, pid: i32) -> Result<Episode> {
+pub fn get_episode_from_pk(title_: &str, pid: i32) -> Result<Episode, DataError> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -242,7 +242,10 @@ pub fn get_episode_from_pk(title_: &str, pid: i32) -> Result<Episode> {
         .map_err(From::from)
 }

-pub(crate) fn get_episode_minimal_from_pk(title_: &str, pid: i32) -> Result<EpisodeMinimal> {
+pub(crate) fn get_episode_minimal_from_pk(
+    title_: &str,
+    pid: i32,
+) -> Result<EpisodeMinimal, DataError> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -255,11 +258,11 @@ pub(crate) fn get_episode_minimal_from_pk(title_: &str, pid: i32) -> Result<Epis
         .map_err(From::from)
 }

-pub(crate) fn remove_feed(pd: &Podcast) -> Result<()> {
+pub(crate) fn remove_feed(pd: &Podcast) -> Result<(), DataError> {
     let db = connection();
     let con = db.get()?;

-    con.transaction(|| -> Result<()> {
+    con.transaction(|| {
         delete_source(&con, pd.source_id())?;
         delete_podcast(&con, pd.id())?;
         delete_podcast_episodes(&con, pd.id())?;
@@ -286,7 +289,7 @@ fn delete_podcast_episodes(con: &SqliteConnection, parent_id: i32) -> QueryResul
     diesel::delete(episode.filter(podcast_id.eq(parent_id))).execute(con)
 }

-pub fn source_exists(url: &str) -> Result<bool> {
+pub fn source_exists(url: &str) -> Result<bool, DataError> {
     use schema::source::dsl::*;

     let db = connection();
@@ -297,7 +300,7 @@ pub fn source_exists(url: &str) -> Result<bool> {
         .map_err(From::from)
 }

-pub(crate) fn podcast_exists(source_id_: i32) -> Result<bool> {
+pub(crate) fn podcast_exists(source_id_: i32) -> Result<bool, DataError> {
     use schema::podcast::dsl::*;

     let db = connection();
@@ -309,7 +312,7 @@ pub(crate) fn podcast_exists(source_id_: i32) -> Result<bool> {
 }

 #[cfg_attr(rustfmt, rustfmt_skip)]
-pub(crate) fn episode_exists(title_: &str, podcast_id_: i32) -> Result<bool> {
+pub(crate) fn episode_exists(title_: &str, podcast_id_: i32) -> Result<bool, DataError> {
     use schema::episode::dsl::*;

     let db = connection();
@@ -320,7 +323,7 @@ pub(crate) fn episode_exists(title_: &str, podcast_id_: i32) -> Result<bool> {
         .map_err(From::from)
 }

-pub(crate) fn index_new_episodes(eps: &[NewEpisode]) -> Result<()> {
+pub(crate) fn index_new_episodes(eps: &[NewEpisode]) -> Result<(), DataError> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;
@@ -332,13 +335,13 @@ pub(crate) fn index_new_episodes(eps: &[NewEpisode]) -> Result<()> {
         .map(|_| ())
 }

-pub fn update_none_to_played_now(parent: &Podcast) -> Result<usize> {
+pub fn update_none_to_played_now(parent: &Podcast) -> Result<usize, DataError> {
     use schema::episode::dsl::*;
     let db = connection();
     let con = db.get()?;

     let epoch_now = Utc::now().timestamp() as i32;
-    con.transaction(|| -> Result<usize> {
+    con.transaction(|| {
         diesel::update(Episode::belonging_to(parent).filter(played.is_null()))
             .set(played.eq(Some(epoch_now)))
             .execute(&con)
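Note the two `con.transaction` call sites above: the explicit closure annotations (`|| -> Result<()>` and `|| -> Result<usize>`) are dropped. diesel's `transaction` is generic over any `E: From<diesel::result::Error>`, so once the enclosing function fixes the error type to `DataError`, inference can fill in `E` on its own. A stand-in sketch of that shape (this `transaction` only mimics the signature of diesel 1.x's; it is not the real implementation):

```rust
// A toy error playing the role of diesel::result::Error.
#[derive(Debug)]
pub struct TxError;

#[derive(Debug)]
pub enum DataError {
    Tx(TxError),
}

impl From<TxError> for DataError {
    fn from(err: TxError) -> Self {
        DataError::Tx(err)
    }
}

// Same generic shape as diesel 1.x's Connection::transaction.
fn transaction<T, E, F>(f: F) -> Result<T, E>
where
    E: From<TxError>,
    F: FnOnce() -> Result<T, E>,
{
    begin().map_err(E::from)?;
    let value = f()?;
    commit().map_err(E::from)?;
    Ok(value)
}

fn begin() -> Result<(), TxError> {
    Ok(())
}

fn commit() -> Result<(), TxError> {
    Ok(())
}

// E = DataError is fixed by the return type, so the closure needs no
// `|| -> Result<usize, DataError>` annotation.
fn update_all() -> Result<usize, DataError> {
    transaction(|| Ok(42))
}
```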
hammond-data/src/errors.rs

@@ -3,23 +3,105 @@ use diesel::r2d2;
 use diesel_migrations::RunMigrationsError;
 use hyper;
 use native_tls;
-use reqwest;
-use rss;
+// use rss;
 use url;

 use std::io;
+// use std::fmt;

-error_chain! {
-    foreign_links {
-        DieselResultError(diesel::result::Error);
-        DieselMigrationError(RunMigrationsError);
-        R2D2Error(r2d2::Error);
-        R2D2PoolError(r2d2::PoolError);
-        RSSError(rss::Error);
-        ReqError(reqwest::Error);
-        HyperError(hyper::Error);
-        UrlError(url::ParseError);
-        TLSError(native_tls::Error);
-        IoError(io::Error);
+// fadsadfs NOT SYNC
+// #[derive(Fail, Debug)]
+// #[fail(display = "RSS Error: {}", _0)]
+// struct RSSError(rss::Error);
+
+#[derive(Fail, Debug)]
+pub enum DataError {
+    #[fail(display = "SQL Query failed: {}", _0)]
+    DieselResultError(#[cause] diesel::result::Error),
+    #[fail(display = "Database Migration error: {}", _0)]
+    DieselMigrationError(#[cause] RunMigrationsError),
+    #[fail(display = "R2D2 error: {}", _0)]
+    R2D2Error(#[cause] r2d2::Error),
+    #[fail(display = "R2D2 Pool error: {}", _0)]
+    R2D2PoolError(#[cause] r2d2::PoolError),
+    #[fail(display = "Hyper Error: {}", _0)]
+    HyperError(#[cause] hyper::Error),
+    #[fail(display = "Failed to parse a url: {}", _0)]
+    // TODO: print the url too
+    UrlError(#[cause] url::ParseError),
+    #[fail(display = "TLS Error: {}", _0)]
+    TLSError(#[cause] native_tls::Error),
+    #[fail(display = "IO Error: {}", _0)]
+    IOError(#[cause] io::Error),
+    #[fail(display = "RSS Error: {}", _0)]
+    // Rss::Error is not yet Sync
+    RssCrateError(String),
+    #[fail(display = "Error: {}", _0)]
+    Bail(String),
+    #[fail(display = "Request to {} returned {}. Context: {}", url, status_code, context)]
+    HttpStatusError {
+        url: String,
+        status_code: hyper::StatusCode,
+        context: String,
+    },
+    #[fail(display = "Error occured while Parsing an Episode. Reason: {}", reason)]
+    ParseEpisodeError { reason: String, parent_id: i32 },
+    #[fail(display = "No Futures where produced to be run.")]
+    EmptyFuturesList,
+    #[fail(display = "Episode was not changed and thus skipped.")]
+    EpisodeNotChanged,
+}
+
+impl From<RunMigrationsError> for DataError {
+    fn from(err: RunMigrationsError) -> Self {
+        DataError::DieselMigrationError(err)
+    }
+}
+
+impl From<diesel::result::Error> for DataError {
+    fn from(err: diesel::result::Error) -> Self {
+        DataError::DieselResultError(err)
+    }
+}
+
+impl From<r2d2::Error> for DataError {
+    fn from(err: r2d2::Error) -> Self {
+        DataError::R2D2Error(err)
+    }
+}
+
+impl From<r2d2::PoolError> for DataError {
+    fn from(err: r2d2::PoolError) -> Self {
+        DataError::R2D2PoolError(err)
+    }
+}
+
+impl From<hyper::Error> for DataError {
+    fn from(err: hyper::Error) -> Self {
+        DataError::HyperError(err)
+    }
+}
+
+impl From<url::ParseError> for DataError {
+    fn from(err: url::ParseError) -> Self {
+        DataError::UrlError(err)
+    }
+}
+
+impl From<native_tls::Error> for DataError {
+    fn from(err: native_tls::Error) -> Self {
+        DataError::TLSError(err)
+    }
+}
+
+impl From<io::Error> for DataError {
+    fn from(err: io::Error) -> Self {
+        DataError::IOError(err)
+    }
+}
+
+impl From<String> for DataError {
+    fn from(err: String) -> Self {
+        DataError::Bail(err)
     }
 }
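For orientation: those hand-written `From` impls are what keep the `?` operator working after `error_chain!`'s generated conversions go away, since `?` inserts a `From::from` call on the error path. A hypothetical caller inside the crate (this function is not part of the commit):

```rust
use std::fs::File;
use std::io::Read;

use errors::DataError;

// io::Error converts into DataError::IOError through the impl above,
// so a bare `?` replaces error-chain's `.chain_err(...)` adapters.
fn read_feed_file(path: &str) -> Result<String, DataError> {
    let mut buf = String::new();
    File::open(path)?.read_to_string(&mut buf)?;
    Ok(buf)
}
```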
hammond-data/src/feed.rs

@@ -5,7 +5,7 @@ use itertools::{Either, Itertools};
 use rss;

 use dbqueries;
-use errors::*;
+use errors::DataError;
 use models::{Index, IndexState, Update};
 use models::{NewEpisode, NewPodcast, Podcast};
 use pipeline::*;
@@ -26,7 +26,7 @@ pub struct Feed {

 impl Feed {
     /// Index the contents of the RSS `Feed` into the database.
-    pub fn index(self) -> Box<Future<Item = (), Error = Error> + Send> {
+    pub fn index(self) -> Box<Future<Item = (), Error = DataError> + Send> {
         let fut = self.parse_podcast_async()
             .and_then(|pd| pd.to_podcast())
             .and_then(move |pd| self.index_channel_items(&pd));
@@ -38,22 +38,25 @@ impl Feed {
         NewPodcast::new(&self.channel, self.source_id)
     }

-    fn parse_podcast_async(&self) -> Box<Future<Item = NewPodcast, Error = Error> + Send> {
+    fn parse_podcast_async(&self) -> Box<Future<Item = NewPodcast, Error = DataError> + Send> {
         Box::new(ok(self.parse_podcast()))
     }

-    fn index_channel_items(&self, pd: &Podcast) -> Box<Future<Item = (), Error = Error> + Send> {
+    fn index_channel_items(
+        &self,
+        pd: &Podcast,
+    ) -> Box<Future<Item = (), Error = DataError> + Send> {
         let fut = self.get_stuff(pd)
             .and_then(|(insert, update)| {
                 if !insert.is_empty() {
                     info!("Indexing {} episodes.", insert.len());
                     if let Err(err) = dbqueries::index_new_episodes(insert.as_slice()) {
                         error!("Failed batch indexng, Fallign back to individual indexing.");
-                        error!("Error: {}", err);
+                        error!("{}", err);
                         insert.iter().for_each(|ep| {
                             if let Err(err) = ep.index() {
                                 error!("Failed to index episode: {:?}.", ep.title());
-                                error!("Error msg: {}", err);
+                                error!("{}", err);
                             };
                         })
                     }
@@ -70,7 +73,7 @@ impl Feed {
                     .for_each(|(ref ep, rowid)| {
                         if let Err(err) = ep.update(rowid) {
                             error!("Failed to index episode: {:?}.", ep.title());
-                            error!("Error msg: {}", err);
+                            error!("{}", err);
                         };
                     })
             }
@@ -79,7 +82,10 @@ impl Feed {
         Box::new(fut)
     }

-    fn get_stuff(&self, pd: &Podcast) -> Box<Future<Item = InsertUpdate, Error = Error> + Send> {
+    fn get_stuff(
+        &self,
+        pd: &Podcast,
+    ) -> Box<Future<Item = InsertUpdate, Error = DataError> + Send> {
         let (insert, update): (Vec<_>, Vec<_>) = self.channel
             .items()
             .into_iter()
@@ -90,7 +96,7 @@ impl Feed {
             // I am not sure what the optimizations are on match vs allocating None.
             .map(|fut| {
                 fut.and_then(|x| match x {
-                    IndexState::NotChanged => bail!("Nothing to do here."),
+                    IndexState::NotChanged => return Err(DataError::EpisodeNotChanged),
                     _ => Ok(x),
                 })
             })
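Worth spelling out why all the `Box<Future<...>>` signatures in this file change together: `and_then` only chains futures whose `Error` types match, so once one stage returns `DataError`, every stage and the final boxed type must as well. A reduced futures-0.1 sketch of the constraint (feed logic elided, names illustrative):

```rust
extern crate futures;

use futures::future::{ok, Future};

#[derive(Debug)]
pub enum DataError {
    EpisodeNotChanged,
}

fn parse_podcast() -> Box<Future<Item = u32, Error = DataError> + Send> {
    Box::new(ok(42))
}

fn index_items(_id: u32) -> Box<Future<Item = (), Error = DataError> + Send> {
    // Returning a future with a different Error type here would make
    // the `and_then` chain below fail to compile.
    Box::new(ok(()))
}

fn index() -> Box<Future<Item = (), Error = DataError> + Send> {
    Box::new(parse_podcast().and_then(index_items))
}
```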
hammond-data/src/lib.rs

@@ -6,9 +6,6 @@
         wrong_pub_self_convention, mut_mut, non_ascii_literal, similar_names,
         unicode_not_nfc, enum_glob_use, if_not_else, items_after_statements,
         used_underscore_binding))]
-
-//! FIXME: Docs
-
 #![allow(unknown_lints)]
 #![deny(bad_style, const_err, dead_code, improper_ctypes, legacy_directory_ownership,
         non_shorthand_field_patterns, no_mangle_generic_items, overflowing_literals,
@@ -21,14 +18,18 @@

 // #![feature(conservative_impl_trait)]

+//! FIXME: Docs
+
 #[macro_use]
 extern crate derive_builder;
 #[macro_use]
 extern crate diesel;
 #[macro_use]
 extern crate diesel_migrations;
+// #[macro_use]
+extern crate failure;
 #[macro_use]
-extern crate error_chain;
+extern crate failure_derive;
 #[macro_use]
 extern crate lazy_static;
 #[macro_use]
@@ -44,7 +45,6 @@ extern crate itertools;
 extern crate native_tls;
 extern crate num_cpus;
 extern crate rayon;
-extern crate reqwest;
 extern crate rfc822_sanitizer;
 extern crate rss;
 extern crate tokio_core;
hammond-data/src/models/episode.rs

@@ -4,7 +4,7 @@ use diesel::SaveChangesDsl;
 use diesel::prelude::*;

 use database::connection;
-use errors::*;
+use errors::DataError;
 use models::{Podcast, Save};
 use schema::episode;

@@ -31,9 +31,9 @@ pub struct Episode {
     podcast_id: i32,
 }

-impl Save<Episode> for Episode {
+impl Save<Episode, DataError> for Episode {
     /// Helper method to easily save/"sync" current state of self to the Database.
-    fn save(&self) -> Result<Episode> {
+    fn save(&self) -> Result<Episode, DataError> {
         let db = connection();
         let tempdb = db.get()?;

@@ -180,7 +180,7 @@ impl Episode {
     }

     /// Sets the `played` value with the current `epoch` timestap and save it.
-    pub fn set_played_now(&mut self) -> Result<()> {
+    pub fn set_played_now(&mut self) -> Result<(), DataError> {
         let epoch = Utc::now().timestamp() as i32;
         self.set_played(Some(epoch));
         self.save().map(|_| ())
@@ -223,9 +223,9 @@ impl From<Episode> for EpisodeWidgetQuery {
     }
 }

-impl Save<usize> for EpisodeWidgetQuery {
+impl Save<usize, DataError> for EpisodeWidgetQuery {
     /// Helper method to easily save/"sync" current state of self to the Database.
-    fn save(&self) -> Result<usize> {
+    fn save(&self) -> Result<usize, DataError> {
         use schema::episode::dsl::*;

         let db = connection();
@@ -342,7 +342,7 @@ impl EpisodeWidgetQuery {
     }

     /// Sets the `played` value with the current `epoch` timestap and save it.
-    pub fn set_played_now(&mut self) -> Result<()> {
+    pub fn set_played_now(&mut self) -> Result<(), DataError> {
         let epoch = Utc::now().timestamp() as i32;
         self.set_played(Some(epoch));
         self.save().map(|_| ())
@@ -361,9 +361,9 @@ pub struct EpisodeCleanerQuery {
     played: Option<i32>,
 }

-impl Save<usize> for EpisodeCleanerQuery {
+impl Save<usize, DataError> for EpisodeCleanerQuery {
     /// Helper method to easily save/"sync" current state of self to the Database.
-    fn save(&self) -> Result<usize> {
+    fn save(&self) -> Result<usize, DataError> {
         use schema::episode::dsl::*;

         let db = connection();
hammond-data/src/models/mod.rs

@@ -23,8 +23,6 @@ pub use self::episode::{Episode, EpisodeMinimal, EpisodeWidgetQuery};
 pub use self::podcast::{Podcast, PodcastCoverQuery};
 pub use self::source::Source;

-use errors::*;
-
 #[derive(Debug, Clone, PartialEq)]
 pub enum IndexState<T> {
     Index(T),
@@ -32,20 +30,21 @@ pub enum IndexState<T> {
     NotChanged,
 }

-pub trait Insert {
-    fn insert(&self) -> Result<()>;
+pub trait Insert<T, E> {
+    fn insert(&self) -> Result<T, E>;
 }

-pub trait Update {
-    fn update(&self, i32) -> Result<()>;
+pub trait Update<T, E> {
+    fn update(&self, i32) -> Result<T, E>;
 }

-pub trait Index: Insert + Update {
-    fn index(&self) -> Result<()>;
+// This might need to change in the future
+pub trait Index<T, E>: Insert<T, E> + Update<T, E> {
+    fn index(&self) -> Result<T, E>;
 }

 /// FIXME: DOCS
-pub trait Save<T> {
+pub trait Save<T, E> {
     /// Helper method to easily save/"sync" current state of a diesel model to the Database.
-    fn save(&self) -> Result<T>;
+    fn save(&self) -> Result<T, E>;
 }
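The trait changes are the quiet enabler here: `Insert`, `Update`, `Index`, and `Save` gain `<T, E>` parameters so each implementor names its own error type rather than inheriting the crate-wide `Result` alias. In this commit every implementor picks `DataError`, but the shape permits anything. An illustrative implementor (the `Stub` types are made up for the example):

```rust
pub trait Save<T, E> {
    /// Helper method to easily save/"sync" current state of a diesel model to the Database.
    fn save(&self) -> Result<T, E>;
}

struct Stub;

#[derive(Debug)]
struct StubError;

// Any (T, E) pair works; the hammond models all choose E = DataError.
impl Save<usize, StubError> for Stub {
    fn save(&self) -> Result<usize, StubError> {
        Ok(0)
    }
}
```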
hammond-data/src/models/new_episode.rs

@@ -1,17 +1,15 @@
-use diesel::prelude::*;
-
-use diesel;
-use schema::episode;
-
 use ammonia;
+use diesel;
+use diesel::prelude::*;
 use rfc822_sanitizer::parse_from_rfc2822_with_fallback as parse_rfc822;
 use rss;

 use database::connection;
 use dbqueries;
-use errors::*;
+use errors::DataError;
 use models::{Episode, EpisodeMinimal, Index, Insert, Update};
 use parser;
+use schema::episode;
 use utils::{replace_extra_spaces, url_cleaner};

 #[derive(Insertable, AsChangeset)]
@@ -45,8 +43,8 @@ impl From<NewEpisodeMinimal> for NewEpisode {
     }
 }

-impl Insert for NewEpisode {
-    fn insert(&self) -> Result<()> {
+impl Insert<(), DataError> for NewEpisode {
+    fn insert(&self) -> Result<(), DataError> {
         use schema::episode::dsl::*;
         let db = connection();
         let con = db.get()?;
@@ -60,8 +58,8 @@ impl Insert for NewEpisode {
     }
 }

-impl Update for NewEpisode {
-    fn update(&self, episode_id: i32) -> Result<()> {
+impl Update<(), DataError> for NewEpisode {
+    fn update(&self, episode_id: i32) -> Result<(), DataError> {
         use schema::episode::dsl::*;
         let db = connection();
         let con = db.get()?;
@@ -75,9 +73,9 @@ impl Update for NewEpisode {
     }
 }

-impl Index for NewEpisode {
+impl Index<(), DataError> for NewEpisode {
     // Does not update the episode description if it's the only thing that has changed.
-    fn index(&self) -> Result<()> {
+    fn index(&self) -> Result<(), DataError> {
         let exists = dbqueries::episode_exists(self.title(), self.podcast_id())?;

         if exists {
@@ -115,14 +113,14 @@ impl PartialEq<Episode> for NewEpisode {
 impl NewEpisode {
     /// Parses an `rss::Item` into a `NewEpisode` Struct.
     #[allow(dead_code)]
-    pub(crate) fn new(item: &rss::Item, podcast_id: i32) -> Result<Self> {
+    pub(crate) fn new(item: &rss::Item, podcast_id: i32) -> Result<Self, DataError> {
         NewEpisodeMinimal::new(item, podcast_id).map(|ep| ep.into_new_episode(item))
     }

     #[allow(dead_code)]
-    pub(crate) fn to_episode(&self) -> Result<Episode> {
+    pub(crate) fn to_episode(&self) -> Result<Episode, DataError> {
         self.index()?;
-        dbqueries::get_episode_from_pk(&self.title, self.podcast_id)
+        dbqueries::get_episode_from_pk(&self.title, self.podcast_id).map_err(From::from)
     }
 }

@@ -184,9 +182,14 @@ impl PartialEq<EpisodeMinimal> for NewEpisodeMinimal {
 }

 impl NewEpisodeMinimal {
-    pub(crate) fn new(item: &rss::Item, parent_id: i32) -> Result<Self> {
+    pub(crate) fn new(item: &rss::Item, parent_id: i32) -> Result<Self, DataError> {
         if item.title().is_none() {
-            bail!("No title specified for the item.")
+            let err = DataError::ParseEpisodeError {
+                reason: format!("No title specified for this Episode."),
+                parent_id,
+            };
+
+            return Err(err);
         }

         let title = item.title().unwrap().trim().to_owned();
@@ -197,7 +200,12 @@ impl NewEpisodeMinimal {
         } else if item.link().is_some() {
             item.link().map(|s| url_cleaner(s))
         } else {
-            bail!("No url specified for the item.")
+            let err = DataError::ParseEpisodeError {
+                reason: format!("No url specified for the item."),
+                parent_id,
+            };
+
+            return Err(err);
         };

         // Default to rfc2822 represantation of epoch 0.
hammond-data/src/models/new_podcast.rs

@@ -4,6 +4,7 @@ use diesel::prelude::*;
 use ammonia;
 use rss;

+use errors::DataError;
 use models::{Index, Insert, Update};
 use models::Podcast;
 use schema::podcast;
@@ -12,8 +13,6 @@ use database::connection;
 use dbqueries;
 use utils::{replace_extra_spaces, url_cleaner};

-use errors::*;
-
 #[derive(Insertable, AsChangeset)]
 #[table_name = "podcast"]
 #[derive(Debug, Clone, Default, Builder, PartialEq)]
@@ -28,8 +27,8 @@ pub(crate) struct NewPodcast {
     source_id: i32,
 }

-impl Insert for NewPodcast {
-    fn insert(&self) -> Result<()> {
+impl Insert<(), DataError> for NewPodcast {
+    fn insert(&self) -> Result<(), DataError> {
         use schema::podcast::dsl::*;
         let db = connection();
         let con = db.get()?;
@@ -42,8 +41,8 @@ impl Insert for NewPodcast {
     }
 }

-impl Update for NewPodcast {
-    fn update(&self, podcast_id: i32) -> Result<()> {
+impl Update<(), DataError> for NewPodcast {
+    fn update(&self, podcast_id: i32) -> Result<(), DataError> {
         use schema::podcast::dsl::*;
         let db = connection();
         let con = db.get()?;
@@ -59,8 +58,8 @@ impl Update for NewPodcast {

 // TODO: Maybe return an Enum<Action(Resut)> Instead.
 // It would make unti testing better too.
-impl Index for NewPodcast {
-    fn index(&self) -> Result<()> {
+impl Index<(), DataError> for NewPodcast {
+    fn index(&self) -> Result<(), DataError> {
         let exists = dbqueries::podcast_exists(self.source_id)?;

         if exists {
@@ -119,7 +118,7 @@ impl NewPodcast {
     }

     // Look out for when tryinto lands into stable.
-    pub(crate) fn to_podcast(&self) -> Result<Podcast> {
+    pub(crate) fn to_podcast(&self) -> Result<Podcast, DataError> {
         self.index()?;
         dbqueries::get_podcast_from_source_id(self.source_id).map_err(From::from)
     }
hammond-data/src/models/new_source.rs

@@ -7,11 +7,10 @@ use url::Url;
 use database::connection;
 use dbqueries;
 // use models::{Insert, Update};
+use errors::DataError;
 use models::Source;
 use schema::source;

-use errors::*;
-
 #[derive(Insertable)]
 #[table_name = "source"]
 #[derive(Debug, Clone, Default, Builder, PartialEq)]
@@ -33,7 +32,7 @@ impl NewSource {
         }
     }

-    pub(crate) fn insert_or_ignore(&self) -> Result<()> {
+    pub(crate) fn insert_or_ignore(&self) -> Result<(), DataError> {
         use schema::source::dsl::*;
         let db = connection();
         let con = db.get()?;
@@ -46,8 +45,8 @@ impl NewSource {
     }

     // Look out for when tryinto lands into stable.
-    pub(crate) fn to_source(&self) -> Result<Source> {
+    pub(crate) fn to_source(&self) -> Result<Source, DataError> {
         self.insert_or_ignore()?;
-        dbqueries::get_source_from_uri(&self.uri)
+        dbqueries::get_source_from_uri(&self.uri).map_err(From::from)
     }
 }
hammond-data/src/models/podcast.rs

@@ -1,7 +1,7 @@
 use diesel::SaveChangesDsl;

 use database::connection;
-use errors::*;
+use errors::DataError;
 use models::{Save, Source};
 use schema::podcast;

@@ -23,9 +23,9 @@ pub struct Podcast {
     source_id: i32,
 }

-impl Save<Podcast> for Podcast {
+impl Save<Podcast, DataError> for Podcast {
     /// Helper method to easily save/"sync" current state of self to the Database.
-    fn save(&self) -> Result<Podcast> {
+    fn save(&self) -> Result<Podcast, DataError> {
         let db = connection();
         let tempdb = db.get()?;

hammond-data/src/models/source.rs

@@ -1,4 +1,5 @@
 use diesel::SaveChangesDsl;
+// use failure::ResultExt;
 use rss::Channel;
 use url::Url;

@@ -13,7 +14,7 @@ use futures::prelude::*;
 use futures_cpupool::CpuPool;

 use database::connection;
-use errors::*;
+use errors::DataError;
 use feed::{Feed, FeedBuilder};
 use models::{NewSource, Save};
 use schema::source;
@@ -32,9 +33,9 @@ pub struct Source {
     http_etag: Option<String>,
 }

-impl Save<Source> for Source {
+impl Save<Source, DataError> for Source {
     /// Helper method to easily save/"sync" current state of self to the Database.
-    fn save(&self) -> Result<Source> {
+    fn save(&self) -> Result<Source, DataError> {
         let db = connection();
         let con = db.get()?;

@@ -85,7 +86,7 @@ impl Source {

     /// Extract Etag and LastModifier from res, and update self and the
     /// corresponding db row.
-    fn update_etag(&mut self, res: &Response) -> Result<()> {
+    fn update_etag(&mut self, res: &Response) -> Result<(), DataError> {
         let headers = res.headers();

         let etag = headers.get::<ETag>().map(|x| x.tag());
@@ -109,29 +110,77 @@ impl Source {
     // 403: Forbidden
     // 408: Timeout
     // 410: Feed deleted
-    fn match_status(mut self, res: Response) -> Result<(Self, Response)> {
+    // TODO: Rething this api,
+    fn match_status(mut self, res: Response) -> Result<(Self, Response), DataError> {
         self.update_etag(&res)?;
         let code = res.status();
         match code {
-            StatusCode::NotModified => bail!("304: skipping.."),
+            StatusCode::NotModified => {
+                let err = DataError::HttpStatusError {
+                    url: self.uri,
+                    status_code: code,
+                    context: format!("304: skipping.."),
+                };
+
+                return Err(err);
+            }
             StatusCode::MovedPermanently => {
                 error!("Feed was moved permanently.");
                 self.handle_301(&res)?;
-                bail!("301: Feed was moved permanently.")
+
+                let err = DataError::HttpStatusError {
+                    url: self.uri,
+                    status_code: code,
+                    context: format!("301: Feed was moved permanently."),
+                };
+
+                return Err(err);
             }
             StatusCode::TemporaryRedirect => debug!("307: Temporary Redirect."),
             StatusCode::PermanentRedirect => warn!("308: Permanent Redirect."),
-            StatusCode::Unauthorized => bail!("401: Unauthorized."),
-            StatusCode::Forbidden => bail!("403: Forbidden."),
-            StatusCode::NotFound => bail!("404: Not found."),
-            StatusCode::RequestTimeout => bail!("408: Request Timeout."),
-            StatusCode::Gone => bail!("410: Feed was deleted."),
+            StatusCode::Unauthorized => {
+                let err = DataError::HttpStatusError {
+                    url: self.uri,
+                    status_code: code,
+                    context: format!("401: Unauthorized."),
+                };
+
+                return Err(err);
+            }
+            StatusCode::Forbidden => {
+                let err = DataError::HttpStatusError {
+                    url: self.uri,
+                    status_code: code,
+                    context: format!("403: Forbidden."),
+                };
+
+                return Err(err);
+            }
+            StatusCode::NotFound => return Err(format!("404: Not found.")).map_err(From::from),
+            StatusCode::RequestTimeout => {
+                let err = DataError::HttpStatusError {
+                    url: self.uri,
+                    status_code: code,
+                    context: format!("408: Request Timeout."),
+                };
+
+                return Err(err);
+            }
+            StatusCode::Gone => {
+                let err = DataError::HttpStatusError {
+                    url: self.uri,
+                    status_code: code,
+                    context: format!("410: Feed was deleted.."),
+                };
+
+                return Err(err);
+            }
             _ => info!("HTTP StatusCode: {}", code),
         };
         Ok((self, res))
     }

-    fn handle_301(&mut self, res: &Response) -> Result<()> {
+    fn handle_301(&mut self, res: &Response) -> Result<(), DataError> {
         let headers = res.headers();

         if let Some(url) = headers.get::<Location>() {
@@ -150,7 +199,7 @@ impl Source {
     /// Construct a new `Source` with the given `uri` and index it.
     ///
     /// This only indexes the `Source` struct, not the Podcast Feed.
-    pub fn from_url(uri: &str) -> Result<Source> {
+    pub fn from_url(uri: &str) -> Result<Source, DataError> {
         let url = Url::parse(uri)?;

         NewSource::new(&url).to_source()
@@ -169,7 +218,7 @@ impl Source {
         client: &Client<HttpsConnector<HttpConnector>>,
         pool: CpuPool,
         ignore_etags: bool,
-    ) -> Box<Future<Item = Feed, Error = Error>> {
+    ) -> Box<Future<Item = Feed, Error = DataError>> {
         let id = self.id();
         let feed = self.request_constructor(client, ignore_etags)
             .and_then(move |(_, res)| response_to_channel(res, pool))
@@ -190,7 +239,7 @@ impl Source {
         self,
         client: &Client<HttpsConnector<HttpConnector>>,
         ignore_etags: bool,
-    ) -> Box<Future<Item = (Self, Response), Error = Error>> {
+    ) -> Box<Future<Item = (Self, Response), Error = DataError>> {
         // FIXME: remove unwrap somehow
         let uri = Uri::from_str(self.uri()).unwrap();
         let mut req = Request::new(Method::Get, uri);
@@ -221,14 +270,16 @@ impl Source {
 fn response_to_channel(
     res: Response,
     pool: CpuPool,
-) -> Box<Future<Item = Channel, Error = Error> + Send> {
+) -> Box<Future<Item = Channel, Error = DataError> + Send> {
     let chan = res.body()
         .concat2()
         .map(|x| x.into_iter())
         .map_err(From::from)
         .map(|iter| iter.collect::<Vec<u8>>())
         .map(|utf_8_bytes| String::from_utf8_lossy(&utf_8_bytes).into_owned())
-        .and_then(|buf| Channel::from_str(&buf).map_err(From::from));
+        .and_then(|buf| {
+            Channel::from_str(&buf).or_else(|err| Err(DataError::RssCrateError(format!("{}", err))))
+        });
     let cpu_chan = pool.spawn(chan);
     Box::new(cpu_chan)
 }
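One payoff of replacing the `bail!` strings in `match_status` with structured `DataError::HttpStatusError` values: callers can now branch on the status code instead of string-matching messages. A hypothetical in-crate caller (assuming hyper 0.11's `StatusCode` is `Copy`, so binding it by value is fine):

```rust
fn report(err: &DataError) {
    match *err {
        // Structured fields are available directly, no message parsing.
        DataError::HttpStatusError { ref url, status_code, ref context } => {
            error!("request to {} failed with {}: {}", url, status_code, context);
        }
        // Everything else falls back to the derived Display impl.
        ref other => error!("{}", other),
    }
}
```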
|||||||
@ -15,11 +15,9 @@ use rss;
|
|||||||
|
|
||||||
use Source;
|
use Source;
|
||||||
use dbqueries;
|
use dbqueries;
|
||||||
use errors::*;
|
use errors::DataError;
|
||||||
use models::{IndexState, NewEpisode, NewEpisodeMinimal};
|
use models::{IndexState, NewEpisode, NewEpisodeMinimal};
|
||||||
// use Feed;
|
|
||||||
|
|
||||||
use std;
|
|
||||||
// use std::sync::{Arc, Mutex};
|
// use std::sync::{Arc, Mutex};
|
||||||
|
|
||||||
macro_rules! clone {
|
macro_rules! clone {
|
||||||
@ -51,7 +49,7 @@ pub fn pipeline<S: IntoIterator<Item = Source>>(
|
|||||||
tokio_core: &mut Core,
|
tokio_core: &mut Core,
|
||||||
pool: &CpuPool,
|
pool: &CpuPool,
|
||||||
client: Client<HttpsConnector<HttpConnector>>,
|
client: Client<HttpsConnector<HttpConnector>>,
|
||||||
) -> Result<()> {
|
) -> Result<(), DataError> {
|
||||||
let list: Vec<_> = sources
|
let list: Vec<_> = sources
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(clone!(pool => move |s| s.into_feed(&client, pool.clone(), ignore_etags)))
|
.map(clone!(pool => move |s| s.into_feed(&client, pool.clone(), ignore_etags)))
|
||||||
@@ -60,7 +58,7 @@ pub fn pipeline<S: IntoIterator<Item = Source>>(
         .collect();

     if list.is_empty() {
-        bail!("No futures were found to run.");
+        return Err(DataError::EmptyFuturesList);
     }

     // Thats not really concurrent yet I think.
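Where error-chain's `bail!` produced an ad-hoc, stringly-typed error, failure pushes each early return into a dedicated variant such as `EmptyFuturesList`. A sketch of the shape, assuming a unit variant whose display text matches the old `bail!` message:

    extern crate failure;
    #[macro_use]
    extern crate failure_derive;

    #[derive(Fail, Debug)]
    pub enum DataError {
        // Replaces `bail!("No futures were found to run.")`; the message
        // moves into the display attribute.
        #[fail(display = "No futures were found to run.")]
        EmptyFuturesList,
    }

    fn run_pipeline(list: &[u32]) -> Result<(), DataError> {
        if list.is_empty() {
            return Err(DataError::EmptyFuturesList);
        }
        Ok(())
    }

    fn main() {
        // Unlike a bail!-generated string, callers can match on the case.
        if let Err(DataError::EmptyFuturesList) = run_pipeline(&[]) {
            println!("nothing to do");
        }
    }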
@@ -70,7 +68,7 @@ pub fn pipeline<S: IntoIterator<Item = Source>>(
 }

 /// Creates a tokio `reactor::Core`, a `CpuPool`, and a `hyper::Client` and runs the pipeline.
-pub fn run(sources: Vec<Source>, ignore_etags: bool) -> Result<()> {
+pub fn run(sources: Vec<Source>, ignore_etags: bool) -> Result<(), DataError> {
     if sources.is_empty() {
         return Ok(());
     }
@@ -86,7 +84,7 @@ pub fn run(sources: Vec<Source>, ignore_etags: bool) -> Result<()> {
 }

 /// Docs
-pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<()> {
+pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<(), DataError> {
     let pool = CpuPool::new_num_cpus();
     let mut core = Core::new()?;
     let handle = core.handle();
@@ -102,7 +100,10 @@ pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<()> {
     core.run(work)
 }

-fn determine_ep_state(ep: NewEpisodeMinimal, item: &rss::Item) -> Result<IndexState<NewEpisode>> {
+fn determine_ep_state(
+    ep: NewEpisodeMinimal,
+    item: &rss::Item,
+) -> Result<IndexState<NewEpisode>, DataError> {
     // Check if feed exists
     let exists = dbqueries::episode_exists(ep.title(), ep.podcast_id())?;

@@ -123,7 +124,7 @@ fn determine_ep_state(ep: NewEpisodeMinimal, item: &rss::Item) -> Result<IndexSt
 pub(crate) fn glue_async<'a>(
     item: &'a rss::Item,
     id: i32,
-) -> Box<Future<Item = IndexState<NewEpisode>, Error = Error> + 'a> {
+) -> Box<Future<Item = IndexState<NewEpisode>, Error = DataError> + 'a> {
     Box::new(
         result(NewEpisodeMinimal::new(item, id)).and_then(move |ep| determine_ep_state(ep, item)),
     )
@@ -135,7 +136,7 @@ pub(crate) fn glue_async<'a>(
 #[cfg_attr(feature = "cargo-clippy", allow(type_complexity))]
 pub fn collect_futures<F>(
     futures: Vec<F>,
-) -> Box<Future<Item = Vec<std::result::Result<F::Item, F::Error>>, Error = Error>>
+) -> Box<Future<Item = Vec<Result<F::Item, F::Error>>, Error = DataError>>
 where
     F: 'static + Future,
     <F as Future>::Item: 'static,
@@ -7,7 +7,7 @@ use itertools::Itertools;
 use url::{Position, Url};

 use dbqueries;
-use errors::*;
+use errors::DataError;
 use models::{EpisodeCleanerQuery, Podcast, Save};
 use xdg_dirs::DL_DIR;

@@ -15,7 +15,7 @@ use std::fs;
 use std::path::Path;

 /// Scan downloaded `episode` entries that might have broken `local_uri`s and set them to `None`.
-fn download_checker() -> Result<()> {
+fn download_checker() -> Result<(), DataError> {
     let mut episodes = dbqueries::get_downloaded_episodes()?;

     episodes
@@ -25,7 +25,7 @@ fn download_checker() -> Result<()> {
             ep.set_local_uri(None);
             if let Err(err) = ep.save() {
                 error!("Error while trying to update episode: {:#?}", ep);
-                error!("Error: {}", err);
+                error!("{}", err);
             };
         });

@@ -33,7 +33,7 @@ fn download_checker() -> Result<()> {
 }

 /// Delete watched `episodes` that have exceded their liftime after played.
-fn played_cleaner() -> Result<()> {
+fn played_cleaner() -> Result<(), DataError> {
     let mut episodes = dbqueries::get_played_cleaner_episodes()?;

     let now_utc = Utc::now().timestamp() as i32;
@@ -47,7 +47,7 @@ fn played_cleaner() -> Result<()> {
             if now_utc > limit {
                 if let Err(err) = delete_local_content(ep) {
                     error!("Error while trying to delete file: {:?}", ep.local_uri());
-                    error!("Error: {}", err);
+                    error!("{}", err);
                 } else {
                     info!("Episode {:?} was deleted succesfully.", ep.local_uri());
                 };
@@ -57,7 +57,7 @@ fn played_cleaner() -> Result<()> {
 }

 /// Check `ep.local_uri` field and delete the file it points to.
-fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<()> {
+fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<(), DataError> {
     if ep.local_uri().is_some() {
         let uri = ep.local_uri().unwrap().to_owned();
         if Path::new(&uri).exists() {
@@ -67,7 +67,7 @@ fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<()> {
                 ep.save()?;
             } else {
                 error!("Error while trying to delete file: {}", uri);
-                error!("Error: {}", res.unwrap_err());
+                error!("{}", res.unwrap_err());
             };
         }
     } else {
@@ -86,7 +86,7 @@ fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<()> {
 ///
 /// Runs a cleaner for played Episode's that are pass the lifetime limit and
 /// scheduled for removal.
-pub fn checkup() -> Result<()> {
+pub fn checkup() -> Result<(), DataError> {
     info!("Running database checks.");
     download_checker()?;
     played_cleaner()?;
@@ -123,7 +123,7 @@ pub fn replace_extra_spaces(s: &str) -> String {
 }

 /// Returns the URI of a Podcast Downloads given it's title.
-pub fn get_download_folder(pd_title: &str) -> Result<String> {
+pub fn get_download_folder(pd_title: &str) -> Result<String, DataError> {
     // It might be better to make it a hash of the title or the podcast rowid
     let download_fold = format!("{}/{}", DL_DIR.to_str().unwrap(), pd_title);

@@ -137,7 +137,7 @@ pub fn get_download_folder(pd_title: &str) -> Result<String> {
 /// Removes all the entries associated with the given show from the database,
 /// and deletes all of the downloaded content.
 // TODO: Write Tests
-pub fn delete_show(pd: &Podcast) -> Result<()> {
+pub fn delete_show(pd: &Podcast) -> Result<(), DataError> {
     dbqueries::remove_feed(pd)?;
     info!("{} was removed succesfully.", pd.title());

@@ -12,6 +12,8 @@ mime_guess = "1.8.3"
 reqwest = "0.8.4"
 tempdir = "0.3.5"
 glob = "0.2.11"
+failure = "0.1.1"
+failure_derive = "0.1.1"

 [dependencies.hammond-data]
 path = "../hammond-data"
@@ -11,10 +11,12 @@ use std::io::{BufWriter, Read, Write};
 use std::path::Path;
 use std::sync::{Arc, Mutex};

-use errors::*;
 use hammond_data::{EpisodeWidgetQuery, PodcastCoverQuery, Save};
 use hammond_data::xdg_dirs::HAMMOND_CACHE;

+// use failure::Error;
+use errors::DownloadError;
+
 // TODO: Replace path that are of type &str with std::path.
 // TODO: Have a convention/document absolute/relative paths, if they should end with / or not.

@@ -36,7 +38,7 @@ fn download_into(
     file_title: &str,
     url: &str,
     progress: Option<Arc<Mutex<DownloadProgress>>>,
-) -> Result<String> {
+) -> Result<String, DownloadError> {
     info!("GET request to: {}", url);
     // Haven't included the loop check as
     // Steal the Stars would tigger it as
@@ -60,7 +62,7 @@ fn download_into(
     info!("Status Resp: {}", resp.status());

     if !resp.status().is_success() {
-        bail!("Unexpected server response: {}", resp.status())
+        return Err(DownloadError::UnexpectedResponse(resp.status()));
     }

     let headers = resp.headers().clone();
@@ -117,7 +119,7 @@ fn save_io(
     resp: &mut reqwest::Response,
     content_lenght: Option<u64>,
     progress: Option<Arc<Mutex<DownloadProgress>>>,
-) -> Result<()> {
+) -> Result<(), DownloadError> {
     info!("Downloading into: {}", file);
     let chunk_size = match content_lenght {
         Some(x) => x as usize / 99,
@@ -139,7 +141,7 @@ fn save_io(
         if let Ok(l) = len {
             if let Ok(mut m) = prog.lock() {
                 if m.should_cancel() {
-                    bail!("Download was cancelled.");
+                    return Err(DownloadError::DownloadCancelled);
                 }
                 m.set_downloaded(l);
             }
@@ -158,7 +160,7 @@ pub fn get_episode(
     ep: &mut EpisodeWidgetQuery,
     download_folder: &str,
     progress: Option<Arc<Mutex<DownloadProgress>>>,
-) -> Result<()> {
+) -> Result<(), DownloadError> {
     // Check if its alrdy downloaded
     if ep.local_uri().is_some() {
         if Path::new(ep.local_uri().unwrap()).exists() {
@@ -1,11 +1,35 @@
-use hammond_data;
+use hammond_data::errors::DataError;
 use reqwest;
 use std::io;

-error_chain! {
-    foreign_links {
-        ReqError(reqwest::Error);
-        IoError(io::Error);
-        DataError(hammond_data::errors::Error);
+#[derive(Fail, Debug)]
+pub enum DownloadError {
+    #[fail(display = "Reqwest error: {}", _0)]
+    RequestError(#[cause] reqwest::Error),
+    #[fail(display = "Data error: {}", _0)]
+    DataError(#[cause] DataError),
+    #[fail(display = "Io error: {}", _0)]
+    IoError(#[cause] io::Error),
+    #[fail(display = "The Download was cancelled")]
+    DownloadCancelled,
+    #[fail(display = "Unexpected server response: {}", _0)]
+    UnexpectedResponse(reqwest::StatusCode),
+}
+
+impl From<reqwest::Error> for DownloadError {
+    fn from(err: reqwest::Error) -> Self {
+        DownloadError::RequestError(err)
+    }
+}
+
+impl From<io::Error> for DownloadError {
+    fn from(err: io::Error) -> Self {
+        DownloadError::IoError(err)
+    }
+}
+
+impl From<DataError> for DownloadError {
+    fn from(err: DataError) -> Self {
+        DownloadError::DataError(err)
     }
 }
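The hand-written `From` impls above take over what error-chain's `foreign_links` block generated implicitly: they are what lets `?` lift a `reqwest::Error`, `io::Error`, or `DataError` into `DownloadError` without an explicit `map_err`. A reduced, self-contained sketch of the mechanism (the `read_part_file` caller is hypothetical, not part of this commit):

    use std::fs::File;
    use std::io::{self, Read};

    #[derive(Debug)]
    pub enum DownloadError {
        IoError(io::Error),
    }

    impl From<io::Error> for DownloadError {
        fn from(err: io::Error) -> Self {
            DownloadError::IoError(err)
        }
    }

    // `?` converts io::Error into DownloadError through the From impl.
    fn read_part_file(path: &str) -> Result<String, DownloadError> {
        let mut buf = String::new();
        File::open(path)?.read_to_string(&mut buf)?;
        Ok(buf)
    }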
@@ -1,13 +1,15 @@
 #![recursion_limit = "1024"]
 #![deny(unused_extern_crates, unused)]

+extern crate failure;
 #[macro_use]
-extern crate error_chain;
+extern crate failure_derive;
+#[macro_use]
+extern crate log;
+
 extern crate glob;
 extern crate hammond_data;
 extern crate hyper;
-#[macro_use]
-extern crate log;
 extern crate mime_guess;
 extern crate reqwest;
 extern crate tempdir;
@@ -20,6 +20,8 @@ open = "1.2.1"
 rayon = "0.9.0"
 send-cell = "0.1.2"
 url = "1.6.0"
+failure = "0.1.1"
+failure_derive = "0.1.1"

 [dependencies.gtk]
 features = ["v3_22"]
@@ -1,5 +1,5 @@
 #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr, needless_pass_by_value))]
-#![deny(unused_extern_crates, unused)]
+// #![deny(unused_extern_crates, unused)]

 extern crate gdk;
 extern crate gdk_pixbuf;
@@ -7,15 +7,20 @@ extern crate gio;
 extern crate glib;
 extern crate gtk;

+#[macro_use]
+extern crate failure;
+#[macro_use]
+extern crate failure_derive;
+#[macro_use]
+extern crate lazy_static;
+#[macro_use]
+extern crate log;
+
 extern crate chrono;
 extern crate dissolve;
 extern crate hammond_data;
 extern crate hammond_downloader;
 extern crate humansize;
-#[macro_use]
-extern crate lazy_static;
-#[macro_use]
-extern crate log;
 extern crate loggerv;
 extern crate open;
 extern crate send_cell;
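On 2015-edition Rust the derive and helper macros are only visible with `#[macro_use]` on the extern crate declaration, which is why the crate roots gain these pairs. A sketch of the pattern, assuming failure 0.1 conventions:

    // Brings bail!, format_err!, etc. into scope (hammond-gtk uses this
    // form; hammond-downloader omits #[macro_use] on failure because it
    // only needs the Fail trait and types).
    #[macro_use]
    extern crate failure;
    // Brings #[derive(Fail)] and the #[fail(...)] attribute into scope.
    #[macro_use]
    extern crate failure_derive;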
@@ -4,12 +4,12 @@ use gtk;
 use chrono::prelude::*;
 use gtk::prelude::*;

+use failure::Error;
 use humansize::{file_size_opts as size_opts, FileSize};
 use open;

 use hammond_data::{EpisodeWidgetQuery, Podcast};
 use hammond_data::dbqueries;
-use hammond_data::errors::*;
 use hammond_data::utils::get_download_folder;

 use app::Action;
@@ -368,7 +368,7 @@ fn update_total_size_callback(prog: Arc<Mutex<manager::Progress>>, total_size: g
 // };
 // }

-pub fn episodes_listbox(pd: &Podcast, sender: Sender<Action>) -> Result<gtk::ListBox> {
+pub fn episodes_listbox(pd: &Podcast, sender: Sender<Action>) -> Result<gtk::ListBox, Error> {
     let mut episodes = dbqueries::get_pd_episodeswidgets(pd)?;

     let list = gtk::ListBox::new();
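The widget code drops the glob import of hammond-data's error types in favor of the type-erased `failure::Error`: since `failure::Error` converts from any `Fail` type (and from std error types that are `Send + Sync + 'static`), one signature can carry both hammond-data errors and UI-side ones. A minimal sketch, with a made-up `collect_ids` standing in for `episodes_listbox`:

    extern crate failure;

    use failure::Error;
    use std::fs;

    // Two different concrete error types (io::Error from read_to_string,
    // ParseIntError from parse) both funnel into failure::Error via `?`
    // and From.
    fn collect_ids(path: &str) -> Result<Vec<u32>, Error> {
        let raw = fs::read_to_string(path)?;
        raw.lines()
            .map(|l| l.parse::<u32>().map_err(Error::from))
            .collect()
    }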