From f9f015a21118163e20e68ef3018ace5f86301449 Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Sun, 4 Feb 2018 16:33:10 +0200 Subject: [PATCH 01/18] Hammond-data: switch database module to use Failure. --- Cargo.lock | 4 ++++ hammond-data/Cargo.toml | 2 ++ hammond-data/src/database.rs | 6 +++--- hammond-data/src/errors.rs | 8 ++++++++ hammond-data/src/lib.rs | 5 ++++- hammond-downloader/Cargo.toml | 2 ++ hammond-downloader/src/downloader.rs | 2 ++ hammond-downloader/src/errors.rs | 9 +++++++++ hammond-downloader/src/lib.rs | 10 +++++++--- hammond-gtk/src/main.rs | 9 +++++---- 10 files changed, 46 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6c1d6ad..76aba78 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -617,6 +617,8 @@ dependencies = [ "diesel_migrations 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "dotenv 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", "error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "failure_derive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", "futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "hyper 0.11.15 (registry+https://github.com/rust-lang/crates.io-index)", @@ -642,6 +644,8 @@ name = "hammond-downloader" version = "0.1.0" dependencies = [ "error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "failure_derive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "hammond-data 0.1.0", "hyper 0.11.15 (registry+https://github.com/rust-lang/crates.io-index)", diff --git a/hammond-data/Cargo.toml b/hammond-data/Cargo.toml index 01e42cf..fa8e733 100644 --- 
a/hammond-data/Cargo.toml +++ b/hammond-data/Cargo.toml @@ -26,6 +26,8 @@ hyper-tls = "0.1.2" native-tls = "0.1.5" futures-cpupool = "0.1.8" num_cpus = "1.8.0" +failure = "0.1.1" +failure_derive = "0.1.1" [dependencies.diesel] features = ["sqlite", "r2d2"] diff --git a/hammond-data/src/database.rs b/hammond-data/src/database.rs index 0a37aaa..8cc0a5f 100644 --- a/hammond-data/src/database.rs +++ b/hammond-data/src/database.rs @@ -7,7 +7,7 @@ use diesel::r2d2::ConnectionManager; use std::io; use std::path::PathBuf; -use errors::*; +use failure::Error; #[cfg(not(test))] use xdg_dirs; @@ -57,7 +57,7 @@ fn init_pool(db_path: &str) -> Pool { pool } -fn run_migration_on(connection: &SqliteConnection) -> Result<()> { +fn run_migration_on(connection: &SqliteConnection) -> Result<(), Error> { info!("Running DB Migrations..."); // embedded_migrations::run(connection)?; embedded_migrations::run_with_output(connection, &mut io::stdout()).map_err(From::from) @@ -66,7 +66,7 @@ fn run_migration_on(connection: &SqliteConnection) -> Result<()> { /// Reset the database into a clean state. // Test share a Temp file db. #[allow(dead_code)] -pub fn truncate_db() -> Result<()> { +pub fn truncate_db() -> Result<(), Error> { let db = connection(); let con = db.get()?; con.execute("DELETE FROM episode")?; diff --git a/hammond-data/src/errors.rs b/hammond-data/src/errors.rs index 6a2d00c..299cbf6 100644 --- a/hammond-data/src/errors.rs +++ b/hammond-data/src/errors.rs @@ -23,3 +23,11 @@ error_chain! 
{ IoError(io::Error); } } + +#[derive(Fail, Debug)] +pub enum DatabaseError { + #[fail(display = "SQL Query failed: {}", _0)] DieselResultError(diesel::result::Error), + #[fail(display = "Database Migration error: {}", _0)] DieselMigrationError(RunMigrationsError), + #[fail(display = "R2D2 error: {}", _0)] R2D2Error(r2d2::Error), + #[fail(display = "R2D2 Pool error: {}", _0)] R2D2PoolError(r2d2::PoolError), +} diff --git a/hammond-data/src/lib.rs b/hammond-data/src/lib.rs index 399d119..e6fe7cc 100644 --- a/hammond-data/src/lib.rs +++ b/hammond-data/src/lib.rs @@ -17,7 +17,7 @@ unconditional_recursion, unions_with_drop_fields, unused_allocation, unused_comparisons, unused_parens, while_true)] #![deny(missing_debug_implementations, missing_docs, trivial_casts, trivial_numeric_casts)] -#![deny(unused_extern_crates, unused)] +// #![deny(unused_extern_crates, unused)] // #![feature(conservative_impl_trait)] @@ -30,12 +30,15 @@ extern crate diesel_migrations; #[macro_use] extern crate error_chain; #[macro_use] +extern crate failure_derive; +#[macro_use] extern crate lazy_static; #[macro_use] extern crate log; extern crate ammonia; extern crate chrono; +extern crate failure; extern crate futures; extern crate futures_cpupool; extern crate hyper; diff --git a/hammond-downloader/Cargo.toml b/hammond-downloader/Cargo.toml index 29a2b34..48fc70e 100644 --- a/hammond-downloader/Cargo.toml +++ b/hammond-downloader/Cargo.toml @@ -12,6 +12,8 @@ mime_guess = "1.8.3" reqwest = "0.8.4" tempdir = "0.3.5" glob = "0.2.11" +failure = "0.1.1" +failure_derive = "0.1.1" [dependencies.hammond-data] path = "../hammond-data" diff --git a/hammond-downloader/src/downloader.rs b/hammond-downloader/src/downloader.rs index b54573d..26ad08f 100644 --- a/hammond-downloader/src/downloader.rs +++ b/hammond-downloader/src/downloader.rs @@ -15,6 +15,8 @@ use errors::*; use hammond_data::{EpisodeWidgetQuery, PodcastCoverQuery, Save}; use hammond_data::xdg_dirs::HAMMOND_CACHE; +// use failure::Error; + 
// TODO: Replace path that are of type &str with std::path. // TODO: Have a convention/document absolute/relative paths, if they should end with / or not. diff --git a/hammond-downloader/src/errors.rs b/hammond-downloader/src/errors.rs index a87f2ca..7ff0295 100644 --- a/hammond-downloader/src/errors.rs +++ b/hammond-downloader/src/errors.rs @@ -9,3 +9,12 @@ error_chain! { DataError(hammond_data::errors::Error); } } + +#[derive(Fail, Debug)] +enum DownloaderError { + #[fail(display = "Reqwest error: {}", _0)] RequestError(reqwest::Error), + // NOT SYNC. + // #[fail(display = "Data error: {}", _0)] + // DataError(hammond_data::errors::Error), + #[fail(display = "Io error: {}", _0)] IoError(io::Error), +} diff --git a/hammond-downloader/src/lib.rs b/hammond-downloader/src/lib.rs index 82be1f6..cd6b3d3 100644 --- a/hammond-downloader/src/lib.rs +++ b/hammond-downloader/src/lib.rs @@ -1,13 +1,17 @@ #![recursion_limit = "1024"] -#![deny(unused_extern_crates, unused)] +// #![deny(unused_extern_crates, unused)] #[macro_use] extern crate error_chain; +#[macro_use] +extern crate failure_derive; +#[macro_use] +extern crate log; + +extern crate failure; extern crate glob; extern crate hammond_data; extern crate hyper; -#[macro_use] -extern crate log; extern crate mime_guess; extern crate reqwest; extern crate tempdir; diff --git a/hammond-gtk/src/main.rs b/hammond-gtk/src/main.rs index b36e2ef..7245aee 100644 --- a/hammond-gtk/src/main.rs +++ b/hammond-gtk/src/main.rs @@ -7,15 +7,16 @@ extern crate gio; extern crate glib; extern crate gtk; +#[macro_use] +extern crate lazy_static; +#[macro_use] +extern crate log; + extern crate chrono; extern crate dissolve; extern crate hammond_data; extern crate hammond_downloader; extern crate humansize; -#[macro_use] -extern crate lazy_static; -#[macro_use] -extern crate log; extern crate loggerv; extern crate open; extern crate send_cell; From de43cae0157b78d2601cea702713bf0925f5aca9 Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: 
Sun, 4 Feb 2018 17:36:27 +0200 Subject: [PATCH 02/18] Switch rest stuff of data/downloader to Failure Crate. --- Cargo.lock | 2 ++ hammond-data/src/dbqueries.rs | 6 ++++- hammond-data/src/errors.rs | 33 ++++++++++++++------------ hammond-data/src/feed.rs | 2 +- hammond-data/src/lib.rs | 3 +-- hammond-data/src/models/episode.rs | 12 +++++----- hammond-data/src/models/mod.rs | 10 ++++---- hammond-data/src/models/new_episode.rs | 26 ++++++++++---------- hammond-data/src/models/new_podcast.rs | 11 ++++----- hammond-data/src/models/new_source.rs | 7 +++--- hammond-data/src/models/podcast.rs | 4 ++-- hammond-data/src/models/source.rs | 19 ++++++++------- hammond-data/src/pipeline.rs | 16 +++++++------ hammond-data/src/utils.rs | 15 ++++++------ hammond-downloader/src/downloader.rs | 6 +++-- hammond-downloader/src/errors.rs | 12 +--------- hammond-downloader/src/lib.rs | 3 +-- hammond-gtk/Cargo.toml | 2 ++ hammond-gtk/src/main.rs | 6 ++++- hammond-gtk/src/widgets/episode.rs | 4 ++-- 20 files changed, 102 insertions(+), 97 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 76aba78..c7dd21a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -661,6 +661,8 @@ version = "0.1.0" dependencies = [ "chrono 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "dissolve 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "failure_derive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "gdk 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "gdk-pixbuf 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "gio 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", diff --git a/hammond-data/src/dbqueries.rs b/hammond-data/src/dbqueries.rs index 7fe277f..5a4417a 100644 --- a/hammond-data/src/dbqueries.rs +++ b/hammond-data/src/dbqueries.rs @@ -6,11 +6,15 @@ use diesel::prelude::*; use diesel; use diesel::dsl::exists; use diesel::select; +use 
failure::Error; use database::connection; -use errors::*; use models::*; +// Feel free to open a Merge request that manually replaces Result if you feel bored. +use std::result; +type Result = result::Result; + pub fn get_sources() -> Result> { use schema::source::dsl::*; let db = connection(); diff --git a/hammond-data/src/errors.rs b/hammond-data/src/errors.rs index 299cbf6..b4f0549 100644 --- a/hammond-data/src/errors.rs +++ b/hammond-data/src/errors.rs @@ -4,25 +4,20 @@ use diesel_migrations::RunMigrationsError; use hyper; use native_tls; use reqwest; -use rss; +// use rss; use url; use std::io; -error_chain! { - foreign_links { - DieselResultError(diesel::result::Error); - DieselMigrationError(RunMigrationsError); - R2D2Error(r2d2::Error); - R2D2PoolError(r2d2::PoolError); - RSSError(rss::Error); - ReqError(reqwest::Error); - HyperError(hyper::Error); - UrlError(url::ParseError); - TLSError(native_tls::Error); - IoError(io::Error); - } -} +#[allow(dead_code)] +#[derive(Fail, Debug)] +#[fail(display = "IO Error: {}", _0)] +struct IOError(io::Error); + +// fadsadfs NOT SYNC +// #[derive(Fail, Debug)] +// #[fail(display = "RSS Error: {}", _0)] +// struct RSSError(rss::Error); #[derive(Fail, Debug)] pub enum DatabaseError { @@ -31,3 +26,11 @@ pub enum DatabaseError { #[fail(display = "R2D2 error: {}", _0)] R2D2Error(r2d2::Error), #[fail(display = "R2D2 Pool error: {}", _0)] R2D2PoolError(r2d2::PoolError), } + +#[derive(Fail, Debug)] +pub enum HttpError { + #[fail(display = "Reqwest Error: {}", _0)] ReqError(reqwest::Error), + #[fail(display = "Hyper Error: {}", _0)] HyperError(hyper::Error), + #[fail(display = "Url Error: {}", _0)] UrlError(url::ParseError), + #[fail(display = "TLS Error: {}", _0)] TLSError(native_tls::Error), +} diff --git a/hammond-data/src/feed.rs b/hammond-data/src/feed.rs index 9f39cd0..998fab1 100644 --- a/hammond-data/src/feed.rs +++ b/hammond-data/src/feed.rs @@ -1,11 +1,11 @@ //! Index Feeds. 
+use failure::Error; use futures::future::*; use itertools::{Either, Itertools}; use rss; use dbqueries; -use errors::*; use models::{Index, IndexState, Update}; use models::{NewEpisode, NewPodcast, Podcast}; use pipeline::*; diff --git a/hammond-data/src/lib.rs b/hammond-data/src/lib.rs index e6fe7cc..9602c5c 100644 --- a/hammond-data/src/lib.rs +++ b/hammond-data/src/lib.rs @@ -28,7 +28,7 @@ extern crate diesel; #[macro_use] extern crate diesel_migrations; #[macro_use] -extern crate error_chain; +extern crate failure; #[macro_use] extern crate failure_derive; #[macro_use] @@ -38,7 +38,6 @@ extern crate log; extern crate ammonia; extern crate chrono; -extern crate failure; extern crate futures; extern crate futures_cpupool; extern crate hyper; diff --git a/hammond-data/src/models/episode.rs b/hammond-data/src/models/episode.rs index 948422c..afb1c2a 100644 --- a/hammond-data/src/models/episode.rs +++ b/hammond-data/src/models/episode.rs @@ -2,9 +2,9 @@ use chrono::prelude::*; use diesel; use diesel::SaveChangesDsl; use diesel::prelude::*; +use failure::Error; use database::connection; -use errors::*; use models::{Podcast, Save}; use schema::episode; @@ -33,7 +33,7 @@ pub struct Episode { impl Save for Episode { /// Helper method to easily save/"sync" current state of self to the Database. - fn save(&self) -> Result { + fn save(&self) -> Result { let db = connection(); let tempdb = db.get()?; @@ -180,7 +180,7 @@ impl Episode { } /// Sets the `played` value with the current `epoch` timestap and save it. - pub fn set_played_now(&mut self) -> Result<()> { + pub fn set_played_now(&mut self) -> Result<(), Error> { let epoch = Utc::now().timestamp() as i32; self.set_played(Some(epoch)); self.save().map(|_| ()) @@ -225,7 +225,7 @@ impl From for EpisodeWidgetQuery { impl Save for EpisodeWidgetQuery { /// Helper method to easily save/"sync" current state of self to the Database. 
- fn save(&self) -> Result { + fn save(&self) -> Result { use schema::episode::dsl::*; let db = connection(); @@ -342,7 +342,7 @@ impl EpisodeWidgetQuery { } /// Sets the `played` value with the current `epoch` timestap and save it. - pub fn set_played_now(&mut self) -> Result<()> { + pub fn set_played_now(&mut self) -> Result<(), Error> { let epoch = Utc::now().timestamp() as i32; self.set_played(Some(epoch)); self.save().map(|_| ()) @@ -363,7 +363,7 @@ pub struct EpisodeCleanerQuery { impl Save for EpisodeCleanerQuery { /// Helper method to easily save/"sync" current state of self to the Database. - fn save(&self) -> Result { + fn save(&self) -> Result { use schema::episode::dsl::*; let db = connection(); diff --git a/hammond-data/src/models/mod.rs b/hammond-data/src/models/mod.rs index 87efe30..8c2a89d 100644 --- a/hammond-data/src/models/mod.rs +++ b/hammond-data/src/models/mod.rs @@ -23,7 +23,7 @@ pub use self::episode::{Episode, EpisodeMinimal, EpisodeWidgetQuery}; pub use self::podcast::{Podcast, PodcastCoverQuery}; pub use self::source::Source; -use errors::*; +use failure::Error; #[derive(Debug, Clone, PartialEq)] pub enum IndexState { @@ -33,19 +33,19 @@ pub enum IndexState { } pub trait Insert { - fn insert(&self) -> Result<()>; + fn insert(&self) -> Result<(), Error>; } pub trait Update { - fn update(&self, i32) -> Result<()>; + fn update(&self, i32) -> Result<(), Error>; } pub trait Index: Insert + Update { - fn index(&self) -> Result<()>; + fn index(&self) -> Result<(), Error>; } /// FIXME: DOCS pub trait Save { /// Helper method to easily save/"sync" current state of a diesel model to the Database. 
- fn save(&self) -> Result; + fn save(&self) -> Result; } diff --git a/hammond-data/src/models/new_episode.rs b/hammond-data/src/models/new_episode.rs index 3de8043..6e487a7 100644 --- a/hammond-data/src/models/new_episode.rs +++ b/hammond-data/src/models/new_episode.rs @@ -1,17 +1,15 @@ -use diesel::prelude::*; - -use diesel; -use schema::episode; - use ammonia; +use diesel; +use diesel::prelude::*; +use failure::Error; use rfc822_sanitizer::parse_from_rfc2822_with_fallback as parse_rfc822; use rss; use database::connection; use dbqueries; -use errors::*; use models::{Episode, EpisodeMinimal, Index, Insert, Update}; use parser; +use schema::episode; use utils::{replace_extra_spaces, url_cleaner}; #[derive(Insertable, AsChangeset)] @@ -46,7 +44,7 @@ impl From for NewEpisode { } impl Insert for NewEpisode { - fn insert(&self) -> Result<()> { + fn insert(&self) -> Result<(), Error> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -61,7 +59,7 @@ impl Insert for NewEpisode { } impl Update for NewEpisode { - fn update(&self, episode_id: i32) -> Result<()> { + fn update(&self, episode_id: i32) -> Result<(), Error> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -77,7 +75,7 @@ impl Update for NewEpisode { impl Index for NewEpisode { // Does not update the episode description if it's the only thing that has changed. - fn index(&self) -> Result<()> { + fn index(&self) -> Result<(), Error> { let exists = dbqueries::episode_exists(self.title(), self.podcast_id())?; if exists { @@ -115,12 +113,12 @@ impl PartialEq for NewEpisode { impl NewEpisode { /// Parses an `rss::Item` into a `NewEpisode` Struct. 
#[allow(dead_code)] - pub(crate) fn new(item: &rss::Item, podcast_id: i32) -> Result { + pub(crate) fn new(item: &rss::Item, podcast_id: i32) -> Result { NewEpisodeMinimal::new(item, podcast_id).map(|ep| ep.into_new_episode(item)) } #[allow(dead_code)] - pub(crate) fn to_episode(&self) -> Result { + pub(crate) fn to_episode(&self) -> Result { self.index()?; dbqueries::get_episode_from_pk(&self.title, self.podcast_id) } @@ -184,7 +182,7 @@ impl PartialEq for NewEpisodeMinimal { } impl NewEpisodeMinimal { - pub(crate) fn new(item: &rss::Item, parent_id: i32) -> Result { + pub(crate) fn new(item: &rss::Item, parent_id: i32) -> Result { if item.title().is_none() { bail!("No title specified for the item.") } @@ -208,7 +206,7 @@ impl NewEpisodeMinimal { let duration = parser::parse_itunes_duration(item.itunes_ext()); - NewEpisodeMinimalBuilder::default() + Ok(NewEpisodeMinimalBuilder::default() .title(title) .uri(uri) .duration(duration) @@ -216,7 +214,7 @@ impl NewEpisodeMinimal { .guid(guid) .podcast_id(parent_id) .build() - .map_err(From::from) + .unwrap()) } pub(crate) fn into_new_episode(self, item: &rss::Item) -> NewEpisode { diff --git a/hammond-data/src/models/new_podcast.rs b/hammond-data/src/models/new_podcast.rs index f6da37c..d02f5df 100644 --- a/hammond-data/src/models/new_podcast.rs +++ b/hammond-data/src/models/new_podcast.rs @@ -2,6 +2,7 @@ use diesel; use diesel::prelude::*; use ammonia; +use failure::Error; use rss; use models::{Index, Insert, Update}; @@ -12,8 +13,6 @@ use database::connection; use dbqueries; use utils::{replace_extra_spaces, url_cleaner}; -use errors::*; - #[derive(Insertable, AsChangeset)] #[table_name = "podcast"] #[derive(Debug, Clone, Default, Builder, PartialEq)] @@ -29,7 +28,7 @@ pub(crate) struct NewPodcast { } impl Insert for NewPodcast { - fn insert(&self) -> Result<()> { + fn insert(&self) -> Result<(), Error> { use schema::podcast::dsl::*; let db = connection(); let con = db.get()?; @@ -43,7 +42,7 @@ impl Insert for 
NewPodcast { } impl Update for NewPodcast { - fn update(&self, podcast_id: i32) -> Result<()> { + fn update(&self, podcast_id: i32) -> Result<(), Error> { use schema::podcast::dsl::*; let db = connection(); let con = db.get()?; @@ -60,7 +59,7 @@ impl Update for NewPodcast { // TODO: Maybe return an Enum Instead. // It would make unti testing better too. impl Index for NewPodcast { - fn index(&self) -> Result<()> { + fn index(&self) -> Result<(), Error> { let exists = dbqueries::podcast_exists(self.source_id)?; if exists { @@ -119,7 +118,7 @@ impl NewPodcast { } // Look out for when tryinto lands into stable. - pub(crate) fn to_podcast(&self) -> Result { + pub(crate) fn to_podcast(&self) -> Result { self.index()?; dbqueries::get_podcast_from_source_id(self.source_id).map_err(From::from) } diff --git a/hammond-data/src/models/new_source.rs b/hammond-data/src/models/new_source.rs index 944e7c1..fbe79f1 100644 --- a/hammond-data/src/models/new_source.rs +++ b/hammond-data/src/models/new_source.rs @@ -2,6 +2,7 @@ use diesel; use diesel::prelude::*; +use failure::Error; use url::Url; use database::connection; @@ -10,8 +11,6 @@ use dbqueries; use models::Source; use schema::source; -use errors::*; - #[derive(Insertable)] #[table_name = "source"] #[derive(Debug, Clone, Default, Builder, PartialEq)] @@ -33,7 +32,7 @@ impl NewSource { } } - pub(crate) fn insert_or_ignore(&self) -> Result<()> { + pub(crate) fn insert_or_ignore(&self) -> Result<(), Error> { use schema::source::dsl::*; let db = connection(); let con = db.get()?; @@ -46,7 +45,7 @@ impl NewSource { } // Look out for when tryinto lands into stable. 
- pub(crate) fn to_source(&self) -> Result { + pub(crate) fn to_source(&self) -> Result { self.insert_or_ignore()?; dbqueries::get_source_from_uri(&self.uri) } diff --git a/hammond-data/src/models/podcast.rs b/hammond-data/src/models/podcast.rs index fb6ab74..dcfad9c 100644 --- a/hammond-data/src/models/podcast.rs +++ b/hammond-data/src/models/podcast.rs @@ -1,7 +1,7 @@ use diesel::SaveChangesDsl; +use failure::Error; use database::connection; -use errors::*; use models::{Save, Source}; use schema::podcast; @@ -25,7 +25,7 @@ pub struct Podcast { impl Save for Podcast { /// Helper method to easily save/"sync" current state of self to the Database. - fn save(&self) -> Result { + fn save(&self) -> Result { let db = connection(); let tempdb = db.get()?; diff --git a/hammond-data/src/models/source.rs b/hammond-data/src/models/source.rs index aa08ad9..90062ba 100644 --- a/hammond-data/src/models/source.rs +++ b/hammond-data/src/models/source.rs @@ -1,4 +1,5 @@ use diesel::SaveChangesDsl; +use failure::Error; use rss::Channel; use url::Url; @@ -13,7 +14,6 @@ use futures::prelude::*; use futures_cpupool::CpuPool; use database::connection; -use errors::*; use feed::{Feed, FeedBuilder}; use models::{NewSource, Save}; use schema::source; @@ -34,7 +34,7 @@ pub struct Source { impl Save for Source { /// Helper method to easily save/"sync" current state of self to the Database. - fn save(&self) -> Result { + fn save(&self) -> Result { let db = connection(); let con = db.get()?; @@ -85,7 +85,7 @@ impl Source { /// Extract Etag and LastModifier from res, and update self and the /// corresponding db row. 
- fn update_etag(&mut self, res: &Response) -> Result<()> { + fn update_etag(&mut self, res: &Response) -> Result<(), Error> { let headers = res.headers(); let etag = headers.get::().map(|x| x.tag()); @@ -109,7 +109,7 @@ impl Source { // 403: Forbidden // 408: Timeout // 410: Feed deleted - fn match_status(mut self, res: Response) -> Result<(Self, Response)> { + fn match_status(mut self, res: Response) -> Result<(Self, Response), Error> { self.update_etag(&res)?; let code = res.status(); match code { @@ -131,7 +131,7 @@ impl Source { Ok((self, res)) } - fn handle_301(&mut self, res: &Response) -> Result<()> { + fn handle_301(&mut self, res: &Response) -> Result<(), Error> { let headers = res.headers(); if let Some(url) = headers.get::() { @@ -150,7 +150,7 @@ impl Source { /// Construct a new `Source` with the given `uri` and index it. /// /// This only indexes the `Source` struct, not the Podcast Feed. - pub fn from_url(uri: &str) -> Result { + pub fn from_url(uri: &str) -> Result { let url = Url::parse(uri)?; NewSource::new(&url).to_source() @@ -174,11 +174,11 @@ impl Source { let feed = self.request_constructor(client, ignore_etags) .and_then(move |(_, res)| response_to_channel(res, pool)) .and_then(move |chan| { - FeedBuilder::default() + Ok(FeedBuilder::default() .channel(chan) .source_id(id) .build() - .map_err(From::from) + .unwrap()) }); Box::new(feed) @@ -228,7 +228,8 @@ fn response_to_channel( .map_err(From::from) .map(|iter| iter.collect::>()) .map(|utf_8_bytes| String::from_utf8_lossy(&utf_8_bytes).into_owned()) - .and_then(|buf| Channel::from_str(&buf).map_err(From::from)); + // FIXME: Unwrap + .and_then(|buf| Ok(Channel::from_str(&buf).unwrap())); let cpu_chan = pool.spawn(chan); Box::new(cpu_chan) } diff --git a/hammond-data/src/pipeline.rs b/hammond-data/src/pipeline.rs index 418ecdf..9c77513 100644 --- a/hammond-data/src/pipeline.rs +++ b/hammond-data/src/pipeline.rs @@ -10,16 +10,15 @@ use hyper::client::HttpConnector; use 
hyper_tls::HttpsConnector; use tokio_core::reactor::Core; +use failure::Error; use num_cpus; use rss; use Source; use dbqueries; -use errors::*; use models::{IndexState, NewEpisode, NewEpisodeMinimal}; // use Feed; -use std; // use std::sync::{Arc, Mutex}; macro_rules! clone { @@ -51,7 +50,7 @@ pub fn pipeline>( tokio_core: &mut Core, pool: &CpuPool, client: Client>, -) -> Result<()> { +) -> Result<(), Error> { let list: Vec<_> = sources .into_iter() .map(clone!(pool => move |s| s.into_feed(&client, pool.clone(), ignore_etags))) @@ -70,7 +69,7 @@ pub fn pipeline>( } /// Creates a tokio `reactor::Core`, a `CpuPool`, and a `hyper::Client` and runs the pipeline. -pub fn run(sources: Vec, ignore_etags: bool) -> Result<()> { +pub fn run(sources: Vec, ignore_etags: bool) -> Result<(), Error> { if sources.is_empty() { return Ok(()); } @@ -86,7 +85,7 @@ pub fn run(sources: Vec, ignore_etags: bool) -> Result<()> { } /// Docs -pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<()> { +pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<(), Error> { let pool = CpuPool::new_num_cpus(); let mut core = Core::new()?; let handle = core.handle(); @@ -102,7 +101,10 @@ pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<()> { core.run(work) } -fn determine_ep_state(ep: NewEpisodeMinimal, item: &rss::Item) -> Result> { +fn determine_ep_state( + ep: NewEpisodeMinimal, + item: &rss::Item, +) -> Result, Error> { // Check if feed exists let exists = dbqueries::episode_exists(ep.title(), ep.podcast_id())?; @@ -135,7 +137,7 @@ pub(crate) fn glue_async<'a>( #[cfg_attr(feature = "cargo-clippy", allow(type_complexity))] pub fn collect_futures( futures: Vec, -) -> Box>, Error = Error>> +) -> Box>, Error = Error>> where F: 'static + Future, ::Item: 'static, diff --git a/hammond-data/src/utils.rs b/hammond-data/src/utils.rs index 97f762b..d7e1de2 100644 --- a/hammond-data/src/utils.rs +++ b/hammond-data/src/utils.rs @@ -3,11 +3,12 @@ use 
chrono::prelude::*; use rayon::prelude::*; +use failure::Error; use itertools::Itertools; use url::{Position, Url}; use dbqueries; -use errors::*; +// use errors::*; use models::{EpisodeCleanerQuery, Podcast, Save}; use xdg_dirs::DL_DIR; @@ -15,7 +16,7 @@ use std::fs; use std::path::Path; /// Scan downloaded `episode` entries that might have broken `local_uri`s and set them to `None`. -fn download_checker() -> Result<()> { +fn download_checker() -> Result<(), Error> { let mut episodes = dbqueries::get_downloaded_episodes()?; episodes @@ -33,7 +34,7 @@ fn download_checker() -> Result<()> { } /// Delete watched `episodes` that have exceded their liftime after played. -fn played_cleaner() -> Result<()> { +fn played_cleaner() -> Result<(), Error> { let mut episodes = dbqueries::get_played_cleaner_episodes()?; let now_utc = Utc::now().timestamp() as i32; @@ -57,7 +58,7 @@ fn played_cleaner() -> Result<()> { } /// Check `ep.local_uri` field and delete the file it points to. -fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<()> { +fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<(), Error> { if ep.local_uri().is_some() { let uri = ep.local_uri().unwrap().to_owned(); if Path::new(&uri).exists() { @@ -86,7 +87,7 @@ fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<()> { /// /// Runs a cleaner for played Episode's that are pass the lifetime limit and /// scheduled for removal. -pub fn checkup() -> Result<()> { +pub fn checkup() -> Result<(), Error> { info!("Running database checks."); download_checker()?; played_cleaner()?; @@ -123,7 +124,7 @@ pub fn replace_extra_spaces(s: &str) -> String { } /// Returns the URI of a Podcast Downloads given it's title. 
-pub fn get_download_folder(pd_title: &str) -> Result { +pub fn get_download_folder(pd_title: &str) -> Result { // It might be better to make it a hash of the title or the podcast rowid let download_fold = format!("{}/{}", DL_DIR.to_str().unwrap(), pd_title); @@ -137,7 +138,7 @@ pub fn get_download_folder(pd_title: &str) -> Result { /// Removes all the entries associated with the given show from the database, /// and deletes all of the downloaded content. // TODO: Write Tests -pub fn delete_show(pd: &Podcast) -> Result<()> { +pub fn delete_show(pd: &Podcast) -> Result<(), Error> { dbqueries::remove_feed(pd)?; info!("{} was removed succesfully.", pd.title()); diff --git a/hammond-downloader/src/downloader.rs b/hammond-downloader/src/downloader.rs index 26ad08f..34f80ba 100644 --- a/hammond-downloader/src/downloader.rs +++ b/hammond-downloader/src/downloader.rs @@ -11,11 +11,13 @@ use std::io::{BufWriter, Read, Write}; use std::path::Path; use std::sync::{Arc, Mutex}; -use errors::*; use hammond_data::{EpisodeWidgetQuery, PodcastCoverQuery, Save}; use hammond_data::xdg_dirs::HAMMOND_CACHE; -// use failure::Error; +use std::result; + +use failure::Error; +type Result = result::Result; // TODO: Replace path that are of type &str with std::path. // TODO: Have a convention/document absolute/relative paths, if they should end with / or not. diff --git a/hammond-downloader/src/errors.rs b/hammond-downloader/src/errors.rs index 7ff0295..ca23fba 100644 --- a/hammond-downloader/src/errors.rs +++ b/hammond-downloader/src/errors.rs @@ -2,19 +2,9 @@ use hammond_data; use reqwest; use std::io; -error_chain! { - foreign_links { - ReqError(reqwest::Error); - IoError(io::Error); - DataError(hammond_data::errors::Error); - } -} - #[derive(Fail, Debug)] enum DownloaderError { #[fail(display = "Reqwest error: {}", _0)] RequestError(reqwest::Error), - // NOT SYNC. 
- // #[fail(display = "Data error: {}", _0)] - // DataError(hammond_data::errors::Error), + #[fail(display = "Data error: {}", _0)] DataError(hammond_data::errors::DatabaseError), #[fail(display = "Io error: {}", _0)] IoError(io::Error), } diff --git a/hammond-downloader/src/lib.rs b/hammond-downloader/src/lib.rs index cd6b3d3..a607d26 100644 --- a/hammond-downloader/src/lib.rs +++ b/hammond-downloader/src/lib.rs @@ -2,13 +2,12 @@ // #![deny(unused_extern_crates, unused)] #[macro_use] -extern crate error_chain; +extern crate failure; #[macro_use] extern crate failure_derive; #[macro_use] extern crate log; -extern crate failure; extern crate glob; extern crate hammond_data; extern crate hyper; diff --git a/hammond-gtk/Cargo.toml b/hammond-gtk/Cargo.toml index e1c8c49..d8b2ab2 100644 --- a/hammond-gtk/Cargo.toml +++ b/hammond-gtk/Cargo.toml @@ -20,6 +20,8 @@ open = "1.2.1" rayon = "0.9.0" send-cell = "0.1.2" url = "1.6.0" +failure = "0.1.1" +failure_derive = "0.1.1" [dependencies.gtk] features = ["v3_22"] diff --git a/hammond-gtk/src/main.rs b/hammond-gtk/src/main.rs index 7245aee..e241db8 100644 --- a/hammond-gtk/src/main.rs +++ b/hammond-gtk/src/main.rs @@ -1,5 +1,5 @@ #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr, needless_pass_by_value))] -#![deny(unused_extern_crates, unused)] +// #![deny(unused_extern_crates, unused)] extern crate gdk; extern crate gdk_pixbuf; @@ -7,6 +7,10 @@ extern crate gio; extern crate glib; extern crate gtk; +#[macro_use] +extern crate failure; +#[macro_use] +extern crate failure_derive; #[macro_use] extern crate lazy_static; #[macro_use] diff --git a/hammond-gtk/src/widgets/episode.rs b/hammond-gtk/src/widgets/episode.rs index 0490b8d..2886176 100644 --- a/hammond-gtk/src/widgets/episode.rs +++ b/hammond-gtk/src/widgets/episode.rs @@ -4,12 +4,12 @@ use gtk; use chrono::prelude::*; use gtk::prelude::*; +use failure::Error; use humansize::{file_size_opts as size_opts, FileSize}; use open; use 
hammond_data::{EpisodeWidgetQuery, Podcast}; use hammond_data::dbqueries; -use hammond_data::errors::*; use hammond_data::utils::get_download_folder; use app::Action; @@ -368,7 +368,7 @@ fn update_total_size_callback(prog: Arc>, total_size: g // }; // } -pub fn episodes_listbox(pd: &Podcast, sender: Sender) -> Result { +pub fn episodes_listbox(pd: &Podcast, sender: Sender) -> Result { let mut episodes = dbqueries::get_pd_episodeswidgets(pd)?; let list = gtk::ListBox::new(); From 8f0a63fdb71d7e97d3eaa8503d9550551c687cae Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Sun, 4 Feb 2018 17:46:01 +0200 Subject: [PATCH 03/18] Downloader: Make the Error type public. --- hammond-downloader/src/errors.rs | 2 +- hammond-downloader/src/lib.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/hammond-downloader/src/errors.rs b/hammond-downloader/src/errors.rs index ca23fba..360654a 100644 --- a/hammond-downloader/src/errors.rs +++ b/hammond-downloader/src/errors.rs @@ -3,7 +3,7 @@ use reqwest; use std::io; #[derive(Fail, Debug)] -enum DownloaderError { +pub enum DownloaderError { #[fail(display = "Reqwest error: {}", _0)] RequestError(reqwest::Error), #[fail(display = "Data error: {}", _0)] DataError(hammond_data::errors::DatabaseError), #[fail(display = "Io error: {}", _0)] IoError(io::Error), diff --git a/hammond-downloader/src/lib.rs b/hammond-downloader/src/lib.rs index a607d26..2a5f991 100644 --- a/hammond-downloader/src/lib.rs +++ b/hammond-downloader/src/lib.rs @@ -1,5 +1,5 @@ #![recursion_limit = "1024"] -// #![deny(unused_extern_crates, unused)] +#![deny(unused_extern_crates, unused)] #[macro_use] extern crate failure; From 1085d468b91f59a06d71d8e5a5d213592b133f5c Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Sun, 4 Feb 2018 18:45:19 +0200 Subject: [PATCH 04/18] cargo fmt --- hammond-data/src/errors.rs | 24 ++++++++++++++++-------- hammond-downloader/src/errors.rs | 9 ++++++--- 2 files changed, 22 insertions(+), 11 deletions(-) diff --git 
a/hammond-data/src/errors.rs b/hammond-data/src/errors.rs index b4f0549..d694969 100644 --- a/hammond-data/src/errors.rs +++ b/hammond-data/src/errors.rs @@ -21,16 +21,24 @@ struct IOError(io::Error); #[derive(Fail, Debug)] pub enum DatabaseError { - #[fail(display = "SQL Query failed: {}", _0)] DieselResultError(diesel::result::Error), - #[fail(display = "Database Migration error: {}", _0)] DieselMigrationError(RunMigrationsError), - #[fail(display = "R2D2 error: {}", _0)] R2D2Error(r2d2::Error), - #[fail(display = "R2D2 Pool error: {}", _0)] R2D2PoolError(r2d2::PoolError), + #[fail(display = "SQL Query failed: {}", _0)] + DieselResultError(diesel::result::Error), + #[fail(display = "Database Migration error: {}", _0)] + DieselMigrationError(RunMigrationsError), + #[fail(display = "R2D2 error: {}", _0)] + R2D2Error(r2d2::Error), + #[fail(display = "R2D2 Pool error: {}", _0)] + R2D2PoolError(r2d2::PoolError), } #[derive(Fail, Debug)] pub enum HttpError { - #[fail(display = "Reqwest Error: {}", _0)] ReqError(reqwest::Error), - #[fail(display = "Hyper Error: {}", _0)] HyperError(hyper::Error), - #[fail(display = "Url Error: {}", _0)] UrlError(url::ParseError), - #[fail(display = "TLS Error: {}", _0)] TLSError(native_tls::Error), + #[fail(display = "Reqwest Error: {}", _0)] + ReqError(reqwest::Error), + #[fail(display = "Hyper Error: {}", _0)] + HyperError(hyper::Error), + #[fail(display = "Url Error: {}", _0)] + UrlError(url::ParseError), + #[fail(display = "TLS Error: {}", _0)] + TLSError(native_tls::Error), } diff --git a/hammond-downloader/src/errors.rs b/hammond-downloader/src/errors.rs index 360654a..7bab62d 100644 --- a/hammond-downloader/src/errors.rs +++ b/hammond-downloader/src/errors.rs @@ -4,7 +4,10 @@ use std::io; #[derive(Fail, Debug)] pub enum DownloaderError { - #[fail(display = "Reqwest error: {}", _0)] RequestError(reqwest::Error), - #[fail(display = "Data error: {}", _0)] DataError(hammond_data::errors::DatabaseError), - #[fail(display = "Io error: 
{}", _0)] IoError(io::Error), + #[fail(display = "Reqwest error: {}", _0)] + RequestError(reqwest::Error), + #[fail(display = "Data error: {}", _0)] + DataError(hammond_data::errors::DatabaseError), + #[fail(display = "Io error: {}", _0)] + IoError(io::Error), } From 0892fe26ba01d7b633312d40a073ee958ea3e4a3 Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Mon, 5 Feb 2018 01:20:01 +0200 Subject: [PATCH 05/18] Manually implement From Trait for the DatabaseError. --- hammond-data/src/database.rs | 5 +++-- hammond-data/src/dbqueries.rs | 1 + hammond-data/src/errors.rs | 40 ++++++++++++++++++++++++++++------- 3 files changed, 36 insertions(+), 10 deletions(-) diff --git a/hammond-data/src/database.rs b/hammond-data/src/database.rs index 8cc0a5f..d943957 100644 --- a/hammond-data/src/database.rs +++ b/hammond-data/src/database.rs @@ -7,6 +7,7 @@ use diesel::r2d2::ConnectionManager; use std::io; use std::path::PathBuf; +use errors::DatabaseError; use failure::Error; #[cfg(not(test))] @@ -57,7 +58,7 @@ fn init_pool(db_path: &str) -> Pool { pool } -fn run_migration_on(connection: &SqliteConnection) -> Result<(), Error> { +fn run_migration_on(connection: &SqliteConnection) -> Result<(), DatabaseError> { info!("Running DB Migrations..."); // embedded_migrations::run(connection)?; embedded_migrations::run_with_output(connection, &mut io::stdout()).map_err(From::from) @@ -66,7 +67,7 @@ fn run_migration_on(connection: &SqliteConnection) -> Result<(), Error> { /// Reset the database into a clean state. // Test share a Temp file db. 
#[allow(dead_code)] -pub fn truncate_db() -> Result<(), Error> { +pub fn truncate_db() -> Result<(), DatabaseError> { let db = connection(); let con = db.get()?; con.execute("DELETE FROM episode")?; diff --git a/hammond-data/src/dbqueries.rs b/hammond-data/src/dbqueries.rs index 5a4417a..7dc68a5 100644 --- a/hammond-data/src/dbqueries.rs +++ b/hammond-data/src/dbqueries.rs @@ -10,6 +10,7 @@ use failure::Error; use database::connection; use models::*; +// use errors::DatabaseError; // Feel free to open a Merge request that manually replaces Result if you feel bored. use std::result; diff --git a/hammond-data/src/errors.rs b/hammond-data/src/errors.rs index d694969..ed5ffd1 100644 --- a/hammond-data/src/errors.rs +++ b/hammond-data/src/errors.rs @@ -22,23 +22,47 @@ struct IOError(io::Error); #[derive(Fail, Debug)] pub enum DatabaseError { #[fail(display = "SQL Query failed: {}", _0)] - DieselResultError(diesel::result::Error), + DieselResultError(#[cause] diesel::result::Error), #[fail(display = "Database Migration error: {}", _0)] - DieselMigrationError(RunMigrationsError), + DieselMigrationError(#[cause] RunMigrationsError), #[fail(display = "R2D2 error: {}", _0)] - R2D2Error(r2d2::Error), + R2D2Error(#[cause] r2d2::Error), #[fail(display = "R2D2 Pool error: {}", _0)] - R2D2PoolError(r2d2::PoolError), + R2D2PoolError(#[cause] r2d2::PoolError), +} + +impl From for DatabaseError { + fn from(err: RunMigrationsError) -> Self { + DatabaseError::DieselMigrationError(err) + } +} + +impl From for DatabaseError { + fn from(err: diesel::result::Error) -> Self { + DatabaseError::DieselResultError(err) + } +} + +impl From for DatabaseError { + fn from(err: r2d2::Error) -> Self { + DatabaseError::R2D2Error(err) + } +} + +impl From for DatabaseError { + fn from(err: r2d2::PoolError) -> Self { + DatabaseError::R2D2PoolError(err) + } } #[derive(Fail, Debug)] pub enum HttpError { #[fail(display = "Reqwest Error: {}", _0)] - ReqError(reqwest::Error), + ReqError(#[cause] 
reqwest::Error), #[fail(display = "Hyper Error: {}", _0)] - HyperError(hyper::Error), + HyperError(#[cause] hyper::Error), #[fail(display = "Url Error: {}", _0)] - UrlError(url::ParseError), + UrlError(#[cause] url::ParseError), #[fail(display = "TLS Error: {}", _0)] - TLSError(native_tls::Error), + TLSError(#[cause] native_tls::Error), } From ede4c21e30af22f07a902427c557a77acbd28b97 Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Mon, 5 Feb 2018 01:39:50 +0200 Subject: [PATCH 06/18] Merge the Errors into a single Enum. --- hammond-data/src/database.rs | 7 ++- hammond-data/src/dbqueries.rs | 66 +++++++++++++------------- hammond-data/src/errors.rs | 54 ++++++++++----------- hammond-data/src/models/new_episode.rs | 2 +- hammond-data/src/models/new_source.rs | 2 +- hammond-downloader/src/errors.rs | 2 +- 6 files changed, 65 insertions(+), 68 deletions(-) diff --git a/hammond-data/src/database.rs b/hammond-data/src/database.rs index d943957..d6e5be0 100644 --- a/hammond-data/src/database.rs +++ b/hammond-data/src/database.rs @@ -7,8 +7,7 @@ use diesel::r2d2::ConnectionManager; use std::io; use std::path::PathBuf; -use errors::DatabaseError; -use failure::Error; +use errors::DataError; #[cfg(not(test))] use xdg_dirs; @@ -58,7 +57,7 @@ fn init_pool(db_path: &str) -> Pool { pool } -fn run_migration_on(connection: &SqliteConnection) -> Result<(), DatabaseError> { +fn run_migration_on(connection: &SqliteConnection) -> Result<(), DataError> { info!("Running DB Migrations..."); // embedded_migrations::run(connection)?; embedded_migrations::run_with_output(connection, &mut io::stdout()).map_err(From::from) @@ -67,7 +66,7 @@ fn run_migration_on(connection: &SqliteConnection) -> Result<(), DatabaseError> /// Reset the database into a clean state. // Test share a Temp file db. 
#[allow(dead_code)] -pub fn truncate_db() -> Result<(), DatabaseError> { +pub fn truncate_db() -> Result<(), DataError> { let db = connection(); let con = db.get()?; con.execute("DELETE FROM episode")?; diff --git a/hammond-data/src/dbqueries.rs b/hammond-data/src/dbqueries.rs index 7dc68a5..758488b 100644 --- a/hammond-data/src/dbqueries.rs +++ b/hammond-data/src/dbqueries.rs @@ -6,17 +6,16 @@ use diesel::prelude::*; use diesel; use diesel::dsl::exists; use diesel::select; -use failure::Error; use database::connection; +use errors::DataError; use models::*; -// use errors::DatabaseError; // Feel free to open a Merge request that manually replaces Result if you feel bored. use std::result; -type Result = result::Result; +type DatabaseResult = result::Result; -pub fn get_sources() -> Result> { +pub fn get_sources() -> DatabaseResult> { use schema::source::dsl::*; let db = connection(); let con = db.get()?; @@ -27,7 +26,7 @@ pub fn get_sources() -> Result> { .map_err(From::from) } -pub fn get_podcasts() -> Result> { +pub fn get_podcasts() -> DatabaseResult> { use schema::podcast::dsl::*; let db = connection(); let con = db.get()?; @@ -38,7 +37,7 @@ pub fn get_podcasts() -> Result> { .map_err(From::from) } -pub fn get_episodes() -> Result> { +pub fn get_episodes() -> DatabaseResult> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -49,7 +48,7 @@ pub fn get_episodes() -> Result> { .map_err(From::from) } -pub(crate) fn get_downloaded_episodes() -> Result> { +pub(crate) fn get_downloaded_episodes() -> DatabaseResult> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -61,7 +60,7 @@ pub(crate) fn get_downloaded_episodes() -> Result> { .map_err(From::from) } -// pub(crate) fn get_played_episodes() -> Result> { +// pub(crate) fn get_played_episodes() -> DatabaseResult> { // use schema::episode::dsl::*; // let db = connection(); @@ -72,7 +71,7 @@ pub(crate) fn get_downloaded_episodes() -> Result> { // 
.map_err(From::from) // } -pub(crate) fn get_played_cleaner_episodes() -> Result> { +pub(crate) fn get_played_cleaner_episodes() -> DatabaseResult> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -84,7 +83,7 @@ pub(crate) fn get_played_cleaner_episodes() -> Result> .map_err(From::from) } -pub fn get_episode_from_rowid(ep_id: i32) -> Result { +pub fn get_episode_from_rowid(ep_id: i32) -> DatabaseResult { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -95,7 +94,7 @@ pub fn get_episode_from_rowid(ep_id: i32) -> Result { .map_err(From::from) } -pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result> { +pub fn get_episode_local_uri_from_id(ep_id: i32) -> DatabaseResult> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -107,7 +106,7 @@ pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result> { .map_err(From::from) } -pub fn get_episodes_widgets_with_limit(limit: u32) -> Result> { +pub fn get_episodes_widgets_with_limit(limit: u32) -> DatabaseResult> { use schema::episode; let db = connection(); let con = db.get()?; @@ -130,7 +129,7 @@ pub fn get_episodes_widgets_with_limit(limit: u32) -> Result Result { +pub fn get_podcast_from_id(pid: i32) -> DatabaseResult { use schema::podcast::dsl::*; let db = connection(); let con = db.get()?; @@ -141,7 +140,7 @@ pub fn get_podcast_from_id(pid: i32) -> Result { .map_err(From::from) } -pub fn get_podcast_cover_from_id(pid: i32) -> Result { +pub fn get_podcast_cover_from_id(pid: i32) -> DatabaseResult { use schema::podcast::dsl::*; let db = connection(); let con = db.get()?; @@ -153,7 +152,7 @@ pub fn get_podcast_cover_from_id(pid: i32) -> Result { .map_err(From::from) } -pub fn get_pd_episodes(parent: &Podcast) -> Result> { +pub fn get_pd_episodes(parent: &Podcast) -> DatabaseResult> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -164,7 +163,7 @@ pub fn get_pd_episodes(parent: &Podcast) -> Result> { 
.map_err(From::from) } -pub fn get_pd_episodeswidgets(parent: &Podcast) -> Result> { +pub fn get_pd_episodeswidgets(parent: &Podcast) -> DatabaseResult> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -177,7 +176,7 @@ pub fn get_pd_episodeswidgets(parent: &Podcast) -> Result Result> { +pub fn get_pd_unplayed_episodes(parent: &Podcast) -> DatabaseResult> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -189,8 +188,8 @@ pub fn get_pd_unplayed_episodes(parent: &Podcast) -> Result> { .map_err(From::from) } -// pub(crate) fn get_pd_episodes_limit(parent: &Podcast, limit: u32) -> Result> { -// use schema::episode::dsl::*; +// pub(crate) fn get_pd_episodes_limit(parent: &Podcast, limit: u32) -> +// DatabaseResult> { use schema::episode::dsl::*; // let db = connection(); // let con = db.get()?; @@ -202,7 +201,7 @@ pub fn get_pd_unplayed_episodes(parent: &Podcast) -> Result> { // .map_err(From::from) // } -pub fn get_source_from_uri(uri_: &str) -> Result { +pub fn get_source_from_uri(uri_: &str) -> DatabaseResult { use schema::source::dsl::*; let db = connection(); let con = db.get()?; @@ -213,7 +212,7 @@ pub fn get_source_from_uri(uri_: &str) -> Result { .map_err(From::from) } -pub fn get_source_from_id(id_: i32) -> Result { +pub fn get_source_from_id(id_: i32) -> DatabaseResult { use schema::source::dsl::*; let db = connection(); let con = db.get()?; @@ -224,7 +223,7 @@ pub fn get_source_from_id(id_: i32) -> Result { .map_err(From::from) } -pub fn get_podcast_from_source_id(sid: i32) -> Result { +pub fn get_podcast_from_source_id(sid: i32) -> DatabaseResult { use schema::podcast::dsl::*; let db = connection(); let con = db.get()?; @@ -235,7 +234,7 @@ pub fn get_podcast_from_source_id(sid: i32) -> Result { .map_err(From::from) } -pub fn get_episode_from_pk(title_: &str, pid: i32) -> Result { +pub fn get_episode_from_pk(title_: &str, pid: i32) -> DatabaseResult { use schema::episode::dsl::*; let db = connection(); 
let con = db.get()?; @@ -247,7 +246,10 @@ pub fn get_episode_from_pk(title_: &str, pid: i32) -> Result { .map_err(From::from) } -pub(crate) fn get_episode_minimal_from_pk(title_: &str, pid: i32) -> Result { +pub(crate) fn get_episode_minimal_from_pk( + title_: &str, + pid: i32, +) -> DatabaseResult { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -260,11 +262,11 @@ pub(crate) fn get_episode_minimal_from_pk(title_: &str, pid: i32) -> Result Result<()> { +pub(crate) fn remove_feed(pd: &Podcast) -> DatabaseResult<()> { let db = connection(); let con = db.get()?; - con.transaction(|| -> Result<()> { + con.transaction(|| { delete_source(&con, pd.source_id())?; delete_podcast(&con, pd.id())?; delete_podcast_episodes(&con, pd.id())?; @@ -291,7 +293,7 @@ fn delete_podcast_episodes(con: &SqliteConnection, parent_id: i32) -> QueryResul diesel::delete(episode.filter(podcast_id.eq(parent_id))).execute(con) } -pub fn source_exists(url: &str) -> Result { +pub fn source_exists(url: &str) -> DatabaseResult { use schema::source::dsl::*; let db = connection(); @@ -302,7 +304,7 @@ pub fn source_exists(url: &str) -> Result { .map_err(From::from) } -pub(crate) fn podcast_exists(source_id_: i32) -> Result { +pub(crate) fn podcast_exists(source_id_: i32) -> DatabaseResult { use schema::podcast::dsl::*; let db = connection(); @@ -314,7 +316,7 @@ pub(crate) fn podcast_exists(source_id_: i32) -> Result { } #[cfg_attr(rustfmt, rustfmt_skip)] -pub(crate) fn episode_exists(title_: &str, podcast_id_: i32) -> Result { +pub(crate) fn episode_exists(title_: &str, podcast_id_: i32) -> DatabaseResult { use schema::episode::dsl::*; let db = connection(); @@ -325,7 +327,7 @@ pub(crate) fn episode_exists(title_: &str, podcast_id_: i32) -> Result { .map_err(From::from) } -pub(crate) fn index_new_episodes(eps: &[NewEpisode]) -> Result<()> { +pub(crate) fn index_new_episodes(eps: &[NewEpisode]) -> DatabaseResult<()> { use schema::episode::dsl::*; let db = connection(); let con 
= db.get()?; @@ -337,13 +339,13 @@ pub(crate) fn index_new_episodes(eps: &[NewEpisode]) -> Result<()> { .map(|_| ()) } -pub fn update_none_to_played_now(parent: &Podcast) -> Result { +pub fn update_none_to_played_now(parent: &Podcast) -> DatabaseResult { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; let epoch_now = Utc::now().timestamp() as i32; - con.transaction(|| -> Result { + con.transaction(|| { diesel::update(Episode::belonging_to(parent).filter(played.is_null())) .set(played.eq(Some(epoch_now))) .execute(&con) diff --git a/hammond-data/src/errors.rs b/hammond-data/src/errors.rs index ed5ffd1..34f409b 100644 --- a/hammond-data/src/errors.rs +++ b/hammond-data/src/errors.rs @@ -20,7 +20,7 @@ struct IOError(io::Error); // struct RSSError(rss::Error); #[derive(Fail, Debug)] -pub enum DatabaseError { +pub enum DataError { #[fail(display = "SQL Query failed: {}", _0)] DieselResultError(#[cause] diesel::result::Error), #[fail(display = "Database Migration error: {}", _0)] @@ -29,34 +29,6 @@ pub enum DatabaseError { R2D2Error(#[cause] r2d2::Error), #[fail(display = "R2D2 Pool error: {}", _0)] R2D2PoolError(#[cause] r2d2::PoolError), -} - -impl From for DatabaseError { - fn from(err: RunMigrationsError) -> Self { - DatabaseError::DieselMigrationError(err) - } -} - -impl From for DatabaseError { - fn from(err: diesel::result::Error) -> Self { - DatabaseError::DieselResultError(err) - } -} - -impl From for DatabaseError { - fn from(err: r2d2::Error) -> Self { - DatabaseError::R2D2Error(err) - } -} - -impl From for DatabaseError { - fn from(err: r2d2::PoolError) -> Self { - DatabaseError::R2D2PoolError(err) - } -} - -#[derive(Fail, Debug)] -pub enum HttpError { #[fail(display = "Reqwest Error: {}", _0)] ReqError(#[cause] reqwest::Error), #[fail(display = "Hyper Error: {}", _0)] @@ -66,3 +38,27 @@ pub enum HttpError { #[fail(display = "TLS Error: {}", _0)] TLSError(#[cause] native_tls::Error), } + +impl From for DataError { + fn from(err: 
RunMigrationsError) -> Self { + DataError::DieselMigrationError(err) + } +} + +impl From for DataError { + fn from(err: diesel::result::Error) -> Self { + DataError::DieselResultError(err) + } +} + +impl From for DataError { + fn from(err: r2d2::Error) -> Self { + DataError::R2D2Error(err) + } +} + +impl From for DataError { + fn from(err: r2d2::PoolError) -> Self { + DataError::R2D2PoolError(err) + } +} diff --git a/hammond-data/src/models/new_episode.rs b/hammond-data/src/models/new_episode.rs index 6e487a7..668787f 100644 --- a/hammond-data/src/models/new_episode.rs +++ b/hammond-data/src/models/new_episode.rs @@ -120,7 +120,7 @@ impl NewEpisode { #[allow(dead_code)] pub(crate) fn to_episode(&self) -> Result { self.index()?; - dbqueries::get_episode_from_pk(&self.title, self.podcast_id) + dbqueries::get_episode_from_pk(&self.title, self.podcast_id).map_err(From::from) } } diff --git a/hammond-data/src/models/new_source.rs b/hammond-data/src/models/new_source.rs index fbe79f1..0fb0400 100644 --- a/hammond-data/src/models/new_source.rs +++ b/hammond-data/src/models/new_source.rs @@ -47,6 +47,6 @@ impl NewSource { // Look out for when tryinto lands into stable. 
pub(crate) fn to_source(&self) -> Result { self.insert_or_ignore()?; - dbqueries::get_source_from_uri(&self.uri) + dbqueries::get_source_from_uri(&self.uri).map_err(From::from) } } diff --git a/hammond-downloader/src/errors.rs b/hammond-downloader/src/errors.rs index 7bab62d..8ee10d3 100644 --- a/hammond-downloader/src/errors.rs +++ b/hammond-downloader/src/errors.rs @@ -7,7 +7,7 @@ pub enum DownloaderError { #[fail(display = "Reqwest error: {}", _0)] RequestError(reqwest::Error), #[fail(display = "Data error: {}", _0)] - DataError(hammond_data::errors::DatabaseError), + DataError(hammond_data::errors::DataError), #[fail(display = "Io error: {}", _0)] IoError(io::Error), } From 435ce05ac735d2228c32ed2eaa8635794d37a7b4 Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Mon, 5 Feb 2018 01:53:01 +0200 Subject: [PATCH 07/18] Add rest of the From impls for the Error Type. --- Cargo.lock | 1 - hammond-data/Cargo.toml | 1 - hammond-data/src/errors.rs | 23 +++++++++++++++++++---- hammond-data/src/lib.rs | 1 - 4 files changed, 19 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c7dd21a..604f560 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -630,7 +630,6 @@ dependencies = [ "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rayon 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", - "reqwest 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)", "rfc822_sanitizer 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "rss 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", diff --git a/hammond-data/Cargo.toml b/hammond-data/Cargo.toml index fa8e733..286743d 100644 --- a/hammond-data/Cargo.toml +++ b/hammond-data/Cargo.toml @@ -14,7 +14,6 @@ itertools = "0.7.6" lazy_static = "1.0.0" log = "0.4.1" rayon = "0.9.0" -reqwest = "0.8.4" rfc822_sanitizer 
= "0.3.3" rss = "1.2.1" url = "1.6.0" diff --git a/hammond-data/src/errors.rs b/hammond-data/src/errors.rs index 34f409b..9fd441d 100644 --- a/hammond-data/src/errors.rs +++ b/hammond-data/src/errors.rs @@ -3,7 +3,6 @@ use diesel::r2d2; use diesel_migrations::RunMigrationsError; use hyper; use native_tls; -use reqwest; // use rss; use url; @@ -29,11 +28,9 @@ pub enum DataError { R2D2Error(#[cause] r2d2::Error), #[fail(display = "R2D2 Pool error: {}", _0)] R2D2PoolError(#[cause] r2d2::PoolError), - #[fail(display = "Reqwest Error: {}", _0)] - ReqError(#[cause] reqwest::Error), #[fail(display = "Hyper Error: {}", _0)] HyperError(#[cause] hyper::Error), - #[fail(display = "Url Error: {}", _0)] + #[fail(display = "Failed to parse a url: {}", _0)] UrlError(#[cause] url::ParseError), #[fail(display = "TLS Error: {}", _0)] TLSError(#[cause] native_tls::Error), @@ -62,3 +59,21 @@ impl From for DataError { DataError::R2D2PoolError(err) } } + +impl From for DataError { + fn from(err: hyper::Error) -> Self { + DataError::HyperError(err) + } +} + +impl From for DataError { + fn from(err: url::ParseError) -> Self { + DataError::UrlError(err) + } +} + +impl From for DataError { + fn from(err: native_tls::Error) -> Self { + DataError::TLSError(err) + } +} diff --git a/hammond-data/src/lib.rs b/hammond-data/src/lib.rs index 9602c5c..5fbcd49 100644 --- a/hammond-data/src/lib.rs +++ b/hammond-data/src/lib.rs @@ -46,7 +46,6 @@ extern crate itertools; extern crate native_tls; extern crate num_cpus; extern crate rayon; -extern crate reqwest; extern crate rfc822_sanitizer; extern crate rss; extern crate tokio_core; From 8ba9f928d60dfbcc9a3c69db83667fd26ed49c85 Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Mon, 5 Feb 2018 02:43:56 +0200 Subject: [PATCH 08/18] Switch to using DataError instead of failure::Error. 
--- hammond-data/src/errors.rs | 15 +++++--- hammond-data/src/feed.rs | 18 ++++++---- hammond-data/src/lib.rs | 2 +- hammond-data/src/models/episode.rs | 18 +++++----- hammond-data/src/models/mod.rs | 19 +++++------ hammond-data/src/models/new_episode.rs | 28 ++++++++------- hammond-data/src/models/new_podcast.rs | 16 ++++----- hammond-data/src/models/new_source.rs | 6 ++-- hammond-data/src/models/podcast.rs | 6 ++-- hammond-data/src/models/source.rs | 47 ++++++++++++++++---------- hammond-data/src/pipeline.rs | 19 ++++++----- hammond-data/src/utils.rs | 15 ++++---- hammond-downloader/src/downloader.rs | 1 + 13 files changed, 119 insertions(+), 91 deletions(-) diff --git a/hammond-data/src/errors.rs b/hammond-data/src/errors.rs index 9fd441d..fa88ca5 100644 --- a/hammond-data/src/errors.rs +++ b/hammond-data/src/errors.rs @@ -8,11 +8,6 @@ use url; use std::io; -#[allow(dead_code)] -#[derive(Fail, Debug)] -#[fail(display = "IO Error: {}", _0)] -struct IOError(io::Error); - // fadsadfs NOT SYNC // #[derive(Fail, Debug)] // #[fail(display = "RSS Error: {}", _0)] @@ -34,6 +29,10 @@ pub enum DataError { UrlError(#[cause] url::ParseError), #[fail(display = "TLS Error: {}", _0)] TLSError(#[cause] native_tls::Error), + #[fail(display = "IO Error: {}", _0)] + IOError(io::Error), + #[fail(display = "WANNABE BAIL ERROR: {}", _0)] + DiscountBail(String), } impl From for DataError { @@ -77,3 +76,9 @@ impl From for DataError { DataError::TLSError(err) } } + +impl From for DataError { + fn from(err: io::Error) -> Self { + DataError::IOError(err) + } +} diff --git a/hammond-data/src/feed.rs b/hammond-data/src/feed.rs index 998fab1..f6a3c6e 100644 --- a/hammond-data/src/feed.rs +++ b/hammond-data/src/feed.rs @@ -1,11 +1,11 @@ //! Index Feeds. 
-use failure::Error; use futures::future::*; use itertools::{Either, Itertools}; use rss; use dbqueries; +use errors::DataError; use models::{Index, IndexState, Update}; use models::{NewEpisode, NewPodcast, Podcast}; use pipeline::*; @@ -26,7 +26,7 @@ pub struct Feed { impl Feed { /// Index the contents of the RSS `Feed` into the database. - pub fn index(self) -> Box + Send> { + pub fn index(self) -> Box + Send> { let fut = self.parse_podcast_async() .and_then(|pd| pd.to_podcast()) .and_then(move |pd| self.index_channel_items(&pd)); @@ -38,11 +38,14 @@ impl Feed { NewPodcast::new(&self.channel, self.source_id) } - fn parse_podcast_async(&self) -> Box + Send> { + fn parse_podcast_async(&self) -> Box + Send> { Box::new(ok(self.parse_podcast())) } - fn index_channel_items(&self, pd: &Podcast) -> Box + Send> { + fn index_channel_items( + &self, + pd: &Podcast, + ) -> Box + Send> { let fut = self.get_stuff(pd) .and_then(|(insert, update)| { if !insert.is_empty() { @@ -79,7 +82,10 @@ impl Feed { Box::new(fut) } - fn get_stuff(&self, pd: &Podcast) -> Box + Send> { + fn get_stuff( + &self, + pd: &Podcast, + ) -> Box + Send> { let (insert, update): (Vec<_>, Vec<_>) = self.channel .items() .into_iter() @@ -90,7 +96,7 @@ impl Feed { // I am not sure what the optimizations are on match vs allocating None. 
.map(|fut| { fut.and_then(|x| match x { - IndexState::NotChanged => bail!("Nothing to do here."), + IndexState::NotChanged => return Err(DataError::DiscountBail(format!("Nothing to do here."))), _ => Ok(x), }) }) diff --git a/hammond-data/src/lib.rs b/hammond-data/src/lib.rs index 5fbcd49..3fbbadc 100644 --- a/hammond-data/src/lib.rs +++ b/hammond-data/src/lib.rs @@ -27,7 +27,7 @@ extern crate derive_builder; extern crate diesel; #[macro_use] extern crate diesel_migrations; -#[macro_use] +// #[macro_use] extern crate failure; #[macro_use] extern crate failure_derive; diff --git a/hammond-data/src/models/episode.rs b/hammond-data/src/models/episode.rs index afb1c2a..65dafaa 100644 --- a/hammond-data/src/models/episode.rs +++ b/hammond-data/src/models/episode.rs @@ -2,9 +2,9 @@ use chrono::prelude::*; use diesel; use diesel::SaveChangesDsl; use diesel::prelude::*; -use failure::Error; use database::connection; +use errors::DataError; use models::{Podcast, Save}; use schema::episode; @@ -31,9 +31,9 @@ pub struct Episode { podcast_id: i32, } -impl Save for Episode { +impl Save for Episode { /// Helper method to easily save/"sync" current state of self to the Database. - fn save(&self) -> Result { + fn save(&self) -> Result { let db = connection(); let tempdb = db.get()?; @@ -180,7 +180,7 @@ impl Episode { } /// Sets the `played` value with the current `epoch` timestap and save it. - pub fn set_played_now(&mut self) -> Result<(), Error> { + pub fn set_played_now(&mut self) -> Result<(), DataError> { let epoch = Utc::now().timestamp() as i32; self.set_played(Some(epoch)); self.save().map(|_| ()) @@ -223,9 +223,9 @@ impl From for EpisodeWidgetQuery { } } -impl Save for EpisodeWidgetQuery { +impl Save for EpisodeWidgetQuery { /// Helper method to easily save/"sync" current state of self to the Database. 
- fn save(&self) -> Result { + fn save(&self) -> Result { use schema::episode::dsl::*; let db = connection(); @@ -342,7 +342,7 @@ impl EpisodeWidgetQuery { } /// Sets the `played` value with the current `epoch` timestap and save it. - pub fn set_played_now(&mut self) -> Result<(), Error> { + pub fn set_played_now(&mut self) -> Result<(), DataError> { let epoch = Utc::now().timestamp() as i32; self.set_played(Some(epoch)); self.save().map(|_| ()) @@ -361,9 +361,9 @@ pub struct EpisodeCleanerQuery { played: Option, } -impl Save for EpisodeCleanerQuery { +impl Save for EpisodeCleanerQuery { /// Helper method to easily save/"sync" current state of self to the Database. - fn save(&self) -> Result { + fn save(&self) -> Result { use schema::episode::dsl::*; let db = connection(); diff --git a/hammond-data/src/models/mod.rs b/hammond-data/src/models/mod.rs index 8c2a89d..10f19ed 100644 --- a/hammond-data/src/models/mod.rs +++ b/hammond-data/src/models/mod.rs @@ -23,8 +23,6 @@ pub use self::episode::{Episode, EpisodeMinimal, EpisodeWidgetQuery}; pub use self::podcast::{Podcast, PodcastCoverQuery}; pub use self::source::Source; -use failure::Error; - #[derive(Debug, Clone, PartialEq)] pub enum IndexState { Index(T), @@ -32,20 +30,21 @@ pub enum IndexState { NotChanged, } -pub trait Insert { - fn insert(&self) -> Result<(), Error>; +pub trait Insert { + fn insert(&self) -> Result; } -pub trait Update { - fn update(&self, i32) -> Result<(), Error>; +pub trait Update { + fn update(&self, i32) -> Result; } -pub trait Index: Insert + Update { - fn index(&self) -> Result<(), Error>; +// This might need to change in the future +pub trait Index: Insert + Update { + fn index(&self) -> Result; } /// FIXME: DOCS -pub trait Save { +pub trait Save { /// Helper method to easily save/"sync" current state of a diesel model to the Database. 
- fn save(&self) -> Result; + fn save(&self) -> Result; } diff --git a/hammond-data/src/models/new_episode.rs b/hammond-data/src/models/new_episode.rs index 668787f..6b2b63e 100644 --- a/hammond-data/src/models/new_episode.rs +++ b/hammond-data/src/models/new_episode.rs @@ -1,12 +1,12 @@ use ammonia; use diesel; use diesel::prelude::*; -use failure::Error; use rfc822_sanitizer::parse_from_rfc2822_with_fallback as parse_rfc822; use rss; use database::connection; use dbqueries; +use errors::DataError; use models::{Episode, EpisodeMinimal, Index, Insert, Update}; use parser; use schema::episode; @@ -43,8 +43,8 @@ impl From for NewEpisode { } } -impl Insert for NewEpisode { - fn insert(&self) -> Result<(), Error> { +impl Insert<(), DataError> for NewEpisode { + fn insert(&self) -> Result<(), DataError> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -58,8 +58,8 @@ impl Insert for NewEpisode { } } -impl Update for NewEpisode { - fn update(&self, episode_id: i32) -> Result<(), Error> { +impl Update<(), DataError> for NewEpisode { + fn update(&self, episode_id: i32) -> Result<(), DataError> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -73,9 +73,9 @@ impl Update for NewEpisode { } } -impl Index for NewEpisode { +impl Index<(), DataError> for NewEpisode { // Does not update the episode description if it's the only thing that has changed. - fn index(&self) -> Result<(), Error> { + fn index(&self) -> Result<(), DataError> { let exists = dbqueries::episode_exists(self.title(), self.podcast_id())?; if exists { @@ -113,12 +113,12 @@ impl PartialEq for NewEpisode { impl NewEpisode { /// Parses an `rss::Item` into a `NewEpisode` Struct. 
#[allow(dead_code)] - pub(crate) fn new(item: &rss::Item, podcast_id: i32) -> Result { + pub(crate) fn new(item: &rss::Item, podcast_id: i32) -> Result { NewEpisodeMinimal::new(item, podcast_id).map(|ep| ep.into_new_episode(item)) } #[allow(dead_code)] - pub(crate) fn to_episode(&self) -> Result { + pub(crate) fn to_episode(&self) -> Result { self.index()?; dbqueries::get_episode_from_pk(&self.title, self.podcast_id).map_err(From::from) } @@ -182,9 +182,11 @@ impl PartialEq for NewEpisodeMinimal { } impl NewEpisodeMinimal { - pub(crate) fn new(item: &rss::Item, parent_id: i32) -> Result { + pub(crate) fn new(item: &rss::Item, parent_id: i32) -> Result { if item.title().is_none() { - bail!("No title specified for the item.") + return Err(DataError::DiscountBail(format!( + "No title specified for the item." + ))); } let title = item.title().unwrap().trim().to_owned(); @@ -195,7 +197,9 @@ impl NewEpisodeMinimal { } else if item.link().is_some() { item.link().map(|s| url_cleaner(s)) } else { - bail!("No url specified for the item.") + return Err(DataError::DiscountBail(format!( + "No url specified for the item." + ))); }; // Default to rfc2822 represantation of epoch 0. 
diff --git a/hammond-data/src/models/new_podcast.rs b/hammond-data/src/models/new_podcast.rs index d02f5df..c3e9632 100644 --- a/hammond-data/src/models/new_podcast.rs +++ b/hammond-data/src/models/new_podcast.rs @@ -2,9 +2,9 @@ use diesel; use diesel::prelude::*; use ammonia; -use failure::Error; use rss; +use errors::DataError; use models::{Index, Insert, Update}; use models::Podcast; use schema::podcast; @@ -27,8 +27,8 @@ pub(crate) struct NewPodcast { source_id: i32, } -impl Insert for NewPodcast { - fn insert(&self) -> Result<(), Error> { +impl Insert<(), DataError> for NewPodcast { + fn insert(&self) -> Result<(), DataError> { use schema::podcast::dsl::*; let db = connection(); let con = db.get()?; @@ -41,8 +41,8 @@ impl Insert for NewPodcast { } } -impl Update for NewPodcast { - fn update(&self, podcast_id: i32) -> Result<(), Error> { +impl Update<(), DataError> for NewPodcast { + fn update(&self, podcast_id: i32) -> Result<(), DataError> { use schema::podcast::dsl::*; let db = connection(); let con = db.get()?; @@ -58,8 +58,8 @@ impl Update for NewPodcast { // TODO: Maybe return an Enum Instead. // It would make unti testing better too. -impl Index for NewPodcast { - fn index(&self) -> Result<(), Error> { +impl Index<(), DataError> for NewPodcast { + fn index(&self) -> Result<(), DataError> { let exists = dbqueries::podcast_exists(self.source_id)?; if exists { @@ -118,7 +118,7 @@ impl NewPodcast { } // Look out for when tryinto lands into stable. 
- pub(crate) fn to_podcast(&self) -> Result { + pub(crate) fn to_podcast(&self) -> Result { self.index()?; dbqueries::get_podcast_from_source_id(self.source_id).map_err(From::from) } diff --git a/hammond-data/src/models/new_source.rs b/hammond-data/src/models/new_source.rs index 0fb0400..881b008 100644 --- a/hammond-data/src/models/new_source.rs +++ b/hammond-data/src/models/new_source.rs @@ -2,12 +2,12 @@ use diesel; use diesel::prelude::*; -use failure::Error; use url::Url; use database::connection; use dbqueries; // use models::{Insert, Update}; +use errors::DataError; use models::Source; use schema::source; @@ -32,7 +32,7 @@ impl NewSource { } } - pub(crate) fn insert_or_ignore(&self) -> Result<(), Error> { + pub(crate) fn insert_or_ignore(&self) -> Result<(), DataError> { use schema::source::dsl::*; let db = connection(); let con = db.get()?; @@ -45,7 +45,7 @@ impl NewSource { } // Look out for when tryinto lands into stable. - pub(crate) fn to_source(&self) -> Result { + pub(crate) fn to_source(&self) -> Result { self.insert_or_ignore()?; dbqueries::get_source_from_uri(&self.uri).map_err(From::from) } diff --git a/hammond-data/src/models/podcast.rs b/hammond-data/src/models/podcast.rs index dcfad9c..a5ac44b 100644 --- a/hammond-data/src/models/podcast.rs +++ b/hammond-data/src/models/podcast.rs @@ -1,7 +1,7 @@ use diesel::SaveChangesDsl; -use failure::Error; use database::connection; +use errors::DataError; use models::{Save, Source}; use schema::podcast; @@ -23,9 +23,9 @@ pub struct Podcast { source_id: i32, } -impl Save for Podcast { +impl Save for Podcast { /// Helper method to easily save/"sync" current state of self to the Database. 
- fn save(&self) -> Result { + fn save(&self) -> Result { let db = connection(); let tempdb = db.get()?; diff --git a/hammond-data/src/models/source.rs b/hammond-data/src/models/source.rs index 90062ba..34f32dd 100644 --- a/hammond-data/src/models/source.rs +++ b/hammond-data/src/models/source.rs @@ -1,5 +1,4 @@ use diesel::SaveChangesDsl; -use failure::Error; use rss::Channel; use url::Url; @@ -14,6 +13,7 @@ use futures::prelude::*; use futures_cpupool::CpuPool; use database::connection; +use errors::DataError; use feed::{Feed, FeedBuilder}; use models::{NewSource, Save}; use schema::source; @@ -32,9 +32,9 @@ pub struct Source { http_etag: Option, } -impl Save for Source { +impl Save for Source { /// Helper method to easily save/"sync" current state of self to the Database. - fn save(&self) -> Result { + fn save(&self) -> Result { let db = connection(); let con = db.get()?; @@ -85,7 +85,7 @@ impl Source { /// Extract Etag and LastModifier from res, and update self and the /// corresponding db row. - fn update_etag(&mut self, res: &Response) -> Result<(), Error> { + fn update_etag(&mut self, res: &Response) -> Result<(), DataError> { let headers = res.headers(); let etag = headers.get::().map(|x| x.tag()); @@ -109,29 +109,42 @@ impl Source { // 403: Forbidden // 408: Timeout // 410: Feed deleted - fn match_status(mut self, res: Response) -> Result<(Self, Response), Error> { + // TODO: Rething this api, + fn match_status(mut self, res: Response) -> Result<(Self, Response), DataError> { self.update_etag(&res)?; let code = res.status(); match code { - StatusCode::NotModified => bail!("304: skipping.."), + StatusCode::NotModified => { + return Err(DataError::DiscountBail(format!("304: skipping.."))) + } StatusCode::MovedPermanently => { error!("Feed was moved permanently."); self.handle_301(&res)?; - bail!("301: Feed was moved permanently.") + return Err(DataError::DiscountBail(format!( + "301: Feed was moved permanently." 
+ ))); } StatusCode::TemporaryRedirect => debug!("307: Temporary Redirect."), StatusCode::PermanentRedirect => warn!("308: Permanent Redirect."), - StatusCode::Unauthorized => bail!("401: Unauthorized."), - StatusCode::Forbidden => bail!("403: Forbidden."), - StatusCode::NotFound => bail!("404: Not found."), - StatusCode::RequestTimeout => bail!("408: Request Timeout."), - StatusCode::Gone => bail!("410: Feed was deleted."), + StatusCode::Unauthorized => { + return Err(DataError::DiscountBail(format!("401: Unauthorized."))) + } + StatusCode::Forbidden => { + return Err(DataError::DiscountBail(format!("403: Forbidden."))) + } + StatusCode::NotFound => return Err(DataError::DiscountBail(format!("404: Not found."))), + StatusCode::RequestTimeout => { + return Err(DataError::DiscountBail(format!("408: Request Timeout."))) + } + StatusCode::Gone => { + return Err(DataError::DiscountBail(format!("410: Feed was deleted."))) + } _ => info!("HTTP StatusCode: {}", code), }; Ok((self, res)) } - fn handle_301(&mut self, res: &Response) -> Result<(), Error> { + fn handle_301(&mut self, res: &Response) -> Result<(), DataError> { let headers = res.headers(); if let Some(url) = headers.get::() { @@ -150,7 +163,7 @@ impl Source { /// Construct a new `Source` with the given `uri` and index it. /// /// This only indexes the `Source` struct, not the Podcast Feed. 
- pub fn from_url(uri: &str) -> Result { + pub fn from_url(uri: &str) -> Result { let url = Url::parse(uri)?; NewSource::new(&url).to_source() @@ -169,7 +182,7 @@ impl Source { client: &Client>, pool: CpuPool, ignore_etags: bool, - ) -> Box> { + ) -> Box> { let id = self.id(); let feed = self.request_constructor(client, ignore_etags) .and_then(move |(_, res)| response_to_channel(res, pool)) @@ -190,7 +203,7 @@ impl Source { self, client: &Client>, ignore_etags: bool, - ) -> Box> { + ) -> Box> { // FIXME: remove unwrap somehow let uri = Uri::from_str(self.uri()).unwrap(); let mut req = Request::new(Method::Get, uri); @@ -221,7 +234,7 @@ impl Source { fn response_to_channel( res: Response, pool: CpuPool, -) -> Box + Send> { +) -> Box + Send> { let chan = res.body() .concat2() .map(|x| x.into_iter()) diff --git a/hammond-data/src/pipeline.rs b/hammond-data/src/pipeline.rs index 9c77513..e7915a1 100644 --- a/hammond-data/src/pipeline.rs +++ b/hammond-data/src/pipeline.rs @@ -10,14 +10,13 @@ use hyper::client::HttpConnector; use hyper_tls::HttpsConnector; use tokio_core::reactor::Core; -use failure::Error; use num_cpus; use rss; use Source; use dbqueries; +use errors::DataError; use models::{IndexState, NewEpisode, NewEpisodeMinimal}; -// use Feed; // use std::sync::{Arc, Mutex}; @@ -50,7 +49,7 @@ pub fn pipeline>( tokio_core: &mut Core, pool: &CpuPool, client: Client>, -) -> Result<(), Error> { +) -> Result<(), DataError> { let list: Vec<_> = sources .into_iter() .map(clone!(pool => move |s| s.into_feed(&client, pool.clone(), ignore_etags))) @@ -59,7 +58,9 @@ pub fn pipeline>( .collect(); if list.is_empty() { - bail!("No futures were found to run."); + return Err(DataError::DiscountBail(format!( + "No futures were found to run." + ))); } // Thats not really concurrent yet I think. @@ -69,7 +70,7 @@ pub fn pipeline>( } /// Creates a tokio `reactor::Core`, a `CpuPool`, and a `hyper::Client` and runs the pipeline. 
-pub fn run(sources: Vec, ignore_etags: bool) -> Result<(), Error> { +pub fn run(sources: Vec, ignore_etags: bool) -> Result<(), DataError> { if sources.is_empty() { return Ok(()); } @@ -85,7 +86,7 @@ pub fn run(sources: Vec, ignore_etags: bool) -> Result<(), Error> { } /// Docs -pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<(), Error> { +pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<(), DataError> { let pool = CpuPool::new_num_cpus(); let mut core = Core::new()?; let handle = core.handle(); @@ -104,7 +105,7 @@ pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<(), Error> { fn determine_ep_state( ep: NewEpisodeMinimal, item: &rss::Item, -) -> Result, Error> { +) -> Result, DataError> { // Check if feed exists let exists = dbqueries::episode_exists(ep.title(), ep.podcast_id())?; @@ -125,7 +126,7 @@ fn determine_ep_state( pub(crate) fn glue_async<'a>( item: &'a rss::Item, id: i32, -) -> Box, Error = Error> + 'a> { +) -> Box, Error = DataError> + 'a> { Box::new( result(NewEpisodeMinimal::new(item, id)).and_then(move |ep| determine_ep_state(ep, item)), ) @@ -137,7 +138,7 @@ pub(crate) fn glue_async<'a>( #[cfg_attr(feature = "cargo-clippy", allow(type_complexity))] pub fn collect_futures( futures: Vec, -) -> Box>, Error = Error>> +) -> Box>, Error = DataError>> where F: 'static + Future, ::Item: 'static, diff --git a/hammond-data/src/utils.rs b/hammond-data/src/utils.rs index d7e1de2..11ec3bf 100644 --- a/hammond-data/src/utils.rs +++ b/hammond-data/src/utils.rs @@ -3,12 +3,11 @@ use chrono::prelude::*; use rayon::prelude::*; -use failure::Error; use itertools::Itertools; use url::{Position, Url}; use dbqueries; -// use errors::*; +use errors::DataError; use models::{EpisodeCleanerQuery, Podcast, Save}; use xdg_dirs::DL_DIR; @@ -16,7 +15,7 @@ use std::fs; use std::path::Path; /// Scan downloaded `episode` entries that might have broken `local_uri`s and set them to `None`. 
-fn download_checker() -> Result<(), Error> { +fn download_checker() -> Result<(), DataError> { let mut episodes = dbqueries::get_downloaded_episodes()?; episodes @@ -34,7 +33,7 @@ fn download_checker() -> Result<(), Error> { } /// Delete watched `episodes` that have exceded their liftime after played. -fn played_cleaner() -> Result<(), Error> { +fn played_cleaner() -> Result<(), DataError> { let mut episodes = dbqueries::get_played_cleaner_episodes()?; let now_utc = Utc::now().timestamp() as i32; @@ -58,7 +57,7 @@ fn played_cleaner() -> Result<(), Error> { } /// Check `ep.local_uri` field and delete the file it points to. -fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<(), Error> { +fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<(), DataError> { if ep.local_uri().is_some() { let uri = ep.local_uri().unwrap().to_owned(); if Path::new(&uri).exists() { @@ -87,7 +86,7 @@ fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<(), Error> { /// /// Runs a cleaner for played Episode's that are pass the lifetime limit and /// scheduled for removal. -pub fn checkup() -> Result<(), Error> { +pub fn checkup() -> Result<(), DataError> { info!("Running database checks."); download_checker()?; played_cleaner()?; @@ -124,7 +123,7 @@ pub fn replace_extra_spaces(s: &str) -> String { } /// Returns the URI of a Podcast Downloads given it's title. -pub fn get_download_folder(pd_title: &str) -> Result { +pub fn get_download_folder(pd_title: &str) -> Result { // It might be better to make it a hash of the title or the podcast rowid let download_fold = format!("{}/{}", DL_DIR.to_str().unwrap(), pd_title); @@ -138,7 +137,7 @@ pub fn get_download_folder(pd_title: &str) -> Result { /// Removes all the entries associated with the given show from the database, /// and deletes all of the downloaded content. 
// TODO: Write Tests -pub fn delete_show(pd: &Podcast) -> Result<(), Error> { +pub fn delete_show(pd: &Podcast) -> Result<(), DataError> { dbqueries::remove_feed(pd)?; info!("{} was removed succesfully.", pd.title()); diff --git a/hammond-downloader/src/downloader.rs b/hammond-downloader/src/downloader.rs index 34f80ba..07ad6a6 100644 --- a/hammond-downloader/src/downloader.rs +++ b/hammond-downloader/src/downloader.rs @@ -17,6 +17,7 @@ use hammond_data::xdg_dirs::HAMMOND_CACHE; use std::result; use failure::Error; + type Result = result::Result; // TODO: Replace path that are of type &str with std::path. From e84e7df3f16d17436d204e15e7ca1ee27823b95b Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Mon, 5 Feb 2018 15:59:34 +0200 Subject: [PATCH 09/18] Remove unwrap call. --- hammond-data/src/errors.rs | 2 +- hammond-data/src/models/source.rs | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/hammond-data/src/errors.rs b/hammond-data/src/errors.rs index fa88ca5..5afe6b8 100644 --- a/hammond-data/src/errors.rs +++ b/hammond-data/src/errors.rs @@ -30,7 +30,7 @@ pub enum DataError { #[fail(display = "TLS Error: {}", _0)] TLSError(#[cause] native_tls::Error), #[fail(display = "IO Error: {}", _0)] - IOError(io::Error), + IOError(#[cause] io::Error), #[fail(display = "WANNABE BAIL ERROR: {}", _0)] DiscountBail(String), } diff --git a/hammond-data/src/models/source.rs b/hammond-data/src/models/source.rs index 34f32dd..2352757 100644 --- a/hammond-data/src/models/source.rs +++ b/hammond-data/src/models/source.rs @@ -241,8 +241,10 @@ fn response_to_channel( .map_err(From::from) .map(|iter| iter.collect::>()) .map(|utf_8_bytes| String::from_utf8_lossy(&utf_8_bytes).into_owned()) - // FIXME: Unwrap - .and_then(|buf| Ok(Channel::from_str(&buf).unwrap())); + .and_then(|buf| { + Channel::from_str(&buf) + .or_else(|_| Err(DataError::DiscountBail(format!("RSS crate Error")))) + }); let cpu_chan = pool.spawn(chan); Box::new(cpu_chan) } From 
34d7391363ce639e178730273b4cc489ff4e991d Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Mon, 5 Feb 2018 16:37:50 +0200 Subject: [PATCH 10/18] Make a counterfeit rss Error. --- hammond-data/src/errors.rs | 3 +++ hammond-data/src/models/source.rs | 3 +-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/hammond-data/src/errors.rs b/hammond-data/src/errors.rs index 5afe6b8..3c73e1a 100644 --- a/hammond-data/src/errors.rs +++ b/hammond-data/src/errors.rs @@ -31,6 +31,9 @@ pub enum DataError { TLSError(#[cause] native_tls::Error), #[fail(display = "IO Error: {}", _0)] IOError(#[cause] io::Error), + #[fail(display = "RSS Error: {}", _0)] + // Rss::Error is not yet Sync + RssCrateError(String), #[fail(display = "WANNABE BAIL ERROR: {}", _0)] DiscountBail(String), } diff --git a/hammond-data/src/models/source.rs b/hammond-data/src/models/source.rs index 2352757..c009041 100644 --- a/hammond-data/src/models/source.rs +++ b/hammond-data/src/models/source.rs @@ -242,8 +242,7 @@ fn response_to_channel( .map(|iter| iter.collect::>()) .map(|utf_8_bytes| String::from_utf8_lossy(&utf_8_bytes).into_owned()) .and_then(|buf| { - Channel::from_str(&buf) - .or_else(|_| Err(DataError::DiscountBail(format!("RSS crate Error")))) + Channel::from_str(&buf).or_else(|err| Err(DataError::RssCrateError(format!("{}", err)))) }); let cpu_chan = pool.spawn(chan); Box::new(cpu_chan) From 064c2b4be097ef7f386dfeaa382fb9e51b39f13e Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Mon, 5 Feb 2018 18:25:56 +0200 Subject: [PATCH 11/18] Start creating custom errors instead of using bail! macro. 
--- hammond-data/src/errors.rs | 16 ++++++- hammond-data/src/lib.rs | 2 +- hammond-data/src/models/new_episode.rs | 4 +- hammond-data/src/models/source.rs | 58 +++++++++++++++++++++----- 4 files changed, 65 insertions(+), 15 deletions(-) diff --git a/hammond-data/src/errors.rs b/hammond-data/src/errors.rs index 3c73e1a..57a65cd 100644 --- a/hammond-data/src/errors.rs +++ b/hammond-data/src/errors.rs @@ -7,6 +7,7 @@ use native_tls; use url; use std::io; +// use std::fmt; // fadsadfs NOT SYNC // #[derive(Fail, Debug)] @@ -26,6 +27,7 @@ pub enum DataError { #[fail(display = "Hyper Error: {}", _0)] HyperError(#[cause] hyper::Error), #[fail(display = "Failed to parse a url: {}", _0)] + // TODO: print the url too UrlError(#[cause] url::ParseError), #[fail(display = "TLS Error: {}", _0)] TLSError(#[cause] native_tls::Error), @@ -34,8 +36,14 @@ pub enum DataError { #[fail(display = "RSS Error: {}", _0)] // Rss::Error is not yet Sync RssCrateError(String), - #[fail(display = "WANNABE BAIL ERROR: {}", _0)] + #[fail(display = "Error: {}", _0)] DiscountBail(String), + #[fail(display = "Request to {} returned {}. 
Contex: {}", url, status_code, contex)] + HttpStatusError { + url: String, + status_code: hyper::StatusCode, + contex: String, + }, } impl From for DataError { @@ -85,3 +93,9 @@ impl From for DataError { DataError::IOError(err) } } + +impl From for DataError { + fn from(err: String) -> Self { + DataError::DiscountBail(err) + } +} diff --git a/hammond-data/src/lib.rs b/hammond-data/src/lib.rs index 3fbbadc..5fbcd49 100644 --- a/hammond-data/src/lib.rs +++ b/hammond-data/src/lib.rs @@ -27,7 +27,7 @@ extern crate derive_builder; extern crate diesel; #[macro_use] extern crate diesel_migrations; -// #[macro_use] +#[macro_use] extern crate failure; #[macro_use] extern crate failure_derive; diff --git a/hammond-data/src/models/new_episode.rs b/hammond-data/src/models/new_episode.rs index 6b2b63e..e943688 100644 --- a/hammond-data/src/models/new_episode.rs +++ b/hammond-data/src/models/new_episode.rs @@ -210,7 +210,7 @@ impl NewEpisodeMinimal { let duration = parser::parse_itunes_duration(item.itunes_ext()); - Ok(NewEpisodeMinimalBuilder::default() + NewEpisodeMinimalBuilder::default() .title(title) .uri(uri) .duration(duration) @@ -218,7 +218,7 @@ impl NewEpisodeMinimal { .guid(guid) .podcast_id(parent_id) .build() - .unwrap()) + .map_err(From::from) } pub(crate) fn into_new_episode(self, item: &rss::Item) -> NewEpisode { diff --git a/hammond-data/src/models/source.rs b/hammond-data/src/models/source.rs index c009041..f283c6b 100644 --- a/hammond-data/src/models/source.rs +++ b/hammond-data/src/models/source.rs @@ -1,4 +1,5 @@ use diesel::SaveChangesDsl; +// use failure::ResultExt; use rss::Channel; use url::Url; @@ -115,29 +116,64 @@ impl Source { let code = res.status(); match code { StatusCode::NotModified => { - return Err(DataError::DiscountBail(format!("304: skipping.."))) + let err = DataError::HttpStatusError { + url: self.uri, + status_code: code, + contex: format!("304: skipping.."), + }; + + return Err(err); } StatusCode::MovedPermanently => { error!("Feed was 
moved permanently."); self.handle_301(&res)?; - return Err(DataError::DiscountBail(format!( - "301: Feed was moved permanently." - ))); + + let err = DataError::HttpStatusError { + url: self.uri, + status_code: code, + contex: format!("301: Feed was moved permanently."), + }; + + return Err(err); } StatusCode::TemporaryRedirect => debug!("307: Temporary Redirect."), StatusCode::PermanentRedirect => warn!("308: Permanent Redirect."), StatusCode::Unauthorized => { - return Err(DataError::DiscountBail(format!("401: Unauthorized."))) + let err = DataError::HttpStatusError { + url: self.uri, + status_code: code, + contex: format!("401: Unauthorized."), + }; + + return Err(err); } StatusCode::Forbidden => { - return Err(DataError::DiscountBail(format!("403: Forbidden."))) + let err = DataError::HttpStatusError { + url: self.uri, + status_code: code, + contex: format!("403: Forbidden."), + }; + + return Err(err); } - StatusCode::NotFound => return Err(DataError::DiscountBail(format!("404: Not found."))), + StatusCode::NotFound => return Err(format!("404: Not found.")).map_err(From::from), StatusCode::RequestTimeout => { - return Err(DataError::DiscountBail(format!("408: Request Timeout."))) + let err = DataError::HttpStatusError { + url: self.uri, + status_code: code, + contex: format!("408: Request Timeout."), + }; + + return Err(err); } StatusCode::Gone => { - return Err(DataError::DiscountBail(format!("410: Feed was deleted."))) + let err = DataError::HttpStatusError { + url: self.uri, + status_code: code, + contex: format!("410: Feed was deleted.."), + }; + + return Err(err); } _ => info!("HTTP StatusCode: {}", code), }; @@ -187,11 +223,11 @@ impl Source { let feed = self.request_constructor(client, ignore_etags) .and_then(move |(_, res)| response_to_channel(res, pool)) .and_then(move |chan| { - Ok(FeedBuilder::default() + FeedBuilder::default() .channel(chan) .source_id(id) .build() - .unwrap()) + .map_err(From::from) }); Box::new(feed) From 
d0ae5a4198fb7ca6e957d2fd790a5b83d67a73f0 Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Mon, 5 Feb 2018 18:33:12 +0200 Subject: [PATCH 12/18] Typo fix. --- hammond-data/src/errors.rs | 4 ++-- hammond-data/src/models/source.rs | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/hammond-data/src/errors.rs b/hammond-data/src/errors.rs index 57a65cd..59dec70 100644 --- a/hammond-data/src/errors.rs +++ b/hammond-data/src/errors.rs @@ -38,11 +38,11 @@ pub enum DataError { RssCrateError(String), #[fail(display = "Error: {}", _0)] DiscountBail(String), - #[fail(display = "Request to {} returned {}. Contex: {}", url, status_code, contex)] + #[fail(display = "Request to {} returned {}. Context: {}", url, status_code, context)] HttpStatusError { url: String, status_code: hyper::StatusCode, - contex: String, + context: String, }, } diff --git a/hammond-data/src/models/source.rs b/hammond-data/src/models/source.rs index f283c6b..4a52dc0 100644 --- a/hammond-data/src/models/source.rs +++ b/hammond-data/src/models/source.rs @@ -119,7 +119,7 @@ impl Source { let err = DataError::HttpStatusError { url: self.uri, status_code: code, - contex: format!("304: skipping.."), + context: format!("304: skipping.."), }; return Err(err); @@ -131,7 +131,7 @@ impl Source { let err = DataError::HttpStatusError { url: self.uri, status_code: code, - contex: format!("301: Feed was moved permanently."), + context: format!("301: Feed was moved permanently."), }; return Err(err); @@ -142,7 +142,7 @@ impl Source { let err = DataError::HttpStatusError { url: self.uri, status_code: code, - contex: format!("401: Unauthorized."), + context: format!("401: Unauthorized."), }; return Err(err); @@ -151,7 +151,7 @@ impl Source { let err = DataError::HttpStatusError { url: self.uri, status_code: code, - contex: format!("403: Forbidden."), + context: format!("403: Forbidden."), }; return Err(err); @@ -161,7 +161,7 @@ impl Source { let err = DataError::HttpStatusError { url: 
self.uri, status_code: code, - contex: format!("408: Request Timeout."), + context: format!("408: Request Timeout."), }; return Err(err); @@ -170,7 +170,7 @@ impl Source { let err = DataError::HttpStatusError { url: self.uri, status_code: code, - contex: format!("410: Feed was deleted.."), + context: format!("410: Feed was deleted.."), }; return Err(err); From 008f57bec410d6924ca25c73f33cdfd0d41bf46c Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Mon, 5 Feb 2018 18:51:21 +0200 Subject: [PATCH 13/18] Create a ParseEpisodeError and replace bail! invocations. --- hammond-data/src/errors.rs | 2 ++ hammond-data/src/feed.rs | 6 +++--- hammond-data/src/models/new_episode.rs | 18 ++++++++++++------ hammond-data/src/utils.rs | 6 +++--- 4 files changed, 20 insertions(+), 12 deletions(-) diff --git a/hammond-data/src/errors.rs b/hammond-data/src/errors.rs index 59dec70..689ee91 100644 --- a/hammond-data/src/errors.rs +++ b/hammond-data/src/errors.rs @@ -44,6 +44,8 @@ pub enum DataError { status_code: hyper::StatusCode, context: String, }, + #[fail(display = "Error occured while Parsing an Episode. 
Reason: {}", reason)] + ParseEpisodeError { reason: String, parent_id: i32 }, } impl From for DataError { diff --git a/hammond-data/src/feed.rs b/hammond-data/src/feed.rs index f6a3c6e..c5797b3 100644 --- a/hammond-data/src/feed.rs +++ b/hammond-data/src/feed.rs @@ -52,11 +52,11 @@ impl Feed { info!("Indexing {} episodes.", insert.len()); if let Err(err) = dbqueries::index_new_episodes(insert.as_slice()) { error!("Failed batch indexng, Fallign back to individual indexing."); - error!("Error: {}", err); + error!("{}", err); insert.iter().for_each(|ep| { if let Err(err) = ep.index() { error!("Failed to index episode: {:?}.", ep.title()); - error!("Error msg: {}", err); + error!("{}", err); }; }) } @@ -73,7 +73,7 @@ impl Feed { .for_each(|(ref ep, rowid)| { if let Err(err) = ep.update(rowid) { error!("Failed to index episode: {:?}.", ep.title()); - error!("Error msg: {}", err); + error!("{}", err); }; }) } diff --git a/hammond-data/src/models/new_episode.rs b/hammond-data/src/models/new_episode.rs index e943688..7cc56c7 100644 --- a/hammond-data/src/models/new_episode.rs +++ b/hammond-data/src/models/new_episode.rs @@ -184,9 +184,12 @@ impl PartialEq for NewEpisodeMinimal { impl NewEpisodeMinimal { pub(crate) fn new(item: &rss::Item, parent_id: i32) -> Result { if item.title().is_none() { - return Err(DataError::DiscountBail(format!( - "No title specified for the item." - ))); + let err = DataError::ParseEpisodeError { + reason: format!("No title specified for this Episode."), + parent_id, + }; + + return Err(err); } let title = item.title().unwrap().trim().to_owned(); @@ -197,9 +200,12 @@ impl NewEpisodeMinimal { } else if item.link().is_some() { item.link().map(|s| url_cleaner(s)) } else { - return Err(DataError::DiscountBail(format!( - "No url specified for the item." - ))); + let err = DataError::ParseEpisodeError { + reason: format!("No url specified for the item."), + parent_id, + }; + + return Err(err); }; // Default to rfc2822 represantation of epoch 0. 
diff --git a/hammond-data/src/utils.rs b/hammond-data/src/utils.rs index 11ec3bf..5681686 100644 --- a/hammond-data/src/utils.rs +++ b/hammond-data/src/utils.rs @@ -25,7 +25,7 @@ fn download_checker() -> Result<(), DataError> { ep.set_local_uri(None); if let Err(err) = ep.save() { error!("Error while trying to update episode: {:#?}", ep); - error!("Error: {}", err); + error!("{}", err); }; }); @@ -47,7 +47,7 @@ fn played_cleaner() -> Result<(), DataError> { if now_utc > limit { if let Err(err) = delete_local_content(ep) { error!("Error while trying to delete file: {:?}", ep.local_uri()); - error!("Error: {}", err); + error!("{}", err); } else { info!("Episode {:?} was deleted succesfully.", ep.local_uri()); }; @@ -67,7 +67,7 @@ fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<(), DataError> { ep.save()?; } else { error!("Error while trying to delete file: {}", uri); - error!("Error: {}", res.unwrap_err()); + error!("{}", res.unwrap_err()); }; } } else { From 44ebe46f10f41e3db514e3672799f564ec1e4f2b Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Mon, 5 Feb 2018 19:00:31 +0200 Subject: [PATCH 14/18] Replace last bail! invocations. --- hammond-data/src/errors.rs | 4 ++++ hammond-data/src/feed.rs | 2 +- hammond-data/src/lib.rs | 2 +- hammond-data/src/pipeline.rs | 4 +--- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/hammond-data/src/errors.rs b/hammond-data/src/errors.rs index 689ee91..e4237fc 100644 --- a/hammond-data/src/errors.rs +++ b/hammond-data/src/errors.rs @@ -46,6 +46,10 @@ pub enum DataError { }, #[fail(display = "Error occured while Parsing an Episode. 
Reason: {}", reason)] ParseEpisodeError { reason: String, parent_id: i32 }, + #[fail(display = "No Futures where produced to be run.")] + EmptyFuturesList, + #[fail(display = "Episode was not changed and thus skipped.")] + EpisodeNotChanged, } impl From for DataError { diff --git a/hammond-data/src/feed.rs b/hammond-data/src/feed.rs index c5797b3..151234d 100644 --- a/hammond-data/src/feed.rs +++ b/hammond-data/src/feed.rs @@ -96,7 +96,7 @@ impl Feed { // I am not sure what the optimizations are on match vs allocating None. .map(|fut| { fut.and_then(|x| match x { - IndexState::NotChanged => return Err(DataError::DiscountBail(format!("Nothing to do here."))), + IndexState::NotChanged => return Err(DataError::EpisodeNotChanged), _ => Ok(x), }) }) diff --git a/hammond-data/src/lib.rs b/hammond-data/src/lib.rs index 5fbcd49..3fbbadc 100644 --- a/hammond-data/src/lib.rs +++ b/hammond-data/src/lib.rs @@ -27,7 +27,7 @@ extern crate derive_builder; extern crate diesel; #[macro_use] extern crate diesel_migrations; -#[macro_use] +// #[macro_use] extern crate failure; #[macro_use] extern crate failure_derive; diff --git a/hammond-data/src/pipeline.rs b/hammond-data/src/pipeline.rs index e7915a1..893bc23 100644 --- a/hammond-data/src/pipeline.rs +++ b/hammond-data/src/pipeline.rs @@ -58,9 +58,7 @@ pub fn pipeline>( .collect(); if list.is_empty() { - return Err(DataError::DiscountBail(format!( - "No futures were found to run." - ))); + return Err(DataError::EmptyFuturesList); } // Thats not really concurrent yet I think. From 7eec01a52ea7d61ce00e1f1baaf8d66032b7479e Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Mon, 5 Feb 2018 19:05:40 +0200 Subject: [PATCH 15/18] Minor rename of DataError::Bail. 
--- hammond-data/src/errors.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/hammond-data/src/errors.rs b/hammond-data/src/errors.rs index e4237fc..ccddd85 100644 --- a/hammond-data/src/errors.rs +++ b/hammond-data/src/errors.rs @@ -37,7 +37,7 @@ pub enum DataError { // Rss::Error is not yet Sync RssCrateError(String), #[fail(display = "Error: {}", _0)] - DiscountBail(String), + Bail(String), #[fail(display = "Request to {} returned {}. Context: {}", url, status_code, context)] HttpStatusError { url: String, @@ -102,6 +102,6 @@ impl From for DataError { impl From for DataError { fn from(err: String) -> Self { - DataError::DiscountBail(err) + DataError::Bail(err) } } From 4d1168803c00bb6102fe192e3c90c3557d2219c2 Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Mon, 5 Feb 2018 19:43:26 +0200 Subject: [PATCH 16/18] Dbqueries: Remove Result type alias. --- hammond-data/src/dbqueries.rs | 56 ++++++++++++++++------------- 1 file changed, 26 insertions(+), 30 deletions(-) diff --git a/hammond-data/src/dbqueries.rs b/hammond-data/src/dbqueries.rs index 758488b..27c46f7 100644 --- a/hammond-data/src/dbqueries.rs +++ b/hammond-data/src/dbqueries.rs @@ -11,11 +11,7 @@ use database::connection; use errors::DataError; use models::*; -// Feel free to open a Merge request that manually replaces Result if you feel bored.
-use std::result; -type DatabaseResult = result::Result; - -pub fn get_sources() -> DatabaseResult> { +pub fn get_sources() -> Result, DataError> { use schema::source::dsl::*; let db = connection(); let con = db.get()?; @@ -26,7 +22,7 @@ pub fn get_sources() -> DatabaseResult> { .map_err(From::from) } -pub fn get_podcasts() -> DatabaseResult> { +pub fn get_podcasts() -> Result, DataError> { use schema::podcast::dsl::*; let db = connection(); let con = db.get()?; @@ -37,7 +33,7 @@ pub fn get_podcasts() -> DatabaseResult> { .map_err(From::from) } -pub fn get_episodes() -> DatabaseResult> { +pub fn get_episodes() -> Result, DataError> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -48,7 +44,7 @@ pub fn get_episodes() -> DatabaseResult> { .map_err(From::from) } -pub(crate) fn get_downloaded_episodes() -> DatabaseResult> { +pub(crate) fn get_downloaded_episodes() -> Result, DataError> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -60,7 +56,7 @@ pub(crate) fn get_downloaded_episodes() -> DatabaseResult DatabaseResult> { +// pub(crate) fn get_played_episodes() -> Result, DataError> { // use schema::episode::dsl::*; // let db = connection(); @@ -71,7 +67,7 @@ pub(crate) fn get_downloaded_episodes() -> DatabaseResult DatabaseResult> { +pub(crate) fn get_played_cleaner_episodes() -> Result, DataError> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -83,7 +79,7 @@ pub(crate) fn get_played_cleaner_episodes() -> DatabaseResult DatabaseResult { +pub fn get_episode_from_rowid(ep_id: i32) -> Result { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -94,7 +90,7 @@ pub fn get_episode_from_rowid(ep_id: i32) -> DatabaseResult { .map_err(From::from) } -pub fn get_episode_local_uri_from_id(ep_id: i32) -> DatabaseResult> { +pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result, DataError> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ 
-106,7 +102,7 @@ pub fn get_episode_local_uri_from_id(ep_id: i32) -> DatabaseResult DatabaseResult> { +pub fn get_episodes_widgets_with_limit(limit: u32) -> Result, DataError> { use schema::episode; let db = connection(); let con = db.get()?; @@ -129,7 +125,7 @@ pub fn get_episodes_widgets_with_limit(limit: u32) -> DatabaseResult DatabaseResult { +pub fn get_podcast_from_id(pid: i32) -> Result { use schema::podcast::dsl::*; let db = connection(); let con = db.get()?; @@ -140,7 +136,7 @@ pub fn get_podcast_from_id(pid: i32) -> DatabaseResult { .map_err(From::from) } -pub fn get_podcast_cover_from_id(pid: i32) -> DatabaseResult { +pub fn get_podcast_cover_from_id(pid: i32) -> Result { use schema::podcast::dsl::*; let db = connection(); let con = db.get()?; @@ -152,7 +148,7 @@ pub fn get_podcast_cover_from_id(pid: i32) -> DatabaseResult .map_err(From::from) } -pub fn get_pd_episodes(parent: &Podcast) -> DatabaseResult> { +pub fn get_pd_episodes(parent: &Podcast) -> Result, DataError> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -163,7 +159,7 @@ pub fn get_pd_episodes(parent: &Podcast) -> DatabaseResult> { .map_err(From::from) } -pub fn get_pd_episodeswidgets(parent: &Podcast) -> DatabaseResult> { +pub fn get_pd_episodeswidgets(parent: &Podcast) -> Result, DataError> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -176,7 +172,7 @@ pub fn get_pd_episodeswidgets(parent: &Podcast) -> DatabaseResult DatabaseResult> { +pub fn get_pd_unplayed_episodes(parent: &Podcast) -> Result, DataError> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -189,7 +185,7 @@ pub fn get_pd_unplayed_episodes(parent: &Podcast) -> DatabaseResult } // pub(crate) fn get_pd_episodes_limit(parent: &Podcast, limit: u32) -> -// DatabaseResult> { use schema::episode::dsl::*; +// Result, DataError> { use schema::episode::dsl::*; // let db = connection(); // let con = db.get()?; @@ -201,7 +197,7 @@ pub fn 
get_pd_unplayed_episodes(parent: &Podcast) -> DatabaseResult // .map_err(From::from) // } -pub fn get_source_from_uri(uri_: &str) -> DatabaseResult { +pub fn get_source_from_uri(uri_: &str) -> Result { use schema::source::dsl::*; let db = connection(); let con = db.get()?; @@ -212,7 +208,7 @@ pub fn get_source_from_uri(uri_: &str) -> DatabaseResult { .map_err(From::from) } -pub fn get_source_from_id(id_: i32) -> DatabaseResult { +pub fn get_source_from_id(id_: i32) -> Result { use schema::source::dsl::*; let db = connection(); let con = db.get()?; @@ -223,7 +219,7 @@ pub fn get_source_from_id(id_: i32) -> DatabaseResult { .map_err(From::from) } -pub fn get_podcast_from_source_id(sid: i32) -> DatabaseResult { +pub fn get_podcast_from_source_id(sid: i32) -> Result { use schema::podcast::dsl::*; let db = connection(); let con = db.get()?; @@ -234,7 +230,7 @@ pub fn get_podcast_from_source_id(sid: i32) -> DatabaseResult { .map_err(From::from) } -pub fn get_episode_from_pk(title_: &str, pid: i32) -> DatabaseResult { +pub fn get_episode_from_pk(title_: &str, pid: i32) -> Result { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -249,7 +245,7 @@ pub fn get_episode_from_pk(title_: &str, pid: i32) -> DatabaseResult { pub(crate) fn get_episode_minimal_from_pk( title_: &str, pid: i32, -) -> DatabaseResult { +) -> Result { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -262,7 +258,7 @@ pub(crate) fn get_episode_minimal_from_pk( .map_err(From::from) } -pub(crate) fn remove_feed(pd: &Podcast) -> DatabaseResult<()> { +pub(crate) fn remove_feed(pd: &Podcast) -> Result<(), DataError> { let db = connection(); let con = db.get()?; @@ -293,7 +289,7 @@ fn delete_podcast_episodes(con: &SqliteConnection, parent_id: i32) -> QueryResul diesel::delete(episode.filter(podcast_id.eq(parent_id))).execute(con) } -pub fn source_exists(url: &str) -> DatabaseResult { +pub fn source_exists(url: &str) -> Result { use schema::source::dsl::*; let 
db = connection(); @@ -304,7 +300,7 @@ pub fn source_exists(url: &str) -> DatabaseResult { .map_err(From::from) } -pub(crate) fn podcast_exists(source_id_: i32) -> DatabaseResult { +pub(crate) fn podcast_exists(source_id_: i32) -> Result { use schema::podcast::dsl::*; let db = connection(); @@ -316,7 +312,7 @@ pub(crate) fn podcast_exists(source_id_: i32) -> DatabaseResult { } #[cfg_attr(rustfmt, rustfmt_skip)] -pub(crate) fn episode_exists(title_: &str, podcast_id_: i32) -> DatabaseResult { +pub(crate) fn episode_exists(title_: &str, podcast_id_: i32) -> Result { use schema::episode::dsl::*; let db = connection(); @@ -327,7 +323,7 @@ pub(crate) fn episode_exists(title_: &str, podcast_id_: i32) -> DatabaseResult DatabaseResult<()> { +pub(crate) fn index_new_episodes(eps: &[NewEpisode]) -> Result<(), DataError> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -339,7 +335,7 @@ pub(crate) fn index_new_episodes(eps: &[NewEpisode]) -> DatabaseResult<()> { .map(|_| ()) } -pub fn update_none_to_played_now(parent: &Podcast) -> DatabaseResult { +pub fn update_none_to_played_now(parent: &Podcast) -> Result { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; From 5cd3dff1d4aa483c187a2d151284bc668db6c40b Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Mon, 5 Feb 2018 20:41:45 +0200 Subject: [PATCH 17/18] Downloader: Switch to returning Downloader::Error instead of failure::Error. 
--- hammond-downloader/src/downloader.rs | 17 ++++++--------- hammond-downloader/src/errors.rs | 32 +++++++++++++++++++++++----- hammond-downloader/src/lib.rs | 1 - 3 files changed, 34 insertions(+), 16 deletions(-) diff --git a/hammond-downloader/src/downloader.rs b/hammond-downloader/src/downloader.rs index 07ad6a6..53851aa 100644 --- a/hammond-downloader/src/downloader.rs +++ b/hammond-downloader/src/downloader.rs @@ -14,11 +14,8 @@ use std::sync::{Arc, Mutex}; use hammond_data::{EpisodeWidgetQuery, PodcastCoverQuery, Save}; use hammond_data::xdg_dirs::HAMMOND_CACHE; -use std::result; - -use failure::Error; - -type Result = result::Result; +// use failure::Error; +use errors::DownloadError; // TODO: Replace path that are of type &str with std::path. // TODO: Have a convention/document absolute/relative paths, if they should end with / or not. @@ -41,7 +38,7 @@ fn download_into( file_title: &str, url: &str, progress: Option>>, -) -> Result { +) -> Result { info!("GET request to: {}", url); // Haven't included the loop check as // Steal the Stars would tigger it as @@ -65,7 +62,7 @@ fn download_into( info!("Status Resp: {}", resp.status()); if !resp.status().is_success() { - bail!("Unexpected server response: {}", resp.status()) + return Err(DownloadError::UnexpectedResponse(resp.status())); } let headers = resp.headers().clone(); @@ -122,7 +119,7 @@ fn save_io( resp: &mut reqwest::Response, content_lenght: Option, progress: Option>>, -) -> Result<()> { +) -> Result<(), DownloadError> { info!("Downloading into: {}", file); let chunk_size = match content_lenght { Some(x) => x as usize / 99, @@ -144,7 +141,7 @@ fn save_io( if let Ok(l) = len { if let Ok(mut m) = prog.lock() { if m.should_cancel() { - bail!("Download was cancelled."); + return Err(DownloadError::DownloadCancelled); } m.set_downloaded(l); } @@ -163,7 +160,7 @@ pub fn get_episode( ep: &mut EpisodeWidgetQuery, download_folder: &str, progress: Option>>, -) -> Result<()> { +) -> Result<(), DownloadError> 
{ // Check if its alrdy downloaded if ep.local_uri().is_some() { if Path::new(ep.local_uri().unwrap()).exists() { diff --git a/hammond-downloader/src/errors.rs b/hammond-downloader/src/errors.rs index 8ee10d3..7df3bd1 100644 --- a/hammond-downloader/src/errors.rs +++ b/hammond-downloader/src/errors.rs @@ -1,13 +1,35 @@ -use hammond_data; +use hammond_data::errors::DataError; use reqwest; use std::io; #[derive(Fail, Debug)] -pub enum DownloaderError { +pub enum DownloadError { #[fail(display = "Reqwest error: {}", _0)] - RequestError(reqwest::Error), + RequestError(#[cause] reqwest::Error), #[fail(display = "Data error: {}", _0)] - DataError(hammond_data::errors::DataError), + DataError(#[cause] DataError), #[fail(display = "Io error: {}", _0)] - IoError(io::Error), + IoError(#[cause] io::Error), + #[fail(display = "The Download was cancelled")] + DownloadCancelled, + #[fail(display = "Unexpected server response: {}", _0)] + UnexpectedResponse(reqwest::StatusCode), +} + +impl From for DownloadError { + fn from(err: reqwest::Error) -> Self { + DownloadError::RequestError(err) + } +} + +impl From for DownloadError { + fn from(err: io::Error) -> Self { + DownloadError::IoError(err) + } +} + +impl From for DownloadError { + fn from(err: DataError) -> Self { + DownloadError::DataError(err) + } } diff --git a/hammond-downloader/src/lib.rs b/hammond-downloader/src/lib.rs index 2a5f991..64276d3 100644 --- a/hammond-downloader/src/lib.rs +++ b/hammond-downloader/src/lib.rs @@ -1,7 +1,6 @@ #![recursion_limit = "1024"] #![deny(unused_extern_crates, unused)] -#[macro_use] extern crate failure; #[macro_use] extern crate failure_derive; From d3472b32057951d35c8ffc416aa70c6c6535dc33 Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Mon, 5 Feb 2018 20:51:34 +0200 Subject: [PATCH 18/18] hammond-data: Deny Unused. 
--- hammond-data/src/lib.rs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/hammond-data/src/lib.rs b/hammond-data/src/lib.rs index 3fbbadc..52e58d3 100644 --- a/hammond-data/src/lib.rs +++ b/hammond-data/src/lib.rs @@ -6,9 +6,6 @@ wrong_pub_self_convention, mut_mut, non_ascii_literal, similar_names, unicode_not_nfc, enum_glob_use, if_not_else, items_after_statements, used_underscore_binding))] - -//! FIXME: Docs - #![allow(unknown_lints)] #![deny(bad_style, const_err, dead_code, improper_ctypes, legacy_directory_ownership, non_shorthand_field_patterns, no_mangle_generic_items, overflowing_literals, @@ -17,10 +14,12 @@ unconditional_recursion, unions_with_drop_fields, unused_allocation, unused_comparisons, unused_parens, while_true)] #![deny(missing_debug_implementations, missing_docs, trivial_casts, trivial_numeric_casts)] -// #![deny(unused_extern_crates, unused)] +#![deny(unused_extern_crates, unused)] // #![feature(conservative_impl_trait)] +//! FIXME: Docs + #[macro_use] extern crate derive_builder; #[macro_use]