From de43cae0157b78d2601cea702713bf0925f5aca9 Mon Sep 17 00:00:00 2001
From: Jordan Petridis
Date: Sun, 4 Feb 2018 17:36:27 +0200
Subject: [PATCH] Switch the rest of data/downloader to the failure crate.
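
error_chain generated a single monolithic Error type out of the
`foreign_links` block. With failure, small dedicated enums derive
`Fail` instead, and fallible functions return the catch-all
`failure::Error`: anything implementing `std::error::Error + Send +
Sync + 'static` converts into it through the `?` operator. rss::Error
is not Sync yet, so its wrapper stays commented out for now.

A minimal sketch of the new pattern (illustrative only, not code
taken from this patch):

    use std::fs::File;
    use std::io::Read;

    use failure::Error;

    // No foreign_links entry needed any more: `?` converts
    // io::Error into failure::Error automatically.
    fn read_file(path: &str) -> Result<String, Error> {
        let mut buf = String::new();
        File::open(path)?.read_to_string(&mut buf)?;
        Ok(buf)
    }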
---
 Cargo.lock                             |  2 ++
 hammond-data/src/dbqueries.rs          |  6 ++++-
 hammond-data/src/errors.rs             | 33 ++++++++++++++------------
 hammond-data/src/feed.rs               |  2 +-
 hammond-data/src/lib.rs                |  3 +--
 hammond-data/src/models/episode.rs     | 12 +++++-----
 hammond-data/src/models/mod.rs         | 10 ++++----
 hammond-data/src/models/new_episode.rs | 26 ++++++++++----------
 hammond-data/src/models/new_podcast.rs | 11 ++++-----
 hammond-data/src/models/new_source.rs  |  7 +++---
 hammond-data/src/models/podcast.rs     |  4 ++--
 hammond-data/src/models/source.rs      | 19 ++++++++-------
 hammond-data/src/pipeline.rs           | 16 +++++++------
 hammond-data/src/utils.rs              | 15 ++++++------
 hammond-downloader/src/downloader.rs   |  6 +++--
 hammond-downloader/src/errors.rs       | 12 +---------
 hammond-downloader/src/lib.rs          |  3 +--
 hammond-gtk/Cargo.toml                 |  2 ++
 hammond-gtk/src/main.rs                |  6 ++++-
 hammond-gtk/src/widgets/episode.rs     |  4 ++--
 20 files changed, 102 insertions(+), 97 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 76aba78..c7dd21a 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -661,6 +661,8 @@ version = "0.1.0"
 dependencies = [
  "chrono 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "dissolve 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "failure_derive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "gdk 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "gdk-pixbuf 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "gio 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
diff --git a/hammond-data/src/dbqueries.rs b/hammond-data/src/dbqueries.rs
index 7fe277f..5a4417a 100644
--- a/hammond-data/src/dbqueries.rs
+++ b/hammond-data/src/dbqueries.rs
@@ -6,11 +6,15 @@ use diesel::prelude::*;
 use diesel;
 use diesel::dsl::exists;
 use diesel::select;
+use failure::Error;
 
 use database::connection;
-use errors::*;
 use models::*;
 
+// Feel free to open a merge request that manually replaces this Result alias, if you feel bored.
+use std::result;
+type Result<T> = result::Result<T, Error>;
+
 pub fn get_sources() -> Result<Vec<Source>> {
     use schema::source::dsl::*;
     let db = connection();
diff --git a/hammond-data/src/errors.rs b/hammond-data/src/errors.rs
index 299cbf6..b4f0549 100644
--- a/hammond-data/src/errors.rs
+++ b/hammond-data/src/errors.rs
@@ -4,25 +4,20 @@ use diesel_migrations::RunMigrationsError;
 use hyper;
 use native_tls;
 use reqwest;
-use rss;
+// use rss;
 use url;
 
 use std::io;
 
-error_chain! {
-    foreign_links {
-        DieselResultError(diesel::result::Error);
-        DieselMigrationError(RunMigrationsError);
-        R2D2Error(r2d2::Error);
-        R2D2PoolError(r2d2::PoolError);
-        RSSError(rss::Error);
-        ReqError(reqwest::Error);
-        HyperError(hyper::Error);
-        UrlError(url::ParseError);
-        TLSError(native_tls::Error);
-        IoError(io::Error);
-    }
-}
+#[allow(dead_code)]
+#[derive(Fail, Debug)]
+#[fail(display = "IO Error: {}", _0)]
+struct IOError(io::Error);
+
+// NOT SYNC: rss::Error does not implement Sync, so it cannot derive Fail yet.
+// #[derive(Fail, Debug)]
+// #[fail(display = "RSS Error: {}", _0)]
+// struct RSSError(rss::Error);
 
 #[derive(Fail, Debug)]
 pub enum DatabaseError {
@@ -31,3 +26,11 @@ pub enum DatabaseError {
     #[fail(display = "R2D2 error: {}", _0)] R2D2Error(r2d2::Error),
     #[fail(display = "R2D2 Pool error: {}", _0)] R2D2PoolError(r2d2::PoolError),
 }
+
+#[derive(Fail, Debug)]
+pub enum HttpError {
+    #[fail(display = "Reqwest Error: {}", _0)] ReqError(reqwest::Error),
+    #[fail(display = "Hyper Error: {}", _0)] HyperError(hyper::Error),
+    #[fail(display = "Url Error: {}", _0)] UrlError(url::ParseError),
+    #[fail(display = "TLS Error: {}", _0)] TLSError(native_tls::Error),
+}
diff --git a/hammond-data/src/feed.rs b/hammond-data/src/feed.rs
index 9f39cd0..998fab1 100644
--- a/hammond-data/src/feed.rs
+++ b/hammond-data/src/feed.rs
@@ -1,11 +1,11 @@
 //! Index Feeds.
 
+use failure::Error;
 use futures::future::*;
 use itertools::{Either, Itertools};
 use rss;
 
 use dbqueries;
-use errors::*;
 use models::{Index, IndexState, Update};
 use models::{NewEpisode, NewPodcast, Podcast};
 use pipeline::*;
diff --git a/hammond-data/src/lib.rs b/hammond-data/src/lib.rs
index e6fe7cc..9602c5c 100644
--- a/hammond-data/src/lib.rs
+++ b/hammond-data/src/lib.rs
@@ -28,7 +28,7 @@ extern crate diesel;
 #[macro_use]
 extern crate diesel_migrations;
 #[macro_use]
-extern crate error_chain;
+extern crate failure;
 #[macro_use]
 extern crate failure_derive;
 #[macro_use]
@@ -38,7 +38,6 @@ extern crate log;
 
 extern crate ammonia;
 extern crate chrono;
-extern crate failure;
 extern crate futures;
 extern crate futures_cpupool;
 extern crate hyper;
diff --git a/hammond-data/src/models/episode.rs b/hammond-data/src/models/episode.rs
index 948422c..afb1c2a 100644
--- a/hammond-data/src/models/episode.rs
+++ b/hammond-data/src/models/episode.rs
@@ -2,9 +2,9 @@ use chrono::prelude::*;
 use diesel;
 use diesel::SaveChangesDsl;
 use diesel::prelude::*;
+use failure::Error;
 
 use database::connection;
-use errors::*;
 use models::{Podcast, Save};
 use schema::episode;
 
@@ -33,7 +33,7 @@ pub struct Episode {
 
 impl Save<Episode> for Episode {
     /// Helper method to easily save/"sync" current state of self to the Database.
-    fn save(&self) -> Result<Episode> {
+    fn save(&self) -> Result<Episode, Error> {
         let db = connection();
         let tempdb = db.get()?;
 
@@ -180,7 +180,7 @@ impl Episode {
     }
 
     /// Sets the `played` value with the current `epoch` timestamp and save it.
-    pub fn set_played_now(&mut self) -> Result<()> {
+    pub fn set_played_now(&mut self) -> Result<(), Error> {
         let epoch = Utc::now().timestamp() as i32;
         self.set_played(Some(epoch));
         self.save().map(|_| ())
@@ -225,7 +225,7 @@ impl From<Episode> for EpisodeWidgetQuery {
 
 impl Save<EpisodeWidgetQuery> for EpisodeWidgetQuery {
     /// Helper method to easily save/"sync" current state of self to the Database.
-    fn save(&self) -> Result<EpisodeWidgetQuery> {
+    fn save(&self) -> Result<EpisodeWidgetQuery, Error> {
         use schema::episode::dsl::*;
 
         let db = connection();
@@ -342,7 +342,7 @@ impl EpisodeWidgetQuery {
     }
 
     /// Sets the `played` value with the current `epoch` timestamp and save it.
-    pub fn set_played_now(&mut self) -> Result<()> {
+    pub fn set_played_now(&mut self) -> Result<(), Error> {
         let epoch = Utc::now().timestamp() as i32;
         self.set_played(Some(epoch));
         self.save().map(|_| ())
@@ -363,7 +363,7 @@ pub struct EpisodeCleanerQuery {
 
 impl Save<EpisodeCleanerQuery> for EpisodeCleanerQuery {
     /// Helper method to easily save/"sync" current state of self to the Database.
-    fn save(&self) -> Result<EpisodeCleanerQuery> {
+    fn save(&self) -> Result<EpisodeCleanerQuery, Error> {
         use schema::episode::dsl::*;
 
         let db = connection();
diff --git a/hammond-data/src/models/mod.rs b/hammond-data/src/models/mod.rs
index 87efe30..8c2a89d 100644
--- a/hammond-data/src/models/mod.rs
+++ b/hammond-data/src/models/mod.rs
@@ -23,7 +23,7 @@ pub use self::episode::{Episode, EpisodeMinimal, EpisodeWidgetQuery};
 pub use self::podcast::{Podcast, PodcastCoverQuery};
 pub use self::source::Source;
 
-use errors::*;
+use failure::Error;
 
 #[derive(Debug, Clone, PartialEq)]
 pub enum IndexState<T> {
@@ -33,19 +33,19 @@ pub enum IndexState<T> {
 }
 
 pub trait Insert {
-    fn insert(&self) -> Result<()>;
+    fn insert(&self) -> Result<(), Error>;
 }
 
 pub trait Update {
-    fn update(&self, i32) -> Result<()>;
+    fn update(&self, i32) -> Result<(), Error>;
 }
 
 pub trait Index: Insert + Update {
-    fn index(&self) -> Result<()>;
+    fn index(&self) -> Result<(), Error>;
 }
 
 /// FIXME: DOCS
 pub trait Save<T> {
     /// Helper method to easily save/"sync" current state of a diesel model to the Database.
-    fn save(&self) -> Result<T>;
+    fn save(&self) -> Result<T, Error>;
 }
diff --git a/hammond-data/src/models/new_episode.rs b/hammond-data/src/models/new_episode.rs
index 3de8043..6e487a7 100644
--- a/hammond-data/src/models/new_episode.rs
+++ b/hammond-data/src/models/new_episode.rs
@@ -1,17 +1,15 @@
-use diesel::prelude::*;
-
-use diesel;
-use schema::episode;
-
 use ammonia;
+use diesel;
+use diesel::prelude::*;
+use failure::Error;
 use rfc822_sanitizer::parse_from_rfc2822_with_fallback as parse_rfc822;
 use rss;
 
 use database::connection;
 use dbqueries;
-use errors::*;
 use models::{Episode, EpisodeMinimal, Index, Insert, Update};
 use parser;
+use schema::episode;
 use utils::{replace_extra_spaces, url_cleaner};
 
 #[derive(Insertable, AsChangeset)]
 #[table_name = "episode"]
@@ -46,7 +44,7 @@ impl From for NewEpisode {
 }
 
 impl Insert for NewEpisode {
-    fn insert(&self) -> Result<()> {
+    fn insert(&self) -> Result<(), Error> {
         use schema::episode::dsl::*;
         let db = connection();
         let con = db.get()?;
@@ -61,7 +59,7 @@ impl Insert for NewEpisode {
 }
 
 impl Update for NewEpisode {
-    fn update(&self, episode_id: i32) -> Result<()> {
+    fn update(&self, episode_id: i32) -> Result<(), Error> {
         use schema::episode::dsl::*;
         let db = connection();
         let con = db.get()?;
@@ -77,7 +75,7 @@ impl Update for NewEpisode {
 
 impl Index for NewEpisode {
     // Does not update the episode description if it's the only thing that has changed.
-    fn index(&self) -> Result<()> {
+    fn index(&self) -> Result<(), Error> {
         let exists = dbqueries::episode_exists(self.title(), self.podcast_id())?;
 
         if exists {
@@ -115,12 +113,12 @@ impl PartialEq for NewEpisode {
 
 impl NewEpisode {
     /// Parses an `rss::Item` into a `NewEpisode` Struct.
     #[allow(dead_code)]
-    pub(crate) fn new(item: &rss::Item, podcast_id: i32) -> Result<Self> {
+    pub(crate) fn new(item: &rss::Item, podcast_id: i32) -> Result<Self, Error> {
         NewEpisodeMinimal::new(item, podcast_id).map(|ep| ep.into_new_episode(item))
     }
 
     #[allow(dead_code)]
-    pub(crate) fn to_episode(&self) -> Result<Episode> {
+    pub(crate) fn to_episode(&self) -> Result<Episode, Error> {
         self.index()?;
         dbqueries::get_episode_from_pk(&self.title, self.podcast_id)
     }
@@ -184,7 +182,7 @@ impl PartialEq for NewEpisodeMinimal {
 }
 
 impl NewEpisodeMinimal {
-    pub(crate) fn new(item: &rss::Item, parent_id: i32) -> Result<Self> {
+    pub(crate) fn new(item: &rss::Item, parent_id: i32) -> Result<Self, Error> {
         if item.title().is_none() {
             bail!("No title specified for the item.")
         }
@@ -208,7 +206,7 @@ impl NewEpisodeMinimal {
 
         let duration = parser::parse_itunes_duration(item.itunes_ext());
 
-        NewEpisodeMinimalBuilder::default()
+        Ok(NewEpisodeMinimalBuilder::default()
             .title(title)
             .uri(uri)
             .duration(duration)
@@ -216,7 +214,7 @@ impl NewEpisodeMinimal {
             .guid(guid)
             .podcast_id(parent_id)
             .build()
-            .map_err(From::from)
+            .unwrap())
     }
 
     pub(crate) fn into_new_episode(self, item: &rss::Item) -> NewEpisode {
diff --git a/hammond-data/src/models/new_podcast.rs b/hammond-data/src/models/new_podcast.rs
index f6da37c..d02f5df 100644
--- a/hammond-data/src/models/new_podcast.rs
+++ b/hammond-data/src/models/new_podcast.rs
@@ -2,6 +2,7 @@ use diesel;
 use diesel::prelude::*;
 
 use ammonia;
+use failure::Error;
 use rss;
 
 use models::{Index, Insert, Update};
@@ -12,8 +13,6 @@ use database::connection;
 use dbqueries;
 use utils::{replace_extra_spaces, url_cleaner};
 
-use errors::*;
-
 #[derive(Insertable, AsChangeset)]
 #[table_name = "podcast"]
 #[derive(Debug, Clone, Default, Builder, PartialEq)]
@@ -29,7 +28,7 @@ pub(crate) struct NewPodcast {
 }
 
 impl Insert for NewPodcast {
-    fn insert(&self) -> Result<()> {
+    fn insert(&self) -> Result<(), Error> {
         use schema::podcast::dsl::*;
         let db = connection();
         let con = db.get()?;
@@ -43,7 +42,7 @@ impl Insert for NewPodcast {
 }
 
 impl Update for NewPodcast {
-    fn update(&self, podcast_id: i32) -> Result<()> {
+    fn update(&self, podcast_id: i32) -> Result<(), Error> {
         use schema::podcast::dsl::*;
         let db = connection();
         let con = db.get()?;
@@ -60,7 +59,7 @@ impl Update for NewPodcast {
 
 // TODO: Maybe return an Enum Instead.
 // It would make unit testing better too.
 impl Index for NewPodcast {
-    fn index(&self) -> Result<()> {
+    fn index(&self) -> Result<(), Error> {
         let exists = dbqueries::podcast_exists(self.source_id)?;
 
         if exists {
@@ -119,7 +118,7 @@ impl NewPodcast {
     }
 
     // Look out for when TryInto lands in stable.
-    pub(crate) fn to_podcast(&self) -> Result<Podcast> {
+    pub(crate) fn to_podcast(&self) -> Result<Podcast, Error> {
         self.index()?;
         dbqueries::get_podcast_from_source_id(self.source_id).map_err(From::from)
     }
diff --git a/hammond-data/src/models/new_source.rs b/hammond-data/src/models/new_source.rs
index 944e7c1..fbe79f1 100644
--- a/hammond-data/src/models/new_source.rs
+++ b/hammond-data/src/models/new_source.rs
@@ -2,6 +2,7 @@
 use diesel;
 use diesel::prelude::*;
 
+use failure::Error;
 use url::Url;
 
 use database::connection;
@@ -10,8 +11,6 @@ use dbqueries;
 use models::Source;
 use schema::source;
 
-use errors::*;
-
 #[derive(Insertable)]
 #[table_name = "source"]
 #[derive(Debug, Clone, Default, Builder, PartialEq)]
@@ -33,7 +32,7 @@ impl NewSource {
         }
     }
 
-    pub(crate) fn insert_or_ignore(&self) -> Result<()> {
+    pub(crate) fn insert_or_ignore(&self) -> Result<(), Error> {
         use schema::source::dsl::*;
         let db = connection();
         let con = db.get()?;
@@ -46,7 +45,7 @@ impl NewSource {
     }
 
     // Look out for when TryInto lands in stable.
-    pub(crate) fn to_source(&self) -> Result<Source> {
+    pub(crate) fn to_source(&self) -> Result<Source, Error> {
         self.insert_or_ignore()?;
         dbqueries::get_source_from_uri(&self.uri)
     }
diff --git a/hammond-data/src/models/podcast.rs b/hammond-data/src/models/podcast.rs
index fb6ab74..dcfad9c 100644
--- a/hammond-data/src/models/podcast.rs
+++ b/hammond-data/src/models/podcast.rs
@@ -1,7 +1,7 @@
 use diesel::SaveChangesDsl;
+use failure::Error;
 
 use database::connection;
-use errors::*;
 use models::{Save, Source};
 use schema::podcast;
 
@@ -25,7 +25,7 @@ pub struct Podcast {
 
 impl Save<Podcast> for Podcast {
     /// Helper method to easily save/"sync" current state of self to the Database.
-    fn save(&self) -> Result<Podcast> {
+    fn save(&self) -> Result<Podcast, Error> {
         let db = connection();
         let tempdb = db.get()?;
 
diff --git a/hammond-data/src/models/source.rs b/hammond-data/src/models/source.rs
index aa08ad9..90062ba 100644
--- a/hammond-data/src/models/source.rs
+++ b/hammond-data/src/models/source.rs
@@ -1,4 +1,5 @@
 use diesel::SaveChangesDsl;
+use failure::Error;
 use rss::Channel;
 use url::Url;
 
@@ -13,7 +14,6 @@ use futures::prelude::*;
 use futures_cpupool::CpuPool;
 
 use database::connection;
-use errors::*;
 use feed::{Feed, FeedBuilder};
 use models::{NewSource, Save};
 use schema::source;
@@ -34,7 +34,7 @@ pub struct Source {
 
 impl Save<Source> for Source {
     /// Helper method to easily save/"sync" current state of self to the Database.
-    fn save(&self) -> Result<Source> {
+    fn save(&self) -> Result<Source, Error> {
         let db = connection();
         let con = db.get()?;
 
@@ -85,7 +85,7 @@ impl Source {
 
     /// Extract Etag and LastModified from res, and update self and the
     /// corresponding db row.
-    fn update_etag(&mut self, res: &Response) -> Result<()> {
+    fn update_etag(&mut self, res: &Response) -> Result<(), Error> {
         let headers = res.headers();
 
         let etag = headers.get::<ETag>().map(|x| x.tag());
@@ -109,7 +109,7 @@ impl Source {
     //     403: Forbidden
     //     408: Timeout
     //     410: Feed deleted
-    fn match_status(mut self, res: Response) -> Result<(Self, Response)> {
+    fn match_status(mut self, res: Response) -> Result<(Self, Response), Error> {
         self.update_etag(&res)?;
         let code = res.status();
         match code {
@@ -131,7 +131,7 @@ impl Source {
         Ok((self, res))
     }
 
-    fn handle_301(&mut self, res: &Response) -> Result<()> {
+    fn handle_301(&mut self, res: &Response) -> Result<(), Error> {
         let headers = res.headers();
 
         if let Some(url) = headers.get::<Location>() {
@@ -150,7 +150,7 @@ impl Source {
     /// Construct a new `Source` with the given `uri` and index it.
     ///
     /// This only indexes the `Source` struct, not the Podcast Feed.
-    pub fn from_url(uri: &str) -> Result<Source> {
+    pub fn from_url(uri: &str) -> Result<Source, Error> {
         let url = Url::parse(uri)?;
 
         NewSource::new(&url).to_source()
@@ -174,11 +174,11 @@ impl Source {
         let feed = self.request_constructor(client, ignore_etags)
             .and_then(move |(_, res)| response_to_channel(res, pool))
             .and_then(move |chan| {
-                FeedBuilder::default()
+                Ok(FeedBuilder::default()
                     .channel(chan)
                     .source_id(id)
                     .build()
-                    .map_err(From::from)
+                    .unwrap())
             });
 
         Box::new(feed)
@@ -228,7 +228,8 @@ fn response_to_channel(
         .map_err(From::from)
         .map(|iter| iter.collect::<Vec<_>>())
         .map(|utf_8_bytes| String::from_utf8_lossy(&utf_8_bytes).into_owned())
-        .and_then(|buf| Channel::from_str(&buf).map_err(From::from));
+        // FIXME: Unwrap
+        .and_then(|buf| Ok(Channel::from_str(&buf).unwrap()));
     let cpu_chan = pool.spawn(chan);
     Box::new(cpu_chan)
 }
diff --git a/hammond-data/src/pipeline.rs b/hammond-data/src/pipeline.rs
index 418ecdf..9c77513 100644
--- a/hammond-data/src/pipeline.rs
+++ b/hammond-data/src/pipeline.rs
@@ -10,16 +10,15 @@ use hyper::client::HttpConnector;
 use hyper_tls::HttpsConnector;
 use tokio_core::reactor::Core;
 
+use failure::Error;
 use num_cpus;
 use rss;
 
 use Source;
 use dbqueries;
-use errors::*;
 use models::{IndexState, NewEpisode, NewEpisodeMinimal};
 
 // use Feed;
-use std;
 // use std::sync::{Arc, Mutex};
 
 macro_rules! clone {
@@ -51,7 +50,7 @@ pub fn pipeline<S: IntoIterator<Item = Source>>(
     tokio_core: &mut Core,
     pool: &CpuPool,
     client: Client<HttpsConnector<HttpConnector>>,
-) -> Result<()> {
+) -> Result<(), Error> {
     let list: Vec<_> = sources
         .into_iter()
         .map(clone!(pool => move |s| s.into_feed(&client, pool.clone(), ignore_etags)))
@@ -70,7 +69,7 @@ pub fn pipeline<S: IntoIterator<Item = Source>>(
 }
 
 /// Creates a tokio `reactor::Core`, a `CpuPool`, and a `hyper::Client` and runs the pipeline.
-pub fn run(sources: Vec<Source>, ignore_etags: bool) -> Result<()> {
+pub fn run(sources: Vec<Source>, ignore_etags: bool) -> Result<(), Error> {
     if sources.is_empty() {
         return Ok(());
     }
@@ -86,7 +85,7 @@ pub fn run(sources: Vec<Source>, ignore_etags: bool) -> Result<()> {
 }
 
 /// Docs
-pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<()> {
+pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<(), Error> {
     let pool = CpuPool::new_num_cpus();
     let mut core = Core::new()?;
     let handle = core.handle();
@@ -102,7 +101,10 @@ pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<()> {
     core.run(work)
 }
 
-fn determine_ep_state(ep: NewEpisodeMinimal, item: &rss::Item) -> Result<IndexState<NewEpisode>> {
+fn determine_ep_state(
+    ep: NewEpisodeMinimal,
+    item: &rss::Item,
+) -> Result<IndexState<NewEpisode>, Error> {
     // Check if feed exists
     let exists = dbqueries::episode_exists(ep.title(), ep.podcast_id())?;
 
@@ -135,7 +137,7 @@ pub(crate) fn glue_async<'a>(
 #[cfg_attr(feature = "cargo-clippy", allow(type_complexity))]
 pub fn collect_futures<F>(
     futures: Vec<F>,
-) -> Box<Future<Item = Vec<Result<F::Item>>, Error = Error>>
+) -> Box<Future<Item = Vec<Result<F::Item, Error>>, Error = Error>>
 where
     F: 'static + Future<Error = Error>,
     <F as Future>::Item: 'static,
diff --git a/hammond-data/src/utils.rs b/hammond-data/src/utils.rs
index 97f762b..d7e1de2 100644
--- a/hammond-data/src/utils.rs
+++ b/hammond-data/src/utils.rs
@@ -3,11 +3,12 @@
 use chrono::prelude::*;
 use rayon::prelude::*;
 
+use failure::Error;
 use itertools::Itertools;
 use url::{Position, Url};
 
 use dbqueries;
-use errors::*;
+// use errors::*;
 use models::{EpisodeCleanerQuery, Podcast, Save};
 use xdg_dirs::DL_DIR;
 
 use std::fs;
 use std::path::Path;
 
 /// Scan downloaded `episode` entries that might have broken `local_uri`s and set them to `None`.
-fn download_checker() -> Result<()> {
+fn download_checker() -> Result<(), Error> {
     let mut episodes = dbqueries::get_downloaded_episodes()?;
 
     episodes
@@ -33,7 +34,7 @@ fn download_checker() -> Result<()> {
 }
 
 /// Delete watched `episodes` that have exceeded their lifetime after being played.
-fn played_cleaner() -> Result<()> {
+fn played_cleaner() -> Result<(), Error> {
     let mut episodes = dbqueries::get_played_cleaner_episodes()?;
     let now_utc = Utc::now().timestamp() as i32;
@@ -57,7 +58,7 @@ fn played_cleaner() -> Result<()> {
 }
 
 /// Check `ep.local_uri` field and delete the file it points to.
-fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<()> {
+fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<(), Error> {
     if ep.local_uri().is_some() {
         let uri = ep.local_uri().unwrap().to_owned();
         if Path::new(&uri).exists() {
@@ -86,7 +87,7 @@ fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<()> {
 ///
 /// Runs a cleaner for played Episodes that are past the lifetime limit and
 /// scheduled for removal.
-pub fn checkup() -> Result<()> {
+pub fn checkup() -> Result<(), Error> {
     info!("Running database checks.");
     download_checker()?;
     played_cleaner()?;
@@ -123,7 +124,7 @@ pub fn replace_extra_spaces(s: &str) -> String {
 }
 
 /// Returns the URI of a Podcast's download folder given its title.
-pub fn get_download_folder(pd_title: &str) -> Result<String> {
+pub fn get_download_folder(pd_title: &str) -> Result<String, Error> {
     // It might be better to make it a hash of the title or the podcast rowid
     let download_fold = format!("{}/{}", DL_DIR.to_str().unwrap(), pd_title);
 
@@ -137,7 +138,7 @@ pub fn get_download_folder(pd_title: &str) -> Result<String> {
 /// Removes all the entries associated with the given show from the database,
 /// and deletes all of the downloaded content.
 // TODO: Write Tests
-pub fn delete_show(pd: &Podcast) -> Result<()> {
+pub fn delete_show(pd: &Podcast) -> Result<(), Error> {
     dbqueries::remove_feed(pd)?;
     info!("{} was removed successfully.", pd.title());
diff --git a/hammond-downloader/src/downloader.rs b/hammond-downloader/src/downloader.rs
index 26ad08f..34f80ba 100644
--- a/hammond-downloader/src/downloader.rs
+++ b/hammond-downloader/src/downloader.rs
@@ -11,11 +11,13 @@ use std::io::{BufWriter, Read, Write};
 use std::path::Path;
 use std::sync::{Arc, Mutex};
 
-use errors::*;
 use hammond_data::{EpisodeWidgetQuery, PodcastCoverQuery, Save};
 use hammond_data::xdg_dirs::HAMMOND_CACHE;
 
-// use failure::Error;
+use std::result;
+
+use failure::Error;
+type Result<T> = result::Result<T, Error>;
 
 // TODO: Replace paths that are of type &str with std::path.
 // TODO: Document a convention for absolute/relative paths and whether they should end with '/'.
diff --git a/hammond-downloader/src/errors.rs b/hammond-downloader/src/errors.rs
index 7ff0295..ca23fba 100644
--- a/hammond-downloader/src/errors.rs
+++ b/hammond-downloader/src/errors.rs
@@ -2,19 +2,9 @@ use hammond_data;
 use reqwest;
 use std::io;
 
-error_chain! {
-    foreign_links {
-        ReqError(reqwest::Error);
-        IoError(io::Error);
-        DataError(hammond_data::errors::Error);
-    }
-}
-
 #[derive(Fail, Debug)]
 enum DownloaderError {
     #[fail(display = "Reqwest error: {}", _0)] RequestError(reqwest::Error),
-    // NOT SYNC.
-    // #[fail(display = "Data error: {}", _0)]
-    // DataError(hammond_data::errors::Error),
+    #[fail(display = "Data error: {}", _0)] DataError(hammond_data::errors::DatabaseError),
     #[fail(display = "Io error: {}", _0)] IoError(io::Error),
 }
diff --git a/hammond-downloader/src/lib.rs b/hammond-downloader/src/lib.rs
index cd6b3d3..a607d26 100644
--- a/hammond-downloader/src/lib.rs
+++ b/hammond-downloader/src/lib.rs
@@ -2,13 +2,12 @@
 // #![deny(unused_extern_crates, unused)]
 
 #[macro_use]
-extern crate error_chain;
+extern crate failure;
 #[macro_use]
 extern crate failure_derive;
 #[macro_use]
 extern crate log;
 
-extern crate failure;
 extern crate glob;
 extern crate hammond_data;
 extern crate hyper;
diff --git a/hammond-gtk/Cargo.toml b/hammond-gtk/Cargo.toml
index e1c8c49..d8b2ab2 100644
--- a/hammond-gtk/Cargo.toml
+++ b/hammond-gtk/Cargo.toml
@@ -20,6 +20,8 @@ open = "1.2.1"
 rayon = "0.9.0"
 send-cell = "0.1.2"
 url = "1.6.0"
+failure = "0.1.1"
+failure_derive = "0.1.1"
 
 [dependencies.gtk]
 features = ["v3_22"]
diff --git a/hammond-gtk/src/main.rs b/hammond-gtk/src/main.rs
index 7245aee..e241db8 100644
--- a/hammond-gtk/src/main.rs
+++ b/hammond-gtk/src/main.rs
@@ -1,5 +1,5 @@
 #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr, needless_pass_by_value))]
-#![deny(unused_extern_crates, unused)]
+// #![deny(unused_extern_crates, unused)]
 
 extern crate gdk;
 extern crate gdk_pixbuf;
@@ -7,6 +7,10 @@ extern crate gio;
 extern crate glib;
 extern crate gtk;
 
+#[macro_use]
+extern crate failure;
+#[macro_use]
+extern crate failure_derive;
 #[macro_use]
 extern crate lazy_static;
 #[macro_use]
diff --git a/hammond-gtk/src/widgets/episode.rs b/hammond-gtk/src/widgets/episode.rs
index 0490b8d..2886176 100644
--- a/hammond-gtk/src/widgets/episode.rs
+++ b/hammond-gtk/src/widgets/episode.rs
@@ -4,12 +4,12 @@ use gtk;
 use chrono::prelude::*;
 use gtk::prelude::*;
 
+use failure::Error;
 use humansize::{file_size_opts as size_opts, FileSize};
 use open;
 
 use hammond_data::{EpisodeWidgetQuery, Podcast};
 use hammond_data::dbqueries;
-use hammond_data::errors::*;
 use hammond_data::utils::get_download_folder;
 
 use app::Action;
@@ -368,7 +368,7 @@ fn update_total_size_callback(prog: Arc<Mutex<Progress>>, total_size: g
 // };
 // }
 
-pub fn episodes_listbox(pd: &Podcast, sender: Sender<Action>) -> Result<gtk::ListBox> {
+pub fn episodes_listbox(pd: &Podcast, sender: Sender<Action>) -> Result<gtk::ListBox, Error> {
     let mut episodes = dbqueries::get_pd_episodeswidgets(pd)?;
 
     let list = gtk::ListBox::new();
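
-- 
Note, not part of the commit: the "FIXME: Unwrap" spots above exist
because `rss::Error` is not Sync and the derive_builder build() errors
do not implement `Fail`, so neither converts into `failure::Error`
through `?`. A hedged way to drop those unwraps until the upstream
types gain `Fail` impls, assuming failure 0.1's `err_msg` helper
(sketch only):

    use std::str::FromStr;

    use failure::{err_msg, Error};
    use rss::Channel;

    fn parse_channel(buf: &str) -> Result<Channel, Error> {
        // Route the non-Sync error through its Display output;
        // the resulting failure::Error is Send + Sync.
        Channel::from_str(buf).map_err(|e| err_msg(e.to_string()))
    }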