diff --git a/hammond-data/src/errors.rs b/hammond-data/src/errors.rs index 9fd441d..fa88ca5 100644 --- a/hammond-data/src/errors.rs +++ b/hammond-data/src/errors.rs @@ -8,11 +8,6 @@ use url; use std::io; -#[allow(dead_code)] -#[derive(Fail, Debug)] -#[fail(display = "IO Error: {}", _0)] -struct IOError(io::Error); - // fadsadfs NOT SYNC // #[derive(Fail, Debug)] // #[fail(display = "RSS Error: {}", _0)] @@ -34,6 +29,10 @@ pub enum DataError { UrlError(#[cause] url::ParseError), #[fail(display = "TLS Error: {}", _0)] TLSError(#[cause] native_tls::Error), + #[fail(display = "IO Error: {}", _0)] + IOError(io::Error), + #[fail(display = "WANNABE BAIL ERROR: {}", _0)] + DiscountBail(String), } impl From for DataError { @@ -77,3 +76,9 @@ impl From for DataError { DataError::TLSError(err) } } + +impl From for DataError { + fn from(err: io::Error) -> Self { + DataError::IOError(err) + } +} diff --git a/hammond-data/src/feed.rs b/hammond-data/src/feed.rs index 998fab1..f6a3c6e 100644 --- a/hammond-data/src/feed.rs +++ b/hammond-data/src/feed.rs @@ -1,11 +1,11 @@ //! Index Feeds. -use failure::Error; use futures::future::*; use itertools::{Either, Itertools}; use rss; use dbqueries; +use errors::DataError; use models::{Index, IndexState, Update}; use models::{NewEpisode, NewPodcast, Podcast}; use pipeline::*; @@ -26,7 +26,7 @@ pub struct Feed { impl Feed { /// Index the contents of the RSS `Feed` into the database. 
- pub fn index(self) -> Box + Send> { + pub fn index(self) -> Box + Send> { let fut = self.parse_podcast_async() .and_then(|pd| pd.to_podcast()) .and_then(move |pd| self.index_channel_items(&pd)); @@ -38,11 +38,14 @@ impl Feed { NewPodcast::new(&self.channel, self.source_id) } - fn parse_podcast_async(&self) -> Box + Send> { + fn parse_podcast_async(&self) -> Box + Send> { Box::new(ok(self.parse_podcast())) } - fn index_channel_items(&self, pd: &Podcast) -> Box + Send> { + fn index_channel_items( + &self, + pd: &Podcast, + ) -> Box + Send> { let fut = self.get_stuff(pd) .and_then(|(insert, update)| { if !insert.is_empty() { @@ -79,7 +82,10 @@ impl Feed { Box::new(fut) } - fn get_stuff(&self, pd: &Podcast) -> Box + Send> { + fn get_stuff( + &self, + pd: &Podcast, + ) -> Box + Send> { let (insert, update): (Vec<_>, Vec<_>) = self.channel .items() .into_iter() @@ -90,7 +96,7 @@ impl Feed { // I am not sure what the optimizations are on match vs allocating None. .map(|fut| { fut.and_then(|x| match x { - IndexState::NotChanged => bail!("Nothing to do here."), + IndexState::NotChanged => return Err(DataError::DiscountBail(format!("Nothing to do here."))), _ => Ok(x), }) }) diff --git a/hammond-data/src/lib.rs b/hammond-data/src/lib.rs index 5fbcd49..3fbbadc 100644 --- a/hammond-data/src/lib.rs +++ b/hammond-data/src/lib.rs @@ -27,7 +27,7 @@ extern crate derive_builder; extern crate diesel; #[macro_use] extern crate diesel_migrations; -#[macro_use] +// #[macro_use] extern crate failure; #[macro_use] extern crate failure_derive; diff --git a/hammond-data/src/models/episode.rs b/hammond-data/src/models/episode.rs index afb1c2a..65dafaa 100644 --- a/hammond-data/src/models/episode.rs +++ b/hammond-data/src/models/episode.rs @@ -2,9 +2,9 @@ use chrono::prelude::*; use diesel; use diesel::SaveChangesDsl; use diesel::prelude::*; -use failure::Error; use database::connection; +use errors::DataError; use models::{Podcast, Save}; use schema::episode; @@ -31,9 +31,9 @@ pub struct 
Episode { podcast_id: i32, } -impl Save for Episode { +impl Save for Episode { /// Helper method to easily save/"sync" current state of self to the Database. - fn save(&self) -> Result { + fn save(&self) -> Result { let db = connection(); let tempdb = db.get()?; @@ -180,7 +180,7 @@ impl Episode { } /// Sets the `played` value with the current `epoch` timestap and save it. - pub fn set_played_now(&mut self) -> Result<(), Error> { + pub fn set_played_now(&mut self) -> Result<(), DataError> { let epoch = Utc::now().timestamp() as i32; self.set_played(Some(epoch)); self.save().map(|_| ()) @@ -223,9 +223,9 @@ impl From for EpisodeWidgetQuery { } } -impl Save for EpisodeWidgetQuery { +impl Save for EpisodeWidgetQuery { /// Helper method to easily save/"sync" current state of self to the Database. - fn save(&self) -> Result { + fn save(&self) -> Result { use schema::episode::dsl::*; let db = connection(); @@ -342,7 +342,7 @@ impl EpisodeWidgetQuery { } /// Sets the `played` value with the current `epoch` timestap and save it. - pub fn set_played_now(&mut self) -> Result<(), Error> { + pub fn set_played_now(&mut self) -> Result<(), DataError> { let epoch = Utc::now().timestamp() as i32; self.set_played(Some(epoch)); self.save().map(|_| ()) @@ -361,9 +361,9 @@ pub struct EpisodeCleanerQuery { played: Option, } -impl Save for EpisodeCleanerQuery { +impl Save for EpisodeCleanerQuery { /// Helper method to easily save/"sync" current state of self to the Database. 
- fn save(&self) -> Result { + fn save(&self) -> Result { use schema::episode::dsl::*; let db = connection(); diff --git a/hammond-data/src/models/mod.rs b/hammond-data/src/models/mod.rs index 8c2a89d..10f19ed 100644 --- a/hammond-data/src/models/mod.rs +++ b/hammond-data/src/models/mod.rs @@ -23,8 +23,6 @@ pub use self::episode::{Episode, EpisodeMinimal, EpisodeWidgetQuery}; pub use self::podcast::{Podcast, PodcastCoverQuery}; pub use self::source::Source; -use failure::Error; - #[derive(Debug, Clone, PartialEq)] pub enum IndexState { Index(T), @@ -32,20 +30,21 @@ pub enum IndexState { NotChanged, } -pub trait Insert { - fn insert(&self) -> Result<(), Error>; +pub trait Insert { + fn insert(&self) -> Result; } -pub trait Update { - fn update(&self, i32) -> Result<(), Error>; +pub trait Update { + fn update(&self, i32) -> Result; } -pub trait Index: Insert + Update { - fn index(&self) -> Result<(), Error>; +// This might need to change in the future +pub trait Index: Insert + Update { + fn index(&self) -> Result; } /// FIXME: DOCS -pub trait Save { +pub trait Save { /// Helper method to easily save/"sync" current state of a diesel model to the Database. 
- fn save(&self) -> Result; + fn save(&self) -> Result; } diff --git a/hammond-data/src/models/new_episode.rs b/hammond-data/src/models/new_episode.rs index 668787f..6b2b63e 100644 --- a/hammond-data/src/models/new_episode.rs +++ b/hammond-data/src/models/new_episode.rs @@ -1,12 +1,12 @@ use ammonia; use diesel; use diesel::prelude::*; -use failure::Error; use rfc822_sanitizer::parse_from_rfc2822_with_fallback as parse_rfc822; use rss; use database::connection; use dbqueries; +use errors::DataError; use models::{Episode, EpisodeMinimal, Index, Insert, Update}; use parser; use schema::episode; @@ -43,8 +43,8 @@ impl From for NewEpisode { } } -impl Insert for NewEpisode { - fn insert(&self) -> Result<(), Error> { +impl Insert<(), DataError> for NewEpisode { + fn insert(&self) -> Result<(), DataError> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -58,8 +58,8 @@ impl Insert for NewEpisode { } } -impl Update for NewEpisode { - fn update(&self, episode_id: i32) -> Result<(), Error> { +impl Update<(), DataError> for NewEpisode { + fn update(&self, episode_id: i32) -> Result<(), DataError> { use schema::episode::dsl::*; let db = connection(); let con = db.get()?; @@ -73,9 +73,9 @@ impl Update for NewEpisode { } } -impl Index for NewEpisode { +impl Index<(), DataError> for NewEpisode { // Does not update the episode description if it's the only thing that has changed. - fn index(&self) -> Result<(), Error> { + fn index(&self) -> Result<(), DataError> { let exists = dbqueries::episode_exists(self.title(), self.podcast_id())?; if exists { @@ -113,12 +113,12 @@ impl PartialEq for NewEpisode { impl NewEpisode { /// Parses an `rss::Item` into a `NewEpisode` Struct. 
#[allow(dead_code)] - pub(crate) fn new(item: &rss::Item, podcast_id: i32) -> Result { + pub(crate) fn new(item: &rss::Item, podcast_id: i32) -> Result { NewEpisodeMinimal::new(item, podcast_id).map(|ep| ep.into_new_episode(item)) } #[allow(dead_code)] - pub(crate) fn to_episode(&self) -> Result { + pub(crate) fn to_episode(&self) -> Result { self.index()?; dbqueries::get_episode_from_pk(&self.title, self.podcast_id).map_err(From::from) } @@ -182,9 +182,11 @@ impl PartialEq for NewEpisodeMinimal { } impl NewEpisodeMinimal { - pub(crate) fn new(item: &rss::Item, parent_id: i32) -> Result { + pub(crate) fn new(item: &rss::Item, parent_id: i32) -> Result { if item.title().is_none() { - bail!("No title specified for the item.") + return Err(DataError::DiscountBail(format!( + "No title specified for the item." + ))); } let title = item.title().unwrap().trim().to_owned(); @@ -195,7 +197,9 @@ impl NewEpisodeMinimal { } else if item.link().is_some() { item.link().map(|s| url_cleaner(s)) } else { - bail!("No url specified for the item.") + return Err(DataError::DiscountBail(format!( + "No url specified for the item." + ))); }; // Default to rfc2822 represantation of epoch 0. 
diff --git a/hammond-data/src/models/new_podcast.rs b/hammond-data/src/models/new_podcast.rs index d02f5df..c3e9632 100644 --- a/hammond-data/src/models/new_podcast.rs +++ b/hammond-data/src/models/new_podcast.rs @@ -2,9 +2,9 @@ use diesel; use diesel::prelude::*; use ammonia; -use failure::Error; use rss; +use errors::DataError; use models::{Index, Insert, Update}; use models::Podcast; use schema::podcast; @@ -27,8 +27,8 @@ pub(crate) struct NewPodcast { source_id: i32, } -impl Insert for NewPodcast { - fn insert(&self) -> Result<(), Error> { +impl Insert<(), DataError> for NewPodcast { + fn insert(&self) -> Result<(), DataError> { use schema::podcast::dsl::*; let db = connection(); let con = db.get()?; @@ -41,8 +41,8 @@ impl Insert for NewPodcast { } } -impl Update for NewPodcast { - fn update(&self, podcast_id: i32) -> Result<(), Error> { +impl Update<(), DataError> for NewPodcast { + fn update(&self, podcast_id: i32) -> Result<(), DataError> { use schema::podcast::dsl::*; let db = connection(); let con = db.get()?; @@ -58,8 +58,8 @@ impl Update for NewPodcast { // TODO: Maybe return an Enum Instead. // It would make unti testing better too. -impl Index for NewPodcast { - fn index(&self) -> Result<(), Error> { +impl Index<(), DataError> for NewPodcast { + fn index(&self) -> Result<(), DataError> { let exists = dbqueries::podcast_exists(self.source_id)?; if exists { @@ -118,7 +118,7 @@ impl NewPodcast { } // Look out for when tryinto lands into stable. 
- pub(crate) fn to_podcast(&self) -> Result { + pub(crate) fn to_podcast(&self) -> Result { self.index()?; dbqueries::get_podcast_from_source_id(self.source_id).map_err(From::from) } diff --git a/hammond-data/src/models/new_source.rs b/hammond-data/src/models/new_source.rs index 0fb0400..881b008 100644 --- a/hammond-data/src/models/new_source.rs +++ b/hammond-data/src/models/new_source.rs @@ -2,12 +2,12 @@ use diesel; use diesel::prelude::*; -use failure::Error; use url::Url; use database::connection; use dbqueries; // use models::{Insert, Update}; +use errors::DataError; use models::Source; use schema::source; @@ -32,7 +32,7 @@ impl NewSource { } } - pub(crate) fn insert_or_ignore(&self) -> Result<(), Error> { + pub(crate) fn insert_or_ignore(&self) -> Result<(), DataError> { use schema::source::dsl::*; let db = connection(); let con = db.get()?; @@ -45,7 +45,7 @@ impl NewSource { } // Look out for when tryinto lands into stable. - pub(crate) fn to_source(&self) -> Result { + pub(crate) fn to_source(&self) -> Result { self.insert_or_ignore()?; dbqueries::get_source_from_uri(&self.uri).map_err(From::from) } diff --git a/hammond-data/src/models/podcast.rs b/hammond-data/src/models/podcast.rs index dcfad9c..a5ac44b 100644 --- a/hammond-data/src/models/podcast.rs +++ b/hammond-data/src/models/podcast.rs @@ -1,7 +1,7 @@ use diesel::SaveChangesDsl; -use failure::Error; use database::connection; +use errors::DataError; use models::{Save, Source}; use schema::podcast; @@ -23,9 +23,9 @@ pub struct Podcast { source_id: i32, } -impl Save for Podcast { +impl Save for Podcast { /// Helper method to easily save/"sync" current state of self to the Database. 
- fn save(&self) -> Result { + fn save(&self) -> Result { let db = connection(); let tempdb = db.get()?; diff --git a/hammond-data/src/models/source.rs b/hammond-data/src/models/source.rs index 90062ba..34f32dd 100644 --- a/hammond-data/src/models/source.rs +++ b/hammond-data/src/models/source.rs @@ -1,5 +1,4 @@ use diesel::SaveChangesDsl; -use failure::Error; use rss::Channel; use url::Url; @@ -14,6 +13,7 @@ use futures::prelude::*; use futures_cpupool::CpuPool; use database::connection; +use errors::DataError; use feed::{Feed, FeedBuilder}; use models::{NewSource, Save}; use schema::source; @@ -32,9 +32,9 @@ pub struct Source { http_etag: Option, } -impl Save for Source { +impl Save for Source { /// Helper method to easily save/"sync" current state of self to the Database. - fn save(&self) -> Result { + fn save(&self) -> Result { let db = connection(); let con = db.get()?; @@ -85,7 +85,7 @@ impl Source { /// Extract Etag and LastModifier from res, and update self and the /// corresponding db row. - fn update_etag(&mut self, res: &Response) -> Result<(), Error> { + fn update_etag(&mut self, res: &Response) -> Result<(), DataError> { let headers = res.headers(); let etag = headers.get::().map(|x| x.tag()); @@ -109,29 +109,42 @@ impl Source { // 403: Forbidden // 408: Timeout // 410: Feed deleted - fn match_status(mut self, res: Response) -> Result<(Self, Response), Error> { + // TODO: Rethink this API. + fn match_status(mut self, res: Response) -> Result<(Self, Response), DataError> { self.update_etag(&res)?; let code = res.status(); match code { - StatusCode::NotModified => bail!("304: skipping.."), + StatusCode::NotModified => { + return Err(DataError::DiscountBail(format!("304: skipping.."))) + } StatusCode::MovedPermanently => { error!("Feed was moved permanently."); self.handle_301(&res)?; - bail!("301: Feed was moved permanently.") + return Err(DataError::DiscountBail(format!( + "301: Feed was moved permanently."
+ ))); } StatusCode::TemporaryRedirect => debug!("307: Temporary Redirect."), StatusCode::PermanentRedirect => warn!("308: Permanent Redirect."), - StatusCode::Unauthorized => bail!("401: Unauthorized."), - StatusCode::Forbidden => bail!("403: Forbidden."), - StatusCode::NotFound => bail!("404: Not found."), - StatusCode::RequestTimeout => bail!("408: Request Timeout."), - StatusCode::Gone => bail!("410: Feed was deleted."), + StatusCode::Unauthorized => { + return Err(DataError::DiscountBail(format!("401: Unauthorized."))) + } + StatusCode::Forbidden => { + return Err(DataError::DiscountBail(format!("403: Forbidden."))) + } + StatusCode::NotFound => return Err(DataError::DiscountBail(format!("404: Not found."))), + StatusCode::RequestTimeout => { + return Err(DataError::DiscountBail(format!("408: Request Timeout."))) + } + StatusCode::Gone => { + return Err(DataError::DiscountBail(format!("410: Feed was deleted."))) + } _ => info!("HTTP StatusCode: {}", code), }; Ok((self, res)) } - fn handle_301(&mut self, res: &Response) -> Result<(), Error> { + fn handle_301(&mut self, res: &Response) -> Result<(), DataError> { let headers = res.headers(); if let Some(url) = headers.get::() { @@ -150,7 +163,7 @@ impl Source { /// Construct a new `Source` with the given `uri` and index it. /// /// This only indexes the `Source` struct, not the Podcast Feed. 
- pub fn from_url(uri: &str) -> Result { + pub fn from_url(uri: &str) -> Result { let url = Url::parse(uri)?; NewSource::new(&url).to_source() @@ -169,7 +182,7 @@ impl Source { client: &Client>, pool: CpuPool, ignore_etags: bool, - ) -> Box> { + ) -> Box> { let id = self.id(); let feed = self.request_constructor(client, ignore_etags) .and_then(move |(_, res)| response_to_channel(res, pool)) @@ -190,7 +203,7 @@ impl Source { self, client: &Client>, ignore_etags: bool, - ) -> Box> { + ) -> Box> { // FIXME: remove unwrap somehow let uri = Uri::from_str(self.uri()).unwrap(); let mut req = Request::new(Method::Get, uri); @@ -221,7 +234,7 @@ impl Source { fn response_to_channel( res: Response, pool: CpuPool, -) -> Box + Send> { +) -> Box + Send> { let chan = res.body() .concat2() .map(|x| x.into_iter()) diff --git a/hammond-data/src/pipeline.rs b/hammond-data/src/pipeline.rs index 9c77513..e7915a1 100644 --- a/hammond-data/src/pipeline.rs +++ b/hammond-data/src/pipeline.rs @@ -10,14 +10,13 @@ use hyper::client::HttpConnector; use hyper_tls::HttpsConnector; use tokio_core::reactor::Core; -use failure::Error; use num_cpus; use rss; use Source; use dbqueries; +use errors::DataError; use models::{IndexState, NewEpisode, NewEpisodeMinimal}; -// use Feed; // use std::sync::{Arc, Mutex}; @@ -50,7 +49,7 @@ pub fn pipeline>( tokio_core: &mut Core, pool: &CpuPool, client: Client>, -) -> Result<(), Error> { +) -> Result<(), DataError> { let list: Vec<_> = sources .into_iter() .map(clone!(pool => move |s| s.into_feed(&client, pool.clone(), ignore_etags))) @@ -59,7 +58,9 @@ pub fn pipeline>( .collect(); if list.is_empty() { - bail!("No futures were found to run."); + return Err(DataError::DiscountBail(format!( + "No futures were found to run." + ))); } // Thats not really concurrent yet I think. @@ -69,7 +70,7 @@ pub fn pipeline>( } /// Creates a tokio `reactor::Core`, a `CpuPool`, and a `hyper::Client` and runs the pipeline. 
-pub fn run(sources: Vec, ignore_etags: bool) -> Result<(), Error> { +pub fn run(sources: Vec, ignore_etags: bool) -> Result<(), DataError> { if sources.is_empty() { return Ok(()); } @@ -85,7 +86,7 @@ pub fn run(sources: Vec, ignore_etags: bool) -> Result<(), Error> { } /// Docs -pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<(), Error> { +pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<(), DataError> { let pool = CpuPool::new_num_cpus(); let mut core = Core::new()?; let handle = core.handle(); @@ -104,7 +105,7 @@ pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<(), Error> { fn determine_ep_state( ep: NewEpisodeMinimal, item: &rss::Item, -) -> Result, Error> { +) -> Result, DataError> { // Check if feed exists let exists = dbqueries::episode_exists(ep.title(), ep.podcast_id())?; @@ -125,7 +126,7 @@ fn determine_ep_state( pub(crate) fn glue_async<'a>( item: &'a rss::Item, id: i32, -) -> Box, Error = Error> + 'a> { +) -> Box, Error = DataError> + 'a> { Box::new( result(NewEpisodeMinimal::new(item, id)).and_then(move |ep| determine_ep_state(ep, item)), ) @@ -137,7 +138,7 @@ pub(crate) fn glue_async<'a>( #[cfg_attr(feature = "cargo-clippy", allow(type_complexity))] pub fn collect_futures( futures: Vec, -) -> Box>, Error = Error>> +) -> Box>, Error = DataError>> where F: 'static + Future, ::Item: 'static, diff --git a/hammond-data/src/utils.rs b/hammond-data/src/utils.rs index d7e1de2..11ec3bf 100644 --- a/hammond-data/src/utils.rs +++ b/hammond-data/src/utils.rs @@ -3,12 +3,11 @@ use chrono::prelude::*; use rayon::prelude::*; -use failure::Error; use itertools::Itertools; use url::{Position, Url}; use dbqueries; -// use errors::*; +use errors::DataError; use models::{EpisodeCleanerQuery, Podcast, Save}; use xdg_dirs::DL_DIR; @@ -16,7 +15,7 @@ use std::fs; use std::path::Path; /// Scan downloaded `episode` entries that might have broken `local_uri`s and set them to `None`. 
-fn download_checker() -> Result<(), Error> { +fn download_checker() -> Result<(), DataError> { let mut episodes = dbqueries::get_downloaded_episodes()?; episodes @@ -34,7 +33,7 @@ fn download_checker() -> Result<(), Error> { } /// Delete watched `episodes` that have exceded their liftime after played. -fn played_cleaner() -> Result<(), Error> { +fn played_cleaner() -> Result<(), DataError> { let mut episodes = dbqueries::get_played_cleaner_episodes()?; let now_utc = Utc::now().timestamp() as i32; @@ -58,7 +57,7 @@ fn played_cleaner() -> Result<(), Error> { } /// Check `ep.local_uri` field and delete the file it points to. -fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<(), Error> { +fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<(), DataError> { if ep.local_uri().is_some() { let uri = ep.local_uri().unwrap().to_owned(); if Path::new(&uri).exists() { @@ -87,7 +86,7 @@ fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<(), Error> { /// /// Runs a cleaner for played Episode's that are pass the lifetime limit and /// scheduled for removal. -pub fn checkup() -> Result<(), Error> { +pub fn checkup() -> Result<(), DataError> { info!("Running database checks."); download_checker()?; played_cleaner()?; @@ -124,7 +123,7 @@ pub fn replace_extra_spaces(s: &str) -> String { } /// Returns the URI of a Podcast Downloads given it's title. -pub fn get_download_folder(pd_title: &str) -> Result { +pub fn get_download_folder(pd_title: &str) -> Result { // It might be better to make it a hash of the title or the podcast rowid let download_fold = format!("{}/{}", DL_DIR.to_str().unwrap(), pd_title); @@ -138,7 +137,7 @@ pub fn get_download_folder(pd_title: &str) -> Result { /// Removes all the entries associated with the given show from the database, /// and deletes all of the downloaded content. 
// TODO: Write Tests -pub fn delete_show(pd: &Podcast) -> Result<(), Error> { +pub fn delete_show(pd: &Podcast) -> Result<(), DataError> { dbqueries::remove_feed(pd)?; info!("{} was removed succesfully.", pd.title()); diff --git a/hammond-downloader/src/downloader.rs b/hammond-downloader/src/downloader.rs index 34f80ba..07ad6a6 100644 --- a/hammond-downloader/src/downloader.rs +++ b/hammond-downloader/src/downloader.rs @@ -17,6 +17,7 @@ use hammond_data::xdg_dirs::HAMMOND_CACHE; use std::result; use failure::Error; + type Result = result::Result; // TODO: Replace path that are of type &str with std::path.