Switch the rest of data/downloader to the failure crate.

This commit is contained in:
Jordan Petridis 2018-02-04 17:36:27 +02:00
parent f9f015a211
commit de43cae015
No known key found for this signature in database
GPG Key ID: CEABAD9F5683B9A6
20 changed files with 102 additions and 97 deletions

2
Cargo.lock generated
View File

@ -661,6 +661,8 @@ version = "0.1.0"
dependencies = [ dependencies = [
"chrono 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "chrono 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"dissolve 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "dissolve 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"failure_derive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"gdk 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "gdk 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"gdk-pixbuf 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "gdk-pixbuf 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"gio 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "gio 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",

View File

@ -6,11 +6,15 @@ use diesel::prelude::*;
use diesel; use diesel;
use diesel::dsl::exists; use diesel::dsl::exists;
use diesel::select; use diesel::select;
use failure::Error;
use database::connection; use database::connection;
use errors::*;
use models::*; use models::*;
// Feel free to open a Merge request that manually replaces Result<T> if you feel bored.
use std::result;
type Result<T> = result::Result<T, Error>;
pub fn get_sources() -> Result<Vec<Source>> { pub fn get_sources() -> Result<Vec<Source>> {
use schema::source::dsl::*; use schema::source::dsl::*;
let db = connection(); let db = connection();

View File

@ -4,25 +4,20 @@ use diesel_migrations::RunMigrationsError;
use hyper; use hyper;
use native_tls; use native_tls;
use reqwest; use reqwest;
use rss; // use rss;
use url; use url;
use std::io; use std::io;
error_chain! { #[allow(dead_code)]
foreign_links { #[derive(Fail, Debug)]
DieselResultError(diesel::result::Error); #[fail(display = "IO Error: {}", _0)]
DieselMigrationError(RunMigrationsError); struct IOError(io::Error);
R2D2Error(r2d2::Error);
R2D2PoolError(r2d2::PoolError); // NOT SYNC: rss::Error does not implement Sync, so it cannot derive Fail yet
RSSError(rss::Error); // #[derive(Fail, Debug)]
ReqError(reqwest::Error); // #[fail(display = "RSS Error: {}", _0)]
HyperError(hyper::Error); // struct RSSError(rss::Error);
UrlError(url::ParseError);
TLSError(native_tls::Error);
IoError(io::Error);
}
}
#[derive(Fail, Debug)] #[derive(Fail, Debug)]
pub enum DatabaseError { pub enum DatabaseError {
@ -31,3 +26,11 @@ pub enum DatabaseError {
#[fail(display = "R2D2 error: {}", _0)] R2D2Error(r2d2::Error), #[fail(display = "R2D2 error: {}", _0)] R2D2Error(r2d2::Error),
#[fail(display = "R2D2 Pool error: {}", _0)] R2D2PoolError(r2d2::PoolError), #[fail(display = "R2D2 Pool error: {}", _0)] R2D2PoolError(r2d2::PoolError),
} }
#[derive(Fail, Debug)]
pub enum HttpError {
#[fail(display = "Reqwest Error: {}", _0)] ReqError(reqwest::Error),
#[fail(display = "Hyper Error: {}", _0)] HyperError(hyper::Error),
#[fail(display = "Url Error: {}", _0)] UrlError(url::ParseError),
#[fail(display = "TLS Error: {}", _0)] TLSError(native_tls::Error),
}

View File

@ -1,11 +1,11 @@
//! Index Feeds. //! Index Feeds.
use failure::Error;
use futures::future::*; use futures::future::*;
use itertools::{Either, Itertools}; use itertools::{Either, Itertools};
use rss; use rss;
use dbqueries; use dbqueries;
use errors::*;
use models::{Index, IndexState, Update}; use models::{Index, IndexState, Update};
use models::{NewEpisode, NewPodcast, Podcast}; use models::{NewEpisode, NewPodcast, Podcast};
use pipeline::*; use pipeline::*;

View File

@ -28,7 +28,7 @@ extern crate diesel;
#[macro_use] #[macro_use]
extern crate diesel_migrations; extern crate diesel_migrations;
#[macro_use] #[macro_use]
extern crate error_chain; extern crate failure;
#[macro_use] #[macro_use]
extern crate failure_derive; extern crate failure_derive;
#[macro_use] #[macro_use]
@ -38,7 +38,6 @@ extern crate log;
extern crate ammonia; extern crate ammonia;
extern crate chrono; extern crate chrono;
extern crate failure;
extern crate futures; extern crate futures;
extern crate futures_cpupool; extern crate futures_cpupool;
extern crate hyper; extern crate hyper;

View File

@ -2,9 +2,9 @@ use chrono::prelude::*;
use diesel; use diesel;
use diesel::SaveChangesDsl; use diesel::SaveChangesDsl;
use diesel::prelude::*; use diesel::prelude::*;
use failure::Error;
use database::connection; use database::connection;
use errors::*;
use models::{Podcast, Save}; use models::{Podcast, Save};
use schema::episode; use schema::episode;
@ -33,7 +33,7 @@ pub struct Episode {
impl Save<Episode> for Episode { impl Save<Episode> for Episode {
/// Helper method to easily save/"sync" current state of self to the Database. /// Helper method to easily save/"sync" current state of self to the Database.
fn save(&self) -> Result<Episode> { fn save(&self) -> Result<Episode, Error> {
let db = connection(); let db = connection();
let tempdb = db.get()?; let tempdb = db.get()?;
@ -180,7 +180,7 @@ impl Episode {
} }
/// Sets the `played` value with the current `epoch` timestamp and saves it. /// Sets the `played` value with the current `epoch` timestamp and saves it.
pub fn set_played_now(&mut self) -> Result<()> { pub fn set_played_now(&mut self) -> Result<(), Error> {
let epoch = Utc::now().timestamp() as i32; let epoch = Utc::now().timestamp() as i32;
self.set_played(Some(epoch)); self.set_played(Some(epoch));
self.save().map(|_| ()) self.save().map(|_| ())
@ -225,7 +225,7 @@ impl From<Episode> for EpisodeWidgetQuery {
impl Save<usize> for EpisodeWidgetQuery { impl Save<usize> for EpisodeWidgetQuery {
/// Helper method to easily save/"sync" current state of self to the Database. /// Helper method to easily save/"sync" current state of self to the Database.
fn save(&self) -> Result<usize> { fn save(&self) -> Result<usize, Error> {
use schema::episode::dsl::*; use schema::episode::dsl::*;
let db = connection(); let db = connection();
@ -342,7 +342,7 @@ impl EpisodeWidgetQuery {
} }
/// Sets the `played` value with the current `epoch` timestamp and saves it. /// Sets the `played` value with the current `epoch` timestamp and saves it.
pub fn set_played_now(&mut self) -> Result<()> { pub fn set_played_now(&mut self) -> Result<(), Error> {
let epoch = Utc::now().timestamp() as i32; let epoch = Utc::now().timestamp() as i32;
self.set_played(Some(epoch)); self.set_played(Some(epoch));
self.save().map(|_| ()) self.save().map(|_| ())
@ -363,7 +363,7 @@ pub struct EpisodeCleanerQuery {
impl Save<usize> for EpisodeCleanerQuery { impl Save<usize> for EpisodeCleanerQuery {
/// Helper method to easily save/"sync" current state of self to the Database. /// Helper method to easily save/"sync" current state of self to the Database.
fn save(&self) -> Result<usize> { fn save(&self) -> Result<usize, Error> {
use schema::episode::dsl::*; use schema::episode::dsl::*;
let db = connection(); let db = connection();

View File

@ -23,7 +23,7 @@ pub use self::episode::{Episode, EpisodeMinimal, EpisodeWidgetQuery};
pub use self::podcast::{Podcast, PodcastCoverQuery}; pub use self::podcast::{Podcast, PodcastCoverQuery};
pub use self::source::Source; pub use self::source::Source;
use errors::*; use failure::Error;
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub enum IndexState<T> { pub enum IndexState<T> {
@ -33,19 +33,19 @@ pub enum IndexState<T> {
} }
pub trait Insert { pub trait Insert {
fn insert(&self) -> Result<()>; fn insert(&self) -> Result<(), Error>;
} }
pub trait Update { pub trait Update {
fn update(&self, i32) -> Result<()>; fn update(&self, i32) -> Result<(), Error>;
} }
pub trait Index: Insert + Update { pub trait Index: Insert + Update {
fn index(&self) -> Result<()>; fn index(&self) -> Result<(), Error>;
} }
/// FIXME: DOCS /// FIXME: DOCS
pub trait Save<T> { pub trait Save<T> {
/// Helper method to easily save/"sync" current state of a diesel model to the Database. /// Helper method to easily save/"sync" current state of a diesel model to the Database.
fn save(&self) -> Result<T>; fn save(&self) -> Result<T, Error>;
} }

View File

@ -1,17 +1,15 @@
use diesel::prelude::*;
use diesel;
use schema::episode;
use ammonia; use ammonia;
use diesel;
use diesel::prelude::*;
use failure::Error;
use rfc822_sanitizer::parse_from_rfc2822_with_fallback as parse_rfc822; use rfc822_sanitizer::parse_from_rfc2822_with_fallback as parse_rfc822;
use rss; use rss;
use database::connection; use database::connection;
use dbqueries; use dbqueries;
use errors::*;
use models::{Episode, EpisodeMinimal, Index, Insert, Update}; use models::{Episode, EpisodeMinimal, Index, Insert, Update};
use parser; use parser;
use schema::episode;
use utils::{replace_extra_spaces, url_cleaner}; use utils::{replace_extra_spaces, url_cleaner};
#[derive(Insertable, AsChangeset)] #[derive(Insertable, AsChangeset)]
@ -46,7 +44,7 @@ impl From<NewEpisodeMinimal> for NewEpisode {
} }
impl Insert for NewEpisode { impl Insert for NewEpisode {
fn insert(&self) -> Result<()> { fn insert(&self) -> Result<(), Error> {
use schema::episode::dsl::*; use schema::episode::dsl::*;
let db = connection(); let db = connection();
let con = db.get()?; let con = db.get()?;
@ -61,7 +59,7 @@ impl Insert for NewEpisode {
} }
impl Update for NewEpisode { impl Update for NewEpisode {
fn update(&self, episode_id: i32) -> Result<()> { fn update(&self, episode_id: i32) -> Result<(), Error> {
use schema::episode::dsl::*; use schema::episode::dsl::*;
let db = connection(); let db = connection();
let con = db.get()?; let con = db.get()?;
@ -77,7 +75,7 @@ impl Update for NewEpisode {
impl Index for NewEpisode { impl Index for NewEpisode {
// Does not update the episode description if it's the only thing that has changed. // Does not update the episode description if it's the only thing that has changed.
fn index(&self) -> Result<()> { fn index(&self) -> Result<(), Error> {
let exists = dbqueries::episode_exists(self.title(), self.podcast_id())?; let exists = dbqueries::episode_exists(self.title(), self.podcast_id())?;
if exists { if exists {
@ -115,12 +113,12 @@ impl PartialEq<Episode> for NewEpisode {
impl NewEpisode { impl NewEpisode {
/// Parses an `rss::Item` into a `NewEpisode` Struct. /// Parses an `rss::Item` into a `NewEpisode` Struct.
#[allow(dead_code)] #[allow(dead_code)]
pub(crate) fn new(item: &rss::Item, podcast_id: i32) -> Result<Self> { pub(crate) fn new(item: &rss::Item, podcast_id: i32) -> Result<Self, Error> {
NewEpisodeMinimal::new(item, podcast_id).map(|ep| ep.into_new_episode(item)) NewEpisodeMinimal::new(item, podcast_id).map(|ep| ep.into_new_episode(item))
} }
#[allow(dead_code)] #[allow(dead_code)]
pub(crate) fn to_episode(&self) -> Result<Episode> { pub(crate) fn to_episode(&self) -> Result<Episode, Error> {
self.index()?; self.index()?;
dbqueries::get_episode_from_pk(&self.title, self.podcast_id) dbqueries::get_episode_from_pk(&self.title, self.podcast_id)
} }
@ -184,7 +182,7 @@ impl PartialEq<EpisodeMinimal> for NewEpisodeMinimal {
} }
impl NewEpisodeMinimal { impl NewEpisodeMinimal {
pub(crate) fn new(item: &rss::Item, parent_id: i32) -> Result<Self> { pub(crate) fn new(item: &rss::Item, parent_id: i32) -> Result<Self, Error> {
if item.title().is_none() { if item.title().is_none() {
bail!("No title specified for the item.") bail!("No title specified for the item.")
} }
@ -208,7 +206,7 @@ impl NewEpisodeMinimal {
let duration = parser::parse_itunes_duration(item.itunes_ext()); let duration = parser::parse_itunes_duration(item.itunes_ext());
NewEpisodeMinimalBuilder::default() Ok(NewEpisodeMinimalBuilder::default()
.title(title) .title(title)
.uri(uri) .uri(uri)
.duration(duration) .duration(duration)
@ -216,7 +214,7 @@ impl NewEpisodeMinimal {
.guid(guid) .guid(guid)
.podcast_id(parent_id) .podcast_id(parent_id)
.build() .build()
.map_err(From::from) .unwrap())
} }
pub(crate) fn into_new_episode(self, item: &rss::Item) -> NewEpisode { pub(crate) fn into_new_episode(self, item: &rss::Item) -> NewEpisode {

View File

@ -2,6 +2,7 @@ use diesel;
use diesel::prelude::*; use diesel::prelude::*;
use ammonia; use ammonia;
use failure::Error;
use rss; use rss;
use models::{Index, Insert, Update}; use models::{Index, Insert, Update};
@ -12,8 +13,6 @@ use database::connection;
use dbqueries; use dbqueries;
use utils::{replace_extra_spaces, url_cleaner}; use utils::{replace_extra_spaces, url_cleaner};
use errors::*;
#[derive(Insertable, AsChangeset)] #[derive(Insertable, AsChangeset)]
#[table_name = "podcast"] #[table_name = "podcast"]
#[derive(Debug, Clone, Default, Builder, PartialEq)] #[derive(Debug, Clone, Default, Builder, PartialEq)]
@ -29,7 +28,7 @@ pub(crate) struct NewPodcast {
} }
impl Insert for NewPodcast { impl Insert for NewPodcast {
fn insert(&self) -> Result<()> { fn insert(&self) -> Result<(), Error> {
use schema::podcast::dsl::*; use schema::podcast::dsl::*;
let db = connection(); let db = connection();
let con = db.get()?; let con = db.get()?;
@ -43,7 +42,7 @@ impl Insert for NewPodcast {
} }
impl Update for NewPodcast { impl Update for NewPodcast {
fn update(&self, podcast_id: i32) -> Result<()> { fn update(&self, podcast_id: i32) -> Result<(), Error> {
use schema::podcast::dsl::*; use schema::podcast::dsl::*;
let db = connection(); let db = connection();
let con = db.get()?; let con = db.get()?;
@ -60,7 +59,7 @@ impl Update for NewPodcast {
// TODO: Maybe return an Enum<Action(Resut)> Instead. // TODO: Maybe return an Enum<Action(Resut)> Instead.
// It would make unit testing better too. // It would make unit testing better too.
impl Index for NewPodcast { impl Index for NewPodcast {
fn index(&self) -> Result<()> { fn index(&self) -> Result<(), Error> {
let exists = dbqueries::podcast_exists(self.source_id)?; let exists = dbqueries::podcast_exists(self.source_id)?;
if exists { if exists {
@ -119,7 +118,7 @@ impl NewPodcast {
} }
// Look out for when tryinto lands into stable. // Look out for when tryinto lands into stable.
pub(crate) fn to_podcast(&self) -> Result<Podcast> { pub(crate) fn to_podcast(&self) -> Result<Podcast, Error> {
self.index()?; self.index()?;
dbqueries::get_podcast_from_source_id(self.source_id).map_err(From::from) dbqueries::get_podcast_from_source_id(self.source_id).map_err(From::from)
} }

View File

@ -2,6 +2,7 @@
use diesel; use diesel;
use diesel::prelude::*; use diesel::prelude::*;
use failure::Error;
use url::Url; use url::Url;
use database::connection; use database::connection;
@ -10,8 +11,6 @@ use dbqueries;
use models::Source; use models::Source;
use schema::source; use schema::source;
use errors::*;
#[derive(Insertable)] #[derive(Insertable)]
#[table_name = "source"] #[table_name = "source"]
#[derive(Debug, Clone, Default, Builder, PartialEq)] #[derive(Debug, Clone, Default, Builder, PartialEq)]
@ -33,7 +32,7 @@ impl NewSource {
} }
} }
pub(crate) fn insert_or_ignore(&self) -> Result<()> { pub(crate) fn insert_or_ignore(&self) -> Result<(), Error> {
use schema::source::dsl::*; use schema::source::dsl::*;
let db = connection(); let db = connection();
let con = db.get()?; let con = db.get()?;
@ -46,7 +45,7 @@ impl NewSource {
} }
// Look out for when tryinto lands into stable. // Look out for when tryinto lands into stable.
pub(crate) fn to_source(&self) -> Result<Source> { pub(crate) fn to_source(&self) -> Result<Source, Error> {
self.insert_or_ignore()?; self.insert_or_ignore()?;
dbqueries::get_source_from_uri(&self.uri) dbqueries::get_source_from_uri(&self.uri)
} }

View File

@ -1,7 +1,7 @@
use diesel::SaveChangesDsl; use diesel::SaveChangesDsl;
use failure::Error;
use database::connection; use database::connection;
use errors::*;
use models::{Save, Source}; use models::{Save, Source};
use schema::podcast; use schema::podcast;
@ -25,7 +25,7 @@ pub struct Podcast {
impl Save<Podcast> for Podcast { impl Save<Podcast> for Podcast {
/// Helper method to easily save/"sync" current state of self to the Database. /// Helper method to easily save/"sync" current state of self to the Database.
fn save(&self) -> Result<Podcast> { fn save(&self) -> Result<Podcast, Error> {
let db = connection(); let db = connection();
let tempdb = db.get()?; let tempdb = db.get()?;

View File

@ -1,4 +1,5 @@
use diesel::SaveChangesDsl; use diesel::SaveChangesDsl;
use failure::Error;
use rss::Channel; use rss::Channel;
use url::Url; use url::Url;
@ -13,7 +14,6 @@ use futures::prelude::*;
use futures_cpupool::CpuPool; use futures_cpupool::CpuPool;
use database::connection; use database::connection;
use errors::*;
use feed::{Feed, FeedBuilder}; use feed::{Feed, FeedBuilder};
use models::{NewSource, Save}; use models::{NewSource, Save};
use schema::source; use schema::source;
@ -34,7 +34,7 @@ pub struct Source {
impl Save<Source> for Source { impl Save<Source> for Source {
/// Helper method to easily save/"sync" current state of self to the Database. /// Helper method to easily save/"sync" current state of self to the Database.
fn save(&self) -> Result<Source> { fn save(&self) -> Result<Source, Error> {
let db = connection(); let db = connection();
let con = db.get()?; let con = db.get()?;
@ -85,7 +85,7 @@ impl Source {
/// Extract Etag and LastModifier from res, and update self and the /// Extract Etag and LastModifier from res, and update self and the
/// corresponding db row. /// corresponding db row.
fn update_etag(&mut self, res: &Response) -> Result<()> { fn update_etag(&mut self, res: &Response) -> Result<(), Error> {
let headers = res.headers(); let headers = res.headers();
let etag = headers.get::<ETag>().map(|x| x.tag()); let etag = headers.get::<ETag>().map(|x| x.tag());
@ -109,7 +109,7 @@ impl Source {
// 403: Forbidden // 403: Forbidden
// 408: Timeout // 408: Timeout
// 410: Feed deleted // 410: Feed deleted
fn match_status(mut self, res: Response) -> Result<(Self, Response)> { fn match_status(mut self, res: Response) -> Result<(Self, Response), Error> {
self.update_etag(&res)?; self.update_etag(&res)?;
let code = res.status(); let code = res.status();
match code { match code {
@ -131,7 +131,7 @@ impl Source {
Ok((self, res)) Ok((self, res))
} }
fn handle_301(&mut self, res: &Response) -> Result<()> { fn handle_301(&mut self, res: &Response) -> Result<(), Error> {
let headers = res.headers(); let headers = res.headers();
if let Some(url) = headers.get::<Location>() { if let Some(url) = headers.get::<Location>() {
@ -150,7 +150,7 @@ impl Source {
/// Construct a new `Source` with the given `uri` and index it. /// Construct a new `Source` with the given `uri` and index it.
/// ///
/// This only indexes the `Source` struct, not the Podcast Feed. /// This only indexes the `Source` struct, not the Podcast Feed.
pub fn from_url(uri: &str) -> Result<Source> { pub fn from_url(uri: &str) -> Result<Source, Error> {
let url = Url::parse(uri)?; let url = Url::parse(uri)?;
NewSource::new(&url).to_source() NewSource::new(&url).to_source()
@ -174,11 +174,11 @@ impl Source {
let feed = self.request_constructor(client, ignore_etags) let feed = self.request_constructor(client, ignore_etags)
.and_then(move |(_, res)| response_to_channel(res, pool)) .and_then(move |(_, res)| response_to_channel(res, pool))
.and_then(move |chan| { .and_then(move |chan| {
FeedBuilder::default() Ok(FeedBuilder::default()
.channel(chan) .channel(chan)
.source_id(id) .source_id(id)
.build() .build()
.map_err(From::from) .unwrap())
}); });
Box::new(feed) Box::new(feed)
@ -228,7 +228,8 @@ fn response_to_channel(
.map_err(From::from) .map_err(From::from)
.map(|iter| iter.collect::<Vec<u8>>()) .map(|iter| iter.collect::<Vec<u8>>())
.map(|utf_8_bytes| String::from_utf8_lossy(&utf_8_bytes).into_owned()) .map(|utf_8_bytes| String::from_utf8_lossy(&utf_8_bytes).into_owned())
.and_then(|buf| Channel::from_str(&buf).map_err(From::from)); // FIXME: Unwrap
.and_then(|buf| Ok(Channel::from_str(&buf).unwrap()));
let cpu_chan = pool.spawn(chan); let cpu_chan = pool.spawn(chan);
Box::new(cpu_chan) Box::new(cpu_chan)
} }

View File

@ -10,16 +10,15 @@ use hyper::client::HttpConnector;
use hyper_tls::HttpsConnector; use hyper_tls::HttpsConnector;
use tokio_core::reactor::Core; use tokio_core::reactor::Core;
use failure::Error;
use num_cpus; use num_cpus;
use rss; use rss;
use Source; use Source;
use dbqueries; use dbqueries;
use errors::*;
use models::{IndexState, NewEpisode, NewEpisodeMinimal}; use models::{IndexState, NewEpisode, NewEpisodeMinimal};
// use Feed; // use Feed;
use std;
// use std::sync::{Arc, Mutex}; // use std::sync::{Arc, Mutex};
macro_rules! clone { macro_rules! clone {
@ -51,7 +50,7 @@ pub fn pipeline<S: IntoIterator<Item = Source>>(
tokio_core: &mut Core, tokio_core: &mut Core,
pool: &CpuPool, pool: &CpuPool,
client: Client<HttpsConnector<HttpConnector>>, client: Client<HttpsConnector<HttpConnector>>,
) -> Result<()> { ) -> Result<(), Error> {
let list: Vec<_> = sources let list: Vec<_> = sources
.into_iter() .into_iter()
.map(clone!(pool => move |s| s.into_feed(&client, pool.clone(), ignore_etags))) .map(clone!(pool => move |s| s.into_feed(&client, pool.clone(), ignore_etags)))
@ -70,7 +69,7 @@ pub fn pipeline<S: IntoIterator<Item = Source>>(
} }
/// Creates a tokio `reactor::Core`, a `CpuPool`, and a `hyper::Client` and runs the pipeline. /// Creates a tokio `reactor::Core`, a `CpuPool`, and a `hyper::Client` and runs the pipeline.
pub fn run(sources: Vec<Source>, ignore_etags: bool) -> Result<()> { pub fn run(sources: Vec<Source>, ignore_etags: bool) -> Result<(), Error> {
if sources.is_empty() { if sources.is_empty() {
return Ok(()); return Ok(());
} }
@ -86,7 +85,7 @@ pub fn run(sources: Vec<Source>, ignore_etags: bool) -> Result<()> {
} }
/// Docs /// Docs
pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<()> { pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<(), Error> {
let pool = CpuPool::new_num_cpus(); let pool = CpuPool::new_num_cpus();
let mut core = Core::new()?; let mut core = Core::new()?;
let handle = core.handle(); let handle = core.handle();
@ -102,7 +101,10 @@ pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<()> {
core.run(work) core.run(work)
} }
fn determine_ep_state(ep: NewEpisodeMinimal, item: &rss::Item) -> Result<IndexState<NewEpisode>> { fn determine_ep_state(
ep: NewEpisodeMinimal,
item: &rss::Item,
) -> Result<IndexState<NewEpisode>, Error> {
// Check if feed exists // Check if feed exists
let exists = dbqueries::episode_exists(ep.title(), ep.podcast_id())?; let exists = dbqueries::episode_exists(ep.title(), ep.podcast_id())?;
@ -135,7 +137,7 @@ pub(crate) fn glue_async<'a>(
#[cfg_attr(feature = "cargo-clippy", allow(type_complexity))] #[cfg_attr(feature = "cargo-clippy", allow(type_complexity))]
pub fn collect_futures<F>( pub fn collect_futures<F>(
futures: Vec<F>, futures: Vec<F>,
) -> Box<Future<Item = Vec<std::result::Result<F::Item, F::Error>>, Error = Error>> ) -> Box<Future<Item = Vec<Result<F::Item, F::Error>>, Error = Error>>
where where
F: 'static + Future, F: 'static + Future,
<F as Future>::Item: 'static, <F as Future>::Item: 'static,

View File

@ -3,11 +3,12 @@
use chrono::prelude::*; use chrono::prelude::*;
use rayon::prelude::*; use rayon::prelude::*;
use failure::Error;
use itertools::Itertools; use itertools::Itertools;
use url::{Position, Url}; use url::{Position, Url};
use dbqueries; use dbqueries;
use errors::*; // use errors::*;
use models::{EpisodeCleanerQuery, Podcast, Save}; use models::{EpisodeCleanerQuery, Podcast, Save};
use xdg_dirs::DL_DIR; use xdg_dirs::DL_DIR;
@ -15,7 +16,7 @@ use std::fs;
use std::path::Path; use std::path::Path;
/// Scan downloaded `episode` entries that might have broken `local_uri`s and set them to `None`. /// Scan downloaded `episode` entries that might have broken `local_uri`s and set them to `None`.
fn download_checker() -> Result<()> { fn download_checker() -> Result<(), Error> {
let mut episodes = dbqueries::get_downloaded_episodes()?; let mut episodes = dbqueries::get_downloaded_episodes()?;
episodes episodes
@ -33,7 +34,7 @@ fn download_checker() -> Result<()> {
} }
/// Delete watched `episodes` that have exceeded their lifetime after being played. /// Delete watched `episodes` that have exceeded their lifetime after being played.
fn played_cleaner() -> Result<()> { fn played_cleaner() -> Result<(), Error> {
let mut episodes = dbqueries::get_played_cleaner_episodes()?; let mut episodes = dbqueries::get_played_cleaner_episodes()?;
let now_utc = Utc::now().timestamp() as i32; let now_utc = Utc::now().timestamp() as i32;
@ -57,7 +58,7 @@ fn played_cleaner() -> Result<()> {
} }
/// Check `ep.local_uri` field and delete the file it points to. /// Check `ep.local_uri` field and delete the file it points to.
fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<()> { fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<(), Error> {
if ep.local_uri().is_some() { if ep.local_uri().is_some() {
let uri = ep.local_uri().unwrap().to_owned(); let uri = ep.local_uri().unwrap().to_owned();
if Path::new(&uri).exists() { if Path::new(&uri).exists() {
@ -86,7 +87,7 @@ fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<()> {
/// ///
/// Runs a cleaner for played Episodes that are past the lifetime limit and /// Runs a cleaner for played Episodes that are past the lifetime limit and
/// scheduled for removal. /// scheduled for removal.
pub fn checkup() -> Result<()> { pub fn checkup() -> Result<(), Error> {
info!("Running database checks."); info!("Running database checks.");
download_checker()?; download_checker()?;
played_cleaner()?; played_cleaner()?;
@ -123,7 +124,7 @@ pub fn replace_extra_spaces(s: &str) -> String {
} }
/// Returns the URI of a Podcast Downloads given it's title. /// Returns the URI of a Podcast Downloads given it's title.
pub fn get_download_folder(pd_title: &str) -> Result<String> { pub fn get_download_folder(pd_title: &str) -> Result<String, Error> {
// It might be better to make it a hash of the title or the podcast rowid // It might be better to make it a hash of the title or the podcast rowid
let download_fold = format!("{}/{}", DL_DIR.to_str().unwrap(), pd_title); let download_fold = format!("{}/{}", DL_DIR.to_str().unwrap(), pd_title);
@ -137,7 +138,7 @@ pub fn get_download_folder(pd_title: &str) -> Result<String> {
/// Removes all the entries associated with the given show from the database, /// Removes all the entries associated with the given show from the database,
/// and deletes all of the downloaded content. /// and deletes all of the downloaded content.
// TODO: Write Tests // TODO: Write Tests
pub fn delete_show(pd: &Podcast) -> Result<()> { pub fn delete_show(pd: &Podcast) -> Result<(), Error> {
dbqueries::remove_feed(pd)?; dbqueries::remove_feed(pd)?;
info!("{} was removed succesfully.", pd.title()); info!("{} was removed succesfully.", pd.title());

View File

@ -11,11 +11,13 @@ use std::io::{BufWriter, Read, Write};
use std::path::Path; use std::path::Path;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use errors::*;
use hammond_data::{EpisodeWidgetQuery, PodcastCoverQuery, Save}; use hammond_data::{EpisodeWidgetQuery, PodcastCoverQuery, Save};
use hammond_data::xdg_dirs::HAMMOND_CACHE; use hammond_data::xdg_dirs::HAMMOND_CACHE;
// use failure::Error; use std::result;
use failure::Error;
type Result<T> = result::Result<T, Error>;
// TODO: Replace path that are of type &str with std::path. // TODO: Replace path that are of type &str with std::path.
// TODO: Have a convention/document absolute/relative paths, if they should end with / or not. // TODO: Have a convention/document absolute/relative paths, if they should end with / or not.

View File

@ -2,19 +2,9 @@ use hammond_data;
use reqwest; use reqwest;
use std::io; use std::io;
error_chain! {
foreign_links {
ReqError(reqwest::Error);
IoError(io::Error);
DataError(hammond_data::errors::Error);
}
}
#[derive(Fail, Debug)] #[derive(Fail, Debug)]
enum DownloaderError { enum DownloaderError {
#[fail(display = "Reqwest error: {}", _0)] RequestError(reqwest::Error), #[fail(display = "Reqwest error: {}", _0)] RequestError(reqwest::Error),
// NOT SYNC. #[fail(display = "Data error: {}", _0)] DataError(hammond_data::errors::DatabaseError),
// #[fail(display = "Data error: {}", _0)]
// DataError(hammond_data::errors::Error),
#[fail(display = "Io error: {}", _0)] IoError(io::Error), #[fail(display = "Io error: {}", _0)] IoError(io::Error),
} }

View File

@ -2,13 +2,12 @@
// #![deny(unused_extern_crates, unused)] // #![deny(unused_extern_crates, unused)]
#[macro_use] #[macro_use]
extern crate error_chain; extern crate failure;
#[macro_use] #[macro_use]
extern crate failure_derive; extern crate failure_derive;
#[macro_use] #[macro_use]
extern crate log; extern crate log;
extern crate failure;
extern crate glob; extern crate glob;
extern crate hammond_data; extern crate hammond_data;
extern crate hyper; extern crate hyper;

View File

@ -20,6 +20,8 @@ open = "1.2.1"
rayon = "0.9.0" rayon = "0.9.0"
send-cell = "0.1.2" send-cell = "0.1.2"
url = "1.6.0" url = "1.6.0"
failure = "0.1.1"
failure_derive = "0.1.1"
[dependencies.gtk] [dependencies.gtk]
features = ["v3_22"] features = ["v3_22"]

View File

@ -1,5 +1,5 @@
#![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr, needless_pass_by_value))] #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr, needless_pass_by_value))]
#![deny(unused_extern_crates, unused)] // #![deny(unused_extern_crates, unused)]
extern crate gdk; extern crate gdk;
extern crate gdk_pixbuf; extern crate gdk_pixbuf;
@ -7,6 +7,10 @@ extern crate gio;
extern crate glib; extern crate glib;
extern crate gtk; extern crate gtk;
#[macro_use]
extern crate failure;
#[macro_use]
extern crate failure_derive;
#[macro_use] #[macro_use]
extern crate lazy_static; extern crate lazy_static;
#[macro_use] #[macro_use]

View File

@ -4,12 +4,12 @@ use gtk;
use chrono::prelude::*; use chrono::prelude::*;
use gtk::prelude::*; use gtk::prelude::*;
use failure::Error;
use humansize::{file_size_opts as size_opts, FileSize}; use humansize::{file_size_opts as size_opts, FileSize};
use open; use open;
use hammond_data::{EpisodeWidgetQuery, Podcast}; use hammond_data::{EpisodeWidgetQuery, Podcast};
use hammond_data::dbqueries; use hammond_data::dbqueries;
use hammond_data::errors::*;
use hammond_data::utils::get_download_folder; use hammond_data::utils::get_download_folder;
use app::Action; use app::Action;
@ -368,7 +368,7 @@ fn update_total_size_callback(prog: Arc<Mutex<manager::Progress>>, total_size: g
// }; // };
// } // }
pub fn episodes_listbox(pd: &Podcast, sender: Sender<Action>) -> Result<gtk::ListBox> { pub fn episodes_listbox(pd: &Podcast, sender: Sender<Action>) -> Result<gtk::ListBox, Error> {
let mut episodes = dbqueries::get_pd_episodeswidgets(pd)?; let mut episodes = dbqueries::get_pd_episodeswidgets(pd)?;
let list = gtk::ListBox::new(); let list = gtk::ListBox::new();