diff --git a/hammond-data/src/models/new_episode.rs b/hammond-data/src/models/new_episode.rs index 79ce8df..e9969c3 100644 --- a/hammond-data/src/models/new_episode.rs +++ b/hammond-data/src/models/new_episode.rs @@ -211,7 +211,7 @@ impl NewEpisodeMinimal { let duration = parser::parse_itunes_duration(item); - Ok(NewEpisodeMinimalBuilder::default() + NewEpisodeMinimalBuilder::default() .title(title) .uri(uri) .duration(duration) @@ -219,7 +219,7 @@ impl NewEpisodeMinimal { .guid(guid) .podcast_id(parent_id) .build() - .unwrap()) + .map_err(From::from) } pub(crate) fn into_new_episode(self, item: &rss::Item) -> NewEpisode { diff --git a/hammond-data/src/models/source.rs b/hammond-data/src/models/source.rs index 83634b7..36ac7b7 100644 --- a/hammond-data/src/models/source.rs +++ b/hammond-data/src/models/source.rs @@ -172,12 +172,12 @@ impl Source { Ok(res) }) .and_then(move |res| response_to_channel(res, pool)) - .map(move |chan| { + .and_then(move |chan| { FeedBuilder::default() .channel(chan) .source_id(id) .build() - .unwrap() + .map_err(From::from) }); Box::new(feed) diff --git a/hammond-data/src/pipeline.rs b/hammond-data/src/pipeline.rs index c1fee9a..42205a3 100644 --- a/hammond-data/src/pipeline.rs +++ b/hammond-data/src/pipeline.rs @@ -57,13 +57,11 @@ pub fn pipeline>(sources: S, ignore_etags: bool) .into_iter() .map(clone!(pool => move |s| s.into_feed(&client, pool.clone(), ignore_etags))) .map(|fut| fut.and_then(clone!(pool => move |feed| pool.clone().spawn(feed.index())))) + .map(|fut| fut.map(|_| ()).map_err(|err| error!("Error: {}", err))) .collect(); - let f = core.run(collect_futures(list))?; - f.into_iter() - .filter_map(|x| x.err()) - .for_each(|err| error!("Error: {}", err)); - + // That's not really concurrent yet, I think. + core.run(collect_futures(list))?; Ok(()) }