hammond_data::Feed: general cleanup of no longer needed stuff.

This commit is contained in:
Jordan Petridis 2018-01-15 11:03:40 +02:00
parent 10345ffda7
commit 3358fcd0b3
No known key found for this signature in database
GPG Key ID: CEABAD9F5683B9A6
7 changed files with 26 additions and 36 deletions

View File

@@ -62,7 +62,7 @@ fn index_urls() {
})
.collect();
feeds.iter().for_each(|x| index(x));
feeds.iter().for_each(|x| x.index().unwrap());
}
#[bench]
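The benchmark now calls the inherent `Feed::index()` method instead of the free `index()` helper that the rest of this commit removes. A minimal sketch of the pattern the call sites converge on (the `index_feed` wrapper is hypothetical and assumes the crate's `Result` alias from `errors`; it is shown only to illustrate the new API):

    // Hypothetical helper: build a Feed from a Source and index it through the
    // inherent method, propagating any error instead of unwrapping.
    fn index_feed(source: &mut Source) -> Result<()> {
        let feed = Feed::from_source(source)?;
        feed.index()?;
        Ok(())
    }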

View File

@@ -9,11 +9,14 @@ use rss;
use dbqueries;
use parser;
use models::queryables::{Episode, Podcast, Source};
use models::queryables::{Podcast, Source};
use models::insertables::{NewEpisode, NewPodcast};
use database::connection;
use errors::*;
#[cfg(test)]
use models::queryables::Episode;
#[derive(Debug)]
/// Wrapper struct that holds a `Source` and the `rss::Channel`
/// that corresponds to the `Source.uri` field.
@@ -29,11 +32,8 @@ impl Feed {
}
/// Constructor that consumes a `Source` and an `rss::Channel` and returns a `Feed` struct.
pub fn from_channel_source(chan: rss::Channel, s: i32) -> Feed {
Feed {
channel: chan,
source_id: s,
}
pub fn from_channel_source(channel: rss::Channel, source_id: i32) -> Feed {
Feed { channel, source_id }
}
/// docs
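The rewritten constructor relies on Rust's struct field init shorthand: when a local binding has the same name as a field, `Feed { channel, source_id }` is equivalent to the explicit `Feed { channel: channel, source_id: source_id }`. A standalone sketch of the idiom (the `Point` type below is made up purely for illustration):

    // Field init shorthand: the `x` and `y` bindings fill the fields of the same name.
    struct Point { x: i32, y: i32 }

    fn new_point(x: i32, y: i32) -> Point {
        Point { x, y } // same as Point { x: x, y: y }
    }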
@@ -85,15 +85,15 @@ impl Feed {
self.parse_channel().into_podcast()
}
#[allow(dead_code)]
#[cfg(test)]
/// This returns only the episodes in the xml feed.
/// Used for unit-tests only.
fn get_episodes(&self) -> Result<Vec<Episode>> {
let pd = self.get_podcast()?;
let eps = self.parse_channel_items(&pd);
let db = connection();
let con = db.get()?;
// TODO: Make it parallel
// This returns only the episodes in the xml feed.
let episodes: Vec<_> = eps.into_iter()
.filter_map(|ep| ep.into_episode(&con).ok())
.collect();
@@ -102,25 +102,12 @@ impl Feed {
}
}
/// Handle the indexing of a `Feed` into the Database.
pub fn index(feed: &Feed) {
if let Err(err) = feed.index() {
error!("Error While trying to update the database.");
error!("Error msg: {}", err);
};
}
/// Consume a `Source` and return a `Feed`.
fn fetch(source: &mut Source) -> Result<Feed> {
Feed::from_source(source)
}
/// Index a "list" of `Source`s.
pub fn index_loop<S: IntoParallelIterator<Item = Source>>(sources: S) {
sources
.into_par_iter()
.filter_map(|mut x| {
let foo = fetch(&mut x);
.filter_map(|mut source| {
let foo = Feed::from_source(&mut source);
if let Err(err) = foo {
error!("Error: {}", err);
None
@@ -128,7 +115,13 @@ pub fn index_loop<S: IntoParallelIterator<Item = Source>>(sources: S) {
foo.ok()
}
})
.for_each(|x| index(&x));
// Handle the indexing of a `Feed` into the Database.
.for_each(|feed| {
if let Err(err) = feed.index() {
error!("Error While trying to update the database.");
error!("Error msg: {}", err);
}
});
info!("Indexing done.");
}
@@ -208,7 +201,7 @@ mod tests {
.collect();
// Index the channels
feeds.par_iter().for_each(|x| index(&x));
feeds.par_iter().for_each(|x| x.index().unwrap());
// Assert the index rows equal the controlled results
assert_eq!(dbqueries::get_sources().unwrap().len(), 4);
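With the free `index()` and `fetch()` helpers gone, `index_loop` does everything itself: it converts each `Source` into a `Feed` in parallel, logs and drops the ones that fail, and indexes the rest via `Feed::index()`. A minimal usage sketch, assuming the sources are loaded from the database as in the test above:

    // Index every Source currently stored in the database (sketch only; the
    // expect() is just simplified error handling for the example).
    let sources = dbqueries::get_sources().expect("failed to load sources");
    index_loop(sources);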

View File

@@ -185,6 +185,7 @@ impl Update for NewEpisode {
impl NewEpisode {
// TODO: Refactor into batch indexes instead.
#[allow(dead_code)]
pub(crate) fn into_episode(self, con: &SqliteConnection) -> Result<Episode> {
self.index(con)?;
Ok(dbqueries::get_episode_from_pk(

View File

@@ -13,7 +13,7 @@ use hyper::header::{ETag, EntityTag, HttpDate, IfModifiedSince, IfNoneMatch, Las
use hyper_tls::HttpsConnector;
use futures::prelude::*;
// use futures::future::ok;
// use futures::future::{ok, result};
use schema::{episode, podcast, source};
use feed::Feed;
@@ -723,7 +723,6 @@ impl Source {
ignore_etags: bool,
) -> Box<Future<Item = Feed, Error = Error>> {
let id = self.id();
// TODO: make URI future
let feed = request_constructor(&self, client, ignore_etags)
.map_err(From::from)
.and_then(move |res| {
@@ -755,7 +754,7 @@ fn request_constructor(
client: &Client<HttpsConnector<HttpConnector>>,
ignore_etags: bool,
) -> Box<Future<Item = Response, Error = hyper::Error>> {
// FIXME: remove unwrap
// FIXME: remove unwrap somehow
let uri = Uri::from_str(&s.uri()).unwrap();
let mut req = Request::new(Method::Get, uri);
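The updated FIXME (together with the `result` added to the commented-out `futures::future` import) hints at how the `unwrap()` on `Uri::from_str` could eventually disappear: let the parse failure travel through the returned future instead of panicking. One possible direction, sketched under the assumption that `hyper::Error` implements `From` for the URI parse error (as it does in hyper 0.11):

    // Sketch only, not the crate's code: return an error future instead of
    // unwrapping the parsed URI.
    let uri = match Uri::from_str(&s.uri()) {
        Ok(uri) => uri,
        Err(err) => return Box::new(futures::future::err(err.into())),
    };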

View File

@@ -217,7 +217,6 @@ pub fn cache_image(pd: &PodcastCoverQuery) -> Option<String> {
mod tests {
use super::*;
use hammond_data::Source;
use hammond_data::feed::index;
use hammond_data::dbqueries;
#[test]
@@ -234,7 +233,7 @@ mod tests {
// Convert the Source into a Feed and index it
let feed = source.into_feed(true).unwrap();
index(&feed);
feed.index().unwrap();
// Get the Podcast
let pd = dbqueries::get_podcast_from_source_id(sid).unwrap().into();

View File

@@ -120,7 +120,6 @@ mod tests {
use hammond_data::database;
use hammond_data::utils::get_download_folder;
use hammond_data::feed::*;
use hammond_data::{Episode, Source};
use hammond_data::dbqueries;
@@ -144,7 +143,7 @@ mod tests {
// Convert the Source into a Feed and index it
let feed = source.into_feed(true).unwrap();
index(&feed);
feed.index().unwrap();
// Get the Podcast
let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();

View File

@@ -81,7 +81,6 @@ pub fn get_pixbuf_from_path(pd: &PodcastCoverQuery, size: u32) -> Option<Pixbuf>
#[cfg(test)]
mod tests {
use hammond_data::Source;
use hammond_data::feed::index;
use hammond_data::dbqueries;
use super::*;
@@ -99,7 +98,7 @@ mod tests {
// Convert the Source into a Feed and index it
let feed = source.into_feed(true).unwrap();
index(&feed);
feed.index().unwrap();
// Get the Podcast
let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();