hammond-data/src/feed.rs API changes.

Jordan Petridis 2017-11-23 18:48:41 +02:00
parent a58671ea9a
commit aa7c493e81
No known key found for this signature in database
GPG Key ID: CEABAD9F5683B9A6
7 changed files with 132 additions and 35 deletions

View File

@@ -7,9 +7,11 @@ use std::sync::{Arc, Mutex};
 use std::io;
 // use std::time::Duration;
-use xdg_;
 use errors::*;
+#[cfg(not(test))]
+use xdg_;
 // type Pool = r2d2::Pool<ConnectionManager<SqliteConnection>>;
 type Database = Arc<Mutex<SqliteConnection>>;
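The `use xdg_;` import moves below `use errors::*;` and gains a `#[cfg(not(test))]` gate, so the XDG paths are only pulled in for regular builds; test builds can point the connection at a throwaway location instead. A minimal sketch of the same gating pattern; `db_path` is a hypothetical helper and the file names are illustrative, not part of this commit:

#[cfg(not(test))]
fn db_path() -> std::path::PathBuf {
    // Regular builds resolve the database under the XDG data directory.
    xdg_::HAMMOND_DATA.join("hammond.db")
}

#[cfg(test)]
fn db_path() -> std::path::PathBuf {
    // Test builds use a temporary file so runs stay isolated.
    std::env::temp_dir().join("hammond-test.db")
}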

View File

@@ -1,11 +1,11 @@
 use rayon::prelude::*;
-use diesel::{Identifiable, QueryResult};
+use diesel::Identifiable;
 use rss;
 
 use dbqueries;
 use parser;
-use models::{Podcast, Source};
+use models::{Episode, NewEpisode, NewPodcast, Podcast, Source};
 use errors::*;
 
 #[derive(Debug)]
@@ -26,29 +26,22 @@ impl Feed {
         }
     }
 
-    fn index(&self) -> Result<()> {
+    pub fn index(&self) -> Result<()> {
         let pd = self.get_podcast()?;
         self.index_channel_items(&pd)?;
         Ok(())
     }
 
-    pub fn index_channel(&self) -> QueryResult<()> {
-        let new_pd = parser::new_podcast(&self.channel, *self.source.id());
-        new_pd.index()
-    }
-
-    pub fn get_podcast(&self) -> Result<Podcast> {
-        parser::new_podcast(&self.channel, *self.source.id()).into_podcast()
+    #[allow(dead_code)]
+    fn index_channel(&self) -> Result<()> {
+        self.parse_channel().index()?;
+        Ok(())
     }
 
     // TODO: Refactor transactions and find a way to do it in parallel.
     fn index_channel_items(&self, pd: &Podcast) -> Result<()> {
-        let items = self.channel.items();
-        let episodes: Vec<_> = items
-            .into_par_iter()
-            .map(|item| parser::new_episode(item, *pd.id()))
-            .collect();
+        let episodes = self.parse_channel_items(pd);
 
         episodes.into_iter().for_each(|x| {
             let e = x.index();
@@ -59,6 +52,39 @@ impl Feed {
         });
         Ok(())
     }
+
+    fn parse_channel(&self) -> NewPodcast {
+        parser::new_podcast(&self.channel, *self.source.id())
+    }
+
+    fn parse_channel_items(&self, pd: &Podcast) -> Vec<NewEpisode> {
+        let items = self.channel.items();
+        let new_episodes: Vec<_> = items
+            .into_par_iter()
+            .map(|item| parser::new_episode(item, *pd.id()))
+            .collect();
+
+        new_episodes
+    }
+
+    fn get_podcast(&self) -> Result<Podcast> {
+        self.parse_channel().into_podcast()
+    }
+
+    #[allow(dead_code)]
+    fn get_episodes(&self) -> Result<Vec<Episode>> {
+        let pd = self.get_podcast()?;
+        let episodes: Vec<_> = self.parse_channel_items(&pd)
+            .into_par_iter()
+            .filter_map(|ep| ep.into_episode().ok())
+            .collect();
+
+        Ok(episodes)
+        // self.index_channel_items(&pd)?;
+        // Ok(dbqueries::get_pd_episodes(&pd)?)
+    }
 }
 
 pub fn index_all() -> Result<()> {
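`index()` is now the single public entry point on `Feed`, while `index_channel`, `get_podcast`, and `get_episodes` become private helpers over the new `parse_channel`/`parse_channel_items` parsing layer. A minimal caller sketch, assuming the `Source::from_url` / `into_feed` flow exercised in the tests below; `subscribe` is a hypothetical wrapper:

// Hypothetical caller; error handling is delegated to `?`.
fn subscribe(url: &str) -> Result<()> {
    let feed = Source::from_url(url)?.into_feed()?;
    // Indexes the channel row and all of its episode items.
    feed.index()?;
    Ok(())
}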
@@ -179,4 +205,41 @@ mod tests {
         assert_eq!(dbqueries::get_podcasts().unwrap().len(), 4);
         assert_eq!(dbqueries::get_episodes().unwrap().len(), 274);
     }
+
+    #[test]
+    fn test_partial_index_podcast() {
+        let url = "https://feeds.feedburner.com/InterceptedWithJeremyScahill";
+
+        let s1 = Source::from_url(url).unwrap();
+        let s2 = Source::from_url(url).unwrap();
+        assert_eq!(s1, s2);
+        assert_eq!(s1.id(), s2.id());
+
+        let f1 = s1.into_feed().unwrap();
+        let f2 = s2.into_feed().unwrap();
+
+        let p1 = f1.get_podcast().unwrap();
+        let p2 = {
+            f2.index().unwrap();
+            f2.get_podcast().unwrap()
+        };
+        assert_eq!(p1, p2);
+        assert_eq!(p1.id(), p2.id());
+        assert_eq!(p1.source_id(), p2.source_id());
+
+        let eps1 = f1.get_episodes().unwrap();
+        let eps2 = {
+            f2.index().unwrap();
+            f2.get_episodes().unwrap()
+        };
+
+        eps1.into_par_iter()
+            .zip(eps2)
+            .for_each(|(ep1, ep2): (Episode, Episode)| {
+                assert_eq!(ep1, ep2);
+                assert_eq!(ep1.id(), ep2.id());
+                assert_eq!(ep1.podcast_id(), ep2.podcast_id());
+            });
+    }
 }

View File

@@ -45,11 +45,11 @@ pub mod xdg_ {
         xdg::BaseDirectories::with_prefix("hammond").unwrap()
     };
 
-    static ref _HAMMOND_DATA: PathBuf = {
+    pub static ref HAMMOND_DATA: PathBuf = {
         HAMMOND_XDG.create_data_directory(HAMMOND_XDG.get_data_home()).unwrap()
     };
 
-    static ref _HAMMOND_CONFIG: PathBuf = {
+    pub static ref HAMMOND_CONFIG: PathBuf = {
         HAMMOND_XDG.create_config_directory(HAMMOND_XDG.get_config_home()).unwrap()
     };
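With the underscore prefixes dropped and the statics made `pub`, other modules can resolve the data and config directories directly; a sketch of a hypothetical consumer (the file names here are illustrative, not from this commit):

// lazy_static's Deref makes the statics usable like plain PathBufs.
let db = xdg_::HAMMOND_DATA.join("hammond.db");
let settings = xdg_::HAMMOND_CONFIG.join("settings.toml");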

View File

@@ -1,7 +1,8 @@
 use diesel::prelude::*;
 
 use schema::{episode, podcast, source};
-use models::{Podcast, Source};
+use models::{Episode, Podcast, Source};
 use utils::url_cleaner;
 use errors::*;
 use dbqueries;
@@ -17,8 +18,9 @@ pub struct NewSource {
 impl NewSource {
     pub fn new_with_uri(uri: &str) -> NewSource {
         let uri = url_cleaner(uri);
+
         NewSource {
-            uri: uri.to_string(),
+            uri,
             last_modified: None,
             http_etag: None,
         }
@@ -56,15 +58,26 @@ impl NewEpisode {
     // TODO: Currently using diesel from master git.
     // Watch out for v0.99.0 beta and change the toml.
     // TODO: Refactor into batch indexes instead.
+    pub fn into_episode(self) -> Result<Episode> {
+        self.index()?;
+        Ok(dbqueries::get_episode_from_uri(&self.uri.unwrap())?)
+    }
+
     pub fn index(&self) -> QueryResult<()> {
         let ep = dbqueries::get_episode_from_uri(&self.uri.clone().unwrap());
 
         match ep {
-            Ok(foo) => if foo.title() != self.title.as_ref().map(|x| x.as_str())
-                || foo.published_date() != self.published_date.as_ref().map(|x| x.as_str())
-            {
-                dbqueries::replace_episode(self)?;
-            },
+            Ok(foo) => {
+                if foo.podcast_id() != self.podcast_id {
+                    error!("NEP pid: {}, EP pid: {}", self.podcast_id, foo.podcast_id());
+                };
+
+                if foo.title() != self.title.as_ref().map(|x| x.as_str())
+                    || foo.published_date() != self.published_date.as_ref().map(|x| x.as_str())
+                {
+                    dbqueries::replace_episode(self)?;
+                }
+            }
             Err(_) => {
                 dbqueries::insert_new_episode(self)?;
             }
@@ -95,9 +108,15 @@ impl NewPodcast {
         let pd = dbqueries::get_podcast_from_title(&self.title);
 
         match pd {
-            Ok(foo) => if foo.link() != self.link {
-                dbqueries::replace_podcast(self)?;
-            },
+            Ok(foo) => {
+                if foo.source_id() != self.source_id {
+                    error!("NPD sid: {}, PD sid: {}", self.source_id, foo.source_id());
+                };
+
+                if foo.link() != self.link {
+                    dbqueries::replace_podcast(self)?;
+                }
+            }
             Err(_) => {
                 dbqueries::insert_new_podcast(self)?;
             }
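`NewEpisode::index` and `NewPodcast::index` now share the same update-or-insert shape: look the row up by its natural key, log an error if the foreign key disagrees, replace the row only when a tracked field changed, and insert when the lookup fails. A self-contained sketch of that control flow; `Row` and the `HashMap` stand in for the diesel-backed table and the `dbqueries` calls above:

use std::collections::HashMap;

struct Row {
    link: String,
    source_id: i32,
}

fn index_row(store: &mut HashMap<String, Row>, title: &str, new: Row) {
    // Decide first, mutate after; this also keeps the borrow checker happy.
    let replace = match store.get(title) {
        Some(old) => {
            if old.source_id != new.source_id {
                // Mirrors the error!() diagnostics above.
                eprintln!("new sid: {}, old sid: {}", new.source_id, old.source_id);
            }
            // Replace only when a tracked field actually changed.
            old.link != new.link
        }
        // No such row yet: insert unconditionally.
        None => true,
    };

    if replace {
        store.insert(title.to_owned(), new);
    }
}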

View File

@@ -14,7 +14,7 @@ use database::connection;
 use std::io::Read;
 use std::str::FromStr;
 
-#[derive(Queryable, Identifiable, AsChangeset, Associations)]
+#[derive(Queryable, Identifiable, AsChangeset, Associations, PartialEq)]
 #[table_name = "episode"]
 #[changeset_options(treat_none_as_null = "true")]
 #[belongs_to(Podcast, foreign_key = "podcast_id")]
@@ -127,6 +127,10 @@ impl Episode {
         self.favorite = b
     }
 
+    pub fn podcast_id(&self) -> i32 {
+        self.podcast_id
+    }
+
     pub fn save(&self) -> QueryResult<Episode> {
         let db = connection();
         let tempdb = db.lock().unwrap();
@@ -135,7 +139,7 @@
     }
 }
 
-#[derive(Queryable, Identifiable, AsChangeset, Associations)]
+#[derive(Queryable, Identifiable, AsChangeset, Associations, PartialEq)]
 #[belongs_to(Source, foreign_key = "source_id")]
 #[changeset_options(treat_none_as_null = "true")]
 #[table_name = "podcast"]
@@ -217,7 +221,7 @@ impl Podcast {
     }
 }
 
-#[derive(Queryable, Identifiable, AsChangeset)]
+#[derive(Queryable, Identifiable, AsChangeset, PartialEq)]
 #[table_name = "source"]
 #[changeset_options(treat_none_as_null = "true")]
 #[derive(Debug, Clone)]
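The three `PartialEq` derives exist so the new feed.rs test can compare whole records with `assert_eq!` rather than field by field; a minimal illustration of what the derive enables:

// Compiles only because Episode now derives PartialEq.
fn same_episode(a: &Episode, b: &Episode) -> bool {
    a == b
}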

View File

@@ -193,6 +193,7 @@ pub fn cache_image(pd: &Podcast) -> Option<String> {
 mod tests {
     use super::*;
     use hammond_data::Source;
+    use hammond_data::dbqueries;
 
     use std::fs;
@@ -205,13 +206,17 @@ mod tests {
     #[test]
     fn test_cache_image() {
-        let pd = Source::from_url("http://www.newrustacean.com/feed.xml")
+        let url = "http://www.newrustacean.com/feed.xml";
+
+        Source::from_url(url)
             .unwrap()
             .into_feed()
             .unwrap()
-            .get_podcast()
+            .index()
             .unwrap();
+
+        let pd = dbqueries::get_podcast_from_title("New Rustacean").unwrap();
         let img_path = cache_image(&pd);
         let foo_ = format!(
             "{}{}/cover.png",

View File

@@ -123,13 +123,17 @@ mod tests {
     #[test]
     fn test_get_pixbuf_from_path() {
-        let pd = Source::from_url("http://www.newrustacean.com/feed.xml")
+        let url = "http://www.newrustacean.com/feed.xml";
+
+        Source::from_url(url)
             .unwrap()
             .into_feed()
             .unwrap()
-            .get_podcast()
+            .index()
             .unwrap();
+
+        let pd = dbqueries::get_podcast_from_title("New Rustacean").unwrap();
         let pxbuf = get_pixbuf_from_path(&pd);
         assert!(pxbuf.is_some());
     }
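Both test suites now share the same setup: index through the public `Feed::index`, then fetch the `Podcast` via `dbqueries::get_podcast_from_title`, since `get_podcast` is no longer public. That setup could be extracted into a helper; a hypothetical sketch, assuming the imports already used in these tests:

// Hypothetical shared test helper, not part of this commit.
fn index_and_fetch(url: &str, title: &str) -> Podcast {
    Source::from_url(url)
        .unwrap()
        .into_feed()
        .unwrap()
        .index()
        .unwrap();
    dbqueries::get_podcast_from_title(title).unwrap()
}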