code cleanup, initial index_feed.rs tests.

Jordan Petridis 2017-09-30 22:39:08 +03:00
parent 108ed34b40
commit 1ca6c50fb1
No known key found for this signature in database
GPG Key ID: CEABAD9F5683B9A6
5 changed files with 1373 additions and 27 deletions

Cargo.lock (generated, new file): 1336 additions; diff suppressed because it is too large.

@@ -4,7 +4,7 @@ version = "0.1.0"
 authors = ["Jordan Petridis <jordanpetridis@protonmail.com>"]
 
 [dependencies]
-rfc822_sanitizer = "0.2.0"
+rfc822_sanitizer = "0.3.0"
 rayon = "0.8.2"
 regex = "0.2"
 error-chain = "0.11.0"
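The only change here is the rfc822_sanitizer bump from 0.2.0 to 0.3.0, the crate the feed parser below leans on to repair malformed RFC 2822 dates. A minimal sketch of the call it provides, assuming only the parse_from_rfc2822_with_fallback function that the parse_episode hunk uses:

extern crate rfc822_sanitizer;

use rfc822_sanitizer::parse_from_rfc2822_with_fallback;

fn main() {
    // 2016-08-05 was actually a Friday, so strict RFC 2822 parsing rejects
    // the bogus weekday; the fallback sanitizes the string and retries.
    let date = parse_from_rfc2822_with_fallback("Thu, 05 Aug 2016 06:00:00 -0400");
    println!("{:?}", date.map(|d| d.to_rfc2822()));
}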


@@ -20,7 +20,7 @@ pub fn run() -> Result<()> {
     info!("{:?}", foo);
 
     ::init()?;
-    ::index_feed::foo();
+    // ::index_feed::foo();
 
     Ok(())
 }


@@ -35,15 +35,17 @@ pub fn parse_episode<'a>(item: &'a Item, parent_id: i32) -> Result<models::NewEpisode> {
     // and have separate logic to handle local_files
     let local_uri = None;
 
-    let date = parse_from_rfc2822_with_fallback(item.pub_date().unwrap());
+    let date = parse_from_rfc2822_with_fallback(
+        // Default to the rfc2822 representation of epoch 0.
+        item.pub_date().unwrap_or("Thu, 1 Jan 1970 00:00:00 +0000"),
+    );
 
     // Should treat information from the rss feeds as invalid by default.
     // Case: Thu, 05 Aug 2016 06:00:00 -0400 <-- That was actually a Friday.
     let pub_date = date.map(|x| x.to_rfc2822()).ok();
     let epoch = date.map(|x| x.timestamp() as i32).unwrap_or(0);
 
-    let length = item.enclosure()
-        .map(|x| x.length().parse().unwrap_or(0));
+    let length = item.enclosure().map(|x| x.length().parse().unwrap_or(0));
 
     let foo = models::NewEpisode {
         title,
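The unwrap_or default is the substantive fix here: items that ship no pub_date no longer panic, they just sort to the beginning of time. A worked check of that default, using only chrono (the type the fallback parser returns):

extern crate chrono;

use chrono::DateTime;

fn main() {
    // "Thu, 1 Jan 1970 00:00:00 +0000" is the RFC 2822 spelling of the Unix
    // epoch, so the `epoch` binding in the hunk above comes out as 0 for
    // dateless items.
    let default = DateTime::parse_from_rfc2822("Thu, 1 Jan 1970 00:00:00 +0000").unwrap();
    assert_eq!(default.timestamp(), 0);
}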


@@ -11,28 +11,6 @@ use feedparser;
 use errors::*;
 use models::*;
 
-pub fn foo() {
-    let inpt = vec![
-        "https://request-for-explanation.github.io/podcast/rss.xml",
-        "https://feeds.feedburner.com/InterceptedWithJeremyScahill",
-        "http://feeds.propublica.org/propublica/podcast",
-        "http://feeds.feedburner.com/linuxunplugged",
-    ];
-
-    let db = ::establish_connection();
-
-    for feed in inpt.iter() {
-        match insert_return_source(&db, feed) {
-            Ok(_) => {}
-            Err(foo) => {
-                error!("Error: {}", foo);
-                continue;
-            }
-        }
-    }
-
-    index_loop(db).unwrap();
-}
-
 fn index_source(con: &SqliteConnection, foo: &NewSource) -> Result<()> {
     match dbqueries::load_source(con, foo.uri) {
         Ok(_) => Ok(()),
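The deleted foo() was ad-hoc scaffolding; its job moves into the test module added below. What that test exercises is the guard visible in the trailing context: index_source only inserts a source when load_source cannot already find it. The shape of that check, sketched with a HashSet standing in for the real sources table (the diesel-backed load_source/NewSource plumbing is not part of this hunk):

use std::collections::HashSet;

// Toy model of index_source: indexing the same uri twice is a no-op,
// which is what lets the test below unwrap every insertion safely.
fn index_source(db: &mut HashSet<String>, uri: &str) -> Result<(), String> {
    if db.contains(uri) {
        return Ok(()); // already indexed, nothing to do
    }
    db.insert(uri.to_owned());
    Ok(())
}

fn main() {
    let mut db = HashSet::new();
    index_source(&mut db, "http://feeds.feedburner.com/linuxunplugged").unwrap();
    index_source(&mut db, "http://feeds.feedburner.com/linuxunplugged").unwrap();
    assert_eq!(db.len(), 1);
}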
@@ -200,3 +178,33 @@ fn refresh_source(
     feed.update_etag(connection, &req)?;
     Ok((req, feed.clone()))
 }
+
+#[cfg(test)]
+mod tests {
+    extern crate tempdir;
+
+    use std::fs::File;
+    use std::io::stdout;
+
+    use diesel::prelude::*;
+    use super::*;
+
+    embed_migrations!("migrations/");
+
+    #[test]
+    fn foo() {
+        let tmp_dir = tempdir::TempDir::new("hammond_unit_test").unwrap();
+        let db_path = tmp_dir.path().join("foo_tests.db");
+        let db = SqliteConnection::establish(db_path.to_str().unwrap()).unwrap();
+        embedded_migrations::run_with_output(&db, &mut stdout()).unwrap();
+
+        let inpt = vec![
+            "https://request-for-explanation.github.io/podcast/rss.xml",
+            "https://feeds.feedburner.com/InterceptedWithJeremyScahill",
+            "http://feeds.propublica.org/propublica/podcast",
+            "http://feeds.feedburner.com/linuxunplugged",
+        ];
+
+        inpt.iter()
+            .for_each(|feed| index_source(&db, &NewSource::new_with_uri(feed)).unwrap());
+
+        index_loop(db).unwrap();
+    }
+}
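Two notes on the fixture pattern above. Each run builds a fresh SQLite database inside a TempDir and applies the embedded migrations, so tests stay isolated and clean up after themselves when the TempDir drops; and the four feed URLs are live, which makes this a network-dependent integration test rather than a pure unit test. The fixture boils down to a reusable helper; a sketch, reusing the imports and embed_migrations! invocation of the module above (temp_db is a hypothetical name, not in the commit):

// Inside the same `mod tests` as above.
fn temp_db(name: &str) -> (tempdir::TempDir, SqliteConnection) {
    let tmp_dir = tempdir::TempDir::new("hammond_unit_test").unwrap();
    let db_path = tmp_dir.path().join(name);
    let db = SqliteConnection::establish(db_path.to_str().unwrap()).unwrap();
    embedded_migrations::run_with_output(&db, &mut stdout()).unwrap();
    // Return the TempDir alongside the connection: dropping it deletes the
    // directory, so callers must keep it alive for the duration of the test.
    (tmp_dir, db)
}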