From fc693a569b99e06d6889be8b745ae070402b8f12 Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Tue, 3 Oct 2017 12:01:01 +0300 Subject: [PATCH] Sort of works atm. --- src/cli.rs | 4 ++-- src/dbqueries.rs | 7 +++++++ src/downloader.rs | 48 ++++++++++++++++++++++++++++++++++++++++------- src/index_feed.rs | 48 ++++++++++++++++++++++++----------------------- src/lib.rs | 5 +++++ src/models.rs | 11 ++++++----- src/schema.rs | 2 +- 7 files changed, 87 insertions(+), 38 deletions(-) diff --git a/src/cli.rs b/src/cli.rs index 22403e6..8782b12 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -20,8 +20,8 @@ pub fn run() -> Result<()> { info!("{:?}", foo); ::init()?; - downloader::download_to("./foo", "http://traffic.megaphone.fm/FL8700626063.mp3")?; - // ::index_feed::foo(); + let db = ::establish_connection(); + downloader::latest_dl(&db)?; Ok(()) } diff --git a/src/dbqueries.rs b/src/dbqueries.rs index 45095e4..2a5e893 100644 --- a/src/dbqueries.rs +++ b/src/dbqueries.rs @@ -15,6 +15,13 @@ pub fn get_podcasts(con: &SqliteConnection) -> QueryResult> { pds } +// Maybe later. +// pub fn get_podcasts_ids(con: &SqliteConnection) -> QueryResult> { +// use schema::podcast::dsl::*; + +// let pds = podcast.select(id).load::(con); +// pds +// } pub fn get_episodes(con: &SqliteConnection) -> QueryResult> { use schema::episode::dsl::*; diff --git a/src/downloader.rs b/src/downloader.rs index b621b0c..ef17725 100644 --- a/src/downloader.rs +++ b/src/downloader.rs @@ -1,9 +1,12 @@ use reqwest; use hyper::header::*; +use diesel::prelude::*; -use std::fs::File; +use std::fs::{File, DirBuilder}; use std::io::{BufWriter, Read, Write}; + use errors::*; +use dbqueries; // Adapted from https://github.com/mattgathu/rget . 
pub fn download_to(target: &str, url: &str) -> Result<()> { @@ -19,18 +22,17 @@ pub fn download_to(target: &str, url: &str) -> Result<()> { info!("Content Type: {:?}", ct_type); // FIXME - let out_file = target.to_owned() + "/bar.mp3"; - info!("Save destination: {}", out_file); + // let out_file = target.to_owned() + "/bar.mp3"; + info!("Save destination: {}", target); let chunk_size = match ct_len { Some(x) => x as usize / 99, - None => 1024usize, // default chunk size + None => 1024 as usize, // default chunk size }; - // let foo_file = - - let mut writer = BufWriter::new(File::create(out_file)?); + let mut writer = BufWriter::new(File::create(target)?); + // FIXME: not running loop { let mut buffer = vec![0; chunk_size]; let bcount = resp.read(&mut buffer[..]).unwrap(); @@ -44,3 +46,35 @@ pub fn download_to(target: &str, url: &str) -> Result<()> { } Ok(()) } + +// Initial messy prototype, queries load a lot of not needed stuff. +pub fn latest_dl(connection: &SqliteConnection) -> Result<()> { + let pds = dbqueries::get_podcasts(connection)?; + + pds.iter() + .map(|x| -> Result<()> { + let eps = dbqueries::get_pd_episodes(connection, &x)?; + + // It might be better to make it a hash of the title + let dl_fold = format!("{}/{}", ::DL_DIR.to_str().unwrap(), x.title()); + + // Create the folder + DirBuilder::new().recursive(true).create(&dl_fold).unwrap(); + + // Download the episodes + eps.iter() + .map(|y| -> Result<()> { + let ext = y.uri().split(".").last().unwrap(); + let dlpath = format!("{}/{}.{}", dl_fold, y.title().unwrap(), ext); + info!("Downloading {:?} into: {}", y.title(), dlpath); + download_to(&dlpath, y.uri())?; + Ok(()) + }) + .fold((), |(), _| ()); + + Ok(()) + }) + .fold((), |(), _| ()); + + Ok(()) +} diff --git a/src/index_feed.rs b/src/index_feed.rs index 5c5a29f..956580a 100644 --- a/src/index_feed.rs +++ b/src/index_feed.rs @@ -6,7 +6,7 @@ use rss; use reqwest; use rayon::prelude::*; use std::sync::{Arc, Mutex}; - + use schema; use dbqueries;
use feedparser; @@ -83,11 +83,11 @@ pub fn index_loop(db: SqliteConnection) -> Result<()> { // f.par_iter_mut().for_each(|&mut (ref mut req, ref source)| { // TODO: Once for_each is stable, uncomment above line and delete collect. - let _ : Vec<_> = f.par_iter_mut() - .map(|&mut (ref mut req, ref source)| { - complete_index_from_source(req, source, m.clone()).unwrap(); - }) - .collect(); + let _: Vec<_> = f.par_iter_mut() + .map(|&mut (ref mut req, ref source)| { + complete_index_from_source(req, source, m.clone()).unwrap(); + }) + .collect(); Ok(()) } @@ -257,10 +257,11 @@ mod tests { "http://feeds.feedburner.com/linuxunplugged", ]; - inpt.iter().map(|feed| { - index_source(&db, &NewSource::new_with_uri(feed)).unwrap() - }) - .fold((), |(), _| ()); + inpt.iter() + .map(|feed| { + index_source(&db, &NewSource::new_with_uri(feed)).unwrap() + }) + .fold((), |(), _| ()); index_loop(db).unwrap(); @@ -297,21 +298,22 @@ mod tests { ), ]; - urls.iter().map(|&(path, url)| { - let tempdb = m.lock().unwrap(); - // Create and insert a Source into db - let s = insert_return_source(&tempdb, url).unwrap(); - drop(tempdb); + urls.iter() + .map(|&(path, url)| { + let tempdb = m.lock().unwrap(); + // Create and insert a Source into db + let s = insert_return_source(&tempdb, url).unwrap(); + drop(tempdb); - // open the xml file - let feed = fs::File::open(path).unwrap(); - // parse it into a channel - let chan = rss::Channel::read_from(BufReader::new(feed)).unwrap(); + // open the xml file + let feed = fs::File::open(path).unwrap(); + // parse it into a channel + let chan = rss::Channel::read_from(BufReader::new(feed)).unwrap(); - // Index the channel - complete_index(m.clone(), chan, &s).unwrap(); - }) - .fold((), |(), _| ()); + // Index the channel + complete_index(m.clone(), chan, &s).unwrap(); + }) + .fold((), |(), _| ()); // Assert the index rows equal the controlled results let tempdb = m.lock().unwrap(); diff --git a/src/lib.rs b/src/lib.rs index 1f7ba80..58f1902 100644 --- 
a/src/lib.rs +++ b/src/lib.rs @@ -100,6 +100,11 @@ lazy_static!{ HAMMOND_XDG.place_data_file("hammond.db").unwrap() }; + + static ref DL_DIR: PathBuf = { + &HAMMOND_DATA; + HAMMOND_XDG.create_data_directory("Downloads").unwrap() + }; } pub fn init() -> Result<()> { diff --git a/src/models.rs b/src/models.rs index cd08319..5932ade 100644 --- a/src/models.rs +++ b/src/models.rs @@ -14,7 +14,7 @@ use errors::*; pub struct Episode { id: i32, title: Option, - uri: Option, + uri: String, local_uri: Option, description: Option, published_date: Option, @@ -37,12 +37,13 @@ impl Episode { self.title = value.map(|x| x.to_string()); } - pub fn uri(&self) -> Option<&str> { - self.uri.as_ref().map(|s| s.as_str()) + /// uri is guaranteed to exist based on the db rules + pub fn uri(&self) -> &str { + self.uri.as_ref() } - pub fn set_uri(&mut self, value: Option<&str>) { - self.uri = value.map(|x| x.to_string()); + pub fn set_uri(&mut self, value: &str) { + self.uri = value.to_string(); } pub fn local_uri(&self) -> Option<&str> { diff --git a/src/schema.rs b/src/schema.rs index 6e99078..1f85408 100644 --- a/src/schema.rs +++ b/src/schema.rs @@ -2,7 +2,7 @@ table! { episode (id) { id -> Integer, title -> Nullable, - uri -> Nullable, + uri -> Text, local_uri -> Nullable, description -> Nullable, published_date -> Nullable,