Sort of works atm.

This commit is contained in:
Jordan Petridis 2017-10-03 12:01:01 +03:00
parent fe7ef323c4
commit fc693a569b
No known key found for this signature in database
GPG Key ID: CEABAD9F5683B9A6
7 changed files with 87 additions and 38 deletions

View File

@@ -20,8 +20,8 @@ pub fn run() -> Result<()> {
info!("{:?}", foo);
::init()?;
downloader::download_to("./foo", "http://traffic.megaphone.fm/FL8700626063.mp3")?;
// ::index_feed::foo();
let db = ::establish_connection();
downloader::latest_dl(&db)?;
Ok(())
}

View File

@@ -15,6 +15,13 @@ pub fn get_podcasts(con: &SqliteConnection) -> QueryResult<Vec<Podcast>> {
pds
}
// Maybe later.
// pub fn get_podcasts_ids(con: &SqliteConnection) -> QueryResult<Vec<i32>> {
// use schema::podcast::dsl::*;
// let pds = podcast.select(id).load::<i32>(con);
// pds
// }
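For context, a hypothetical caller for these helpers might look like the sketch below (the function name and logging are illustrative, not part of this commit; establish_connection() and the getters are the crate items shown in the hunks here and above):

fn list_counts() -> QueryResult<()> {
    // Sketch only: open a connection and count the indexed rows.
    let db = ::establish_connection();
    let podcasts = dbqueries::get_podcasts(&db)?;
    let episodes = dbqueries::get_episodes(&db)?;
    info!("{} podcasts, {} episodes", podcasts.len(), episodes.len());
    Ok(())
}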
pub fn get_episodes(con: &SqliteConnection) -> QueryResult<Vec<Episode>> {
use schema::episode::dsl::*;

View File

@@ -1,9 +1,12 @@
use reqwest;
use hyper::header::*;
use diesel::prelude::*;
use std::fs::File;
use std::fs::{File, DirBuilder};
use std::io::{BufWriter, Read, Write};
use errors::*;
use dbqueries;
// Adapted from https://github.com/mattgathu/rget .
pub fn download_to(target: &str, url: &str) -> Result<()> {
@@ -19,18 +22,17 @@ pub fn download_to(target: &str, url: &str) -> Result<()> {
info!("Content Type: {:?}", ct_type);
// FIXME
let out_file = target.to_owned() + "/bar.mp3";
info!("Save destination: {}", out_file);
// let out_file = target.to_owned() + "/bar.mp3";
info!("Save destination: {}", target);
let chunk_size = match ct_len {
Some(x) => x as usize / 99,
None => 1024usize, // default chunk size
None => 1024 as usize, // default chunk size
};
// let foo_file =
let mut writer = BufWriter::new(File::create(out_file)?);
let mut writer = BufWriter::new(File::create(target)?);
// FIXME: not running
loop {
let mut buffer = vec![0; chunk_size];
let bcount = resp.read(&mut buffer[..]).unwrap();
@@ -44,3 +46,35 @@ pub fn download_to(target: &str, url: &str) -> Result<()> {
}
Ok(())
}
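The download loop above is the usual fixed-buffer copy; a self-contained sketch of the same pattern (generic names, not this crate's API):

use std::io::{Read, Write};

// Read into a fixed buffer until the reader reports 0 bytes (EOF),
// writing each chunk out as it arrives; returns the bytes copied.
fn copy_chunked<R: Read, W: Write>(reader: &mut R, writer: &mut W, chunk: usize) -> std::io::Result<u64> {
    let mut buffer = vec![0; chunk];
    let mut total = 0u64;
    loop {
        let bcount = reader.read(&mut buffer[..])?;
        if bcount == 0 {
            break;
        }
        writer.write_all(&buffer[..bcount])?;
        total += bcount as u64;
    }
    Ok(total)
}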
// Initial messy prototype; the queries load a lot of data that is not needed.
pub fn latest_dl(connection: &SqliteConnection) -> Result<()> {
let pds = dbqueries::get_podcasts(connection)?;
pds.iter()
.map(|x| -> Result<()> {
let eps = dbqueries::get_pd_episodes(connection, &x)?;
// It might be better to make it a hash of the title
let dl_fold = format!("{}/{}", ::DL_DIR.to_str().unwrap(), x.title());
// Create the folder
DirBuilder::new().recursive(true).create(&dl_fold).unwrap();
// Download the episodes
eps.iter()
.map(|y| -> Result<()> {
let ext = y.uri().split(".").last().unwrap();
let dlpath = format!("{}/{}.{}", dl_fold, y.title().unwrap(), ext);
info!("Downloading {:?} into: {}", y.title(), dlpath);
download_to(&dlpath, y.uri())?;
Ok(())
})
.fold((), |(), _| ());
Ok(())
})
.fold((), |(), _| ());
Ok(())
}
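On the "hash of the title" idea from the comment above, a minimal sketch using std's DefaultHasher (the helper name is hypothetical, and this hash is not guaranteed stable across Rust versions):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Derive a filesystem-friendly folder name from a podcast title,
// instead of using the raw title as the directory name.
fn folder_name_from_title(title: &str) -> String {
    let mut hasher = DefaultHasher::new();
    title.hash(&mut hasher);
    format!("{:x}", hasher.finish())
}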

View File

@@ -6,7 +6,7 @@ use rss;
use reqwest;
use rayon::prelude::*;
use std::sync::{Arc, Mutex};
use schema;
use dbqueries;
use feedparser;
@@ -83,11 +83,11 @@ pub fn index_loop(db: SqliteConnection) -> Result<()> {
// f.par_iter_mut().for_each(|&mut (ref mut req, ref source)| {
// TODO: Once for_each is stable, uncomment above line and delete collect.
let _ : Vec<_> = f.par_iter_mut()
.map(|&mut (ref mut req, ref source)| {
complete_index_from_source(req, source, m.clone()).unwrap();
})
.collect();
let _: Vec<_> = f.par_iter_mut()
.map(|&mut (ref mut req, ref source)| {
complete_index_from_source(req, source, m.clone()).unwrap();
})
.collect();
Ok(())
}
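The TODO above refers to the commented par_iter_mut line; once for_each can be used here, the collect() workaround should reduce to something like this sketch:

// Same work as the map + collect above, without building a throwaway Vec.
f.par_iter_mut().for_each(|&mut (ref mut req, ref source)| {
    complete_index_from_source(req, source, m.clone()).unwrap();
});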
@@ -257,10 +257,11 @@ mod tests {
"http://feeds.feedburner.com/linuxunplugged",
];
inpt.iter().map(|feed| {
index_source(&db, &NewSource::new_with_uri(feed)).unwrap()
})
.fold((), |(), _| ());
inpt.iter()
.map(|feed| {
index_source(&db, &NewSource::new_with_uri(feed)).unwrap()
})
.fold((), |(), _| ());
index_loop(db).unwrap();
@@ -297,21 +298,22 @@ mod tests {
),
];
urls.iter().map(|&(path, url)| {
let tempdb = m.lock().unwrap();
// Create and insert a Source into db
let s = insert_return_source(&tempdb, url).unwrap();
drop(tempdb);
urls.iter()
.map(|&(path, url)| {
let tempdb = m.lock().unwrap();
// Create and insert a Source into db
let s = insert_return_source(&tempdb, url).unwrap();
drop(tempdb);
// open the xml file
let feed = fs::File::open(path).unwrap();
// parse it into a channel
let chan = rss::Channel::read_from(BufReader::new(feed)).unwrap();
// open the xml file
let feed = fs::File::open(path).unwrap();
// parse it into a channel
let chan = rss::Channel::read_from(BufReader::new(feed)).unwrap();
// Index the channel
complete_index(m.clone(), chan, &s).unwrap();
})
.fold((), |(), _| ());
// Index the channel
complete_index(m.clone(), chan, &s).unwrap();
})
.fold((), |(), _| ());
// Assert the index rows equal the controlled results
let tempdb = m.lock().unwrap();

View File

@@ -100,6 +100,11 @@ lazy_static!{
HAMMOND_XDG.place_data_file("hammond.db").unwrap()
};
static ref DL_DIR: PathBuf = {
&HAMMOND_DATA;
HAMMOND_XDG.create_data_directory("Downloads").unwrap()
};
}
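One note on the bare `&HAMMOND_DATA;` statement: it borrows lazy_static's generated wrapper without dereferencing it, so it likely does not run that static's initializer; a deref does. A stripped-down illustration with hypothetical names:

#[macro_use]
extern crate lazy_static;
use std::path::PathBuf;

lazy_static! {
    // Hypothetical stand-in for HAMMOND_DATA above.
    static ref DATA_DIR: PathBuf = PathBuf::from("/tmp/hammond");
}

fn main() {
    // `&DATA_DIR;` alone only borrows the wrapper struct; the deref
    // below is what actually evaluates the lazy initializer.
    let _ = &*DATA_DIR;
    println!("{}", DATA_DIR.display());
}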
pub fn init() -> Result<()> {

View File

@@ -14,7 +14,7 @@ use errors::*;
pub struct Episode {
id: i32,
title: Option<String>,
uri: Option<String>,
uri: String,
local_uri: Option<String>,
description: Option<String>,
published_date: Option<String>,
@@ -37,12 +37,13 @@ impl Episode {
self.title = value.map(|x| x.to_string());
}
pub fn uri(&self) -> Option<&str> {
self.uri.as_ref().map(|s| s.as_str())
/// uri is guaranteed to exist based on the db rules.
pub fn uri(&self) -> &str {
self.uri.as_ref()
}
pub fn set_uri(&mut self, value: Option<&str>) {
self.uri = value.map(|x| x.to_string());
pub fn set_uri(&mut self, value: &str) {
self.uri = value.to_string();
}
pub fn local_uri(&self) -> Option<&str> {

View File

@@ -2,7 +2,7 @@ table! {
episode (id) {
id -> Integer,
title -> Nullable<Text>,
uri -> Nullable<Text>,
uri -> Text,
local_uri -> Nullable<Text>,
description -> Nullable<Text>,
published_date -> Nullable<Text>,
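Since uri is now Text rather than Nullable<Text>, Diesel deserializes it as String instead of Option<String>. A minimal query sketch against this column (assumes the crate's schema module, as in the hunks above):

use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use schema::episode::dsl::*;

// uri loads as String now, not Option<String>.
fn all_uris(con: &SqliteConnection) -> QueryResult<Vec<String>> {
    episode.select(uri).load::<String>(con)
}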