Cleaned up the downloader module a bit and added some unit tests.

Jordan Petridis 2017-11-08 20:22:07 +02:00
parent 4e6ed416ee
commit 8c5cdf75a7
No known key found for this signature in database
GPG Key ID: CEABAD9F5683B9A6
7 changed files with 96 additions and 84 deletions

Cargo.lock (generated)

@@ -568,6 +568,7 @@ dependencies = [
 "hammond-data 0.1.0",
 "hyper 0.11.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
+"mime 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
 "reqwest 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "rss 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]


@@ -8,6 +8,7 @@ workspace = "../"
 hammond-data = {path = "../hammond-data"}
 error-chain = "0.11"
 log = "0.3"
+mime = "0.3"
 reqwest = "0.8"
 hyper = "0.11"
 diesel = { version = "0.16", features = ["sqlite"] } 
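
The new mime dependency is only wired in below as a commented-out `// use mime::Mime;` import, so nothing in this commit actually uses it yet. As a rough, hedged sketch (not from the commit) of the kind of lookup it would enable with mime 0.3 - deriving a file extension from the response's Content-Type instead of guessing it from the URL, which the FIXME in downloader.rs complains about - something like this could work; the helper name is hypothetical:

extern crate mime; // mime = "0.3", as added in Cargo.toml above

use mime::Mime;

// Hypothetical helper: map a Content-Type string to a file extension.
// For the simple image types a feed serves, the MIME subtype usually
// doubles as a usable extension ("png", "jpeg", ...).
fn extension_for(content_type: &str) -> Option<String> {
    let m: Mime = content_type.parse().ok()?;
    Some(m.subtype().to_string())
}

fn main() {
    assert_eq!(extension_for("image/png"), Some("png".to_string()));
    assert_eq!(extension_for("not a mime type"), None);
    println!("ok");
}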


@@ -1,13 +1,14 @@
 use reqwest;
 use hyper::header::*;
+// use mime::Mime;
 
 use std::fs::{rename, DirBuilder, File};
 use std::io::{BufWriter, Read, Write};
 use std::path::Path;
+// use std::str::FromStr;
 
 use errors::*;
 use hammond_data::index_feed::Database;
-use hammond_data::dbqueries;
 use hammond_data::models::Episode;
 use hammond_data::{DL_DIR, HAMMOND_CACHE};
@@ -17,7 +18,7 @@ use hammond_data::{DL_DIR, HAMMOND_CACHE};
 // Would much rather use a crate,
 // or bindings for a lib like youtube-dl(python),
 // But cant seem to find one.
-pub fn download_to(target: &str, url: &str) -> Result<()> {
+pub fn download_to(dir: &str, filename: &str, url: &str) -> Result<String> {
     info!("GET request to: {}", url);
     let client = reqwest::Client::builder().referer(false).build()?;
     let mut resp = client.get(url).send()?;
@@ -27,73 +28,47 @@ pub fn download_to(target: &str, url: &str) -> Result<()> {
     let headers = resp.headers().clone();
     let ct_len = headers.get::<ContentLength>().map(|ct_len| **ct_len);
-    let ct_type = headers.get::<ContentType>().unwrap();
+    let ct_type = headers.get::<ContentType>();
 
     ct_len.map(|x| info!("File Lenght: {}", x));
-    info!("Content Type: {:?}", ct_type);
-    info!("Save destination: {}", target);
-
-    let chunk_size = match ct_len {
-        Some(x) => x as usize / 99,
-        None => 1024 as usize, // default chunk size
-    };
-
-    let out_file = format!("{}.part", target);
-    let mut writer = BufWriter::new(File::create(&out_file)?);
-
-    loop {
-        let mut buffer = vec![0; chunk_size];
-        let bcount = resp.read(&mut buffer[..]).unwrap();
-        buffer.truncate(bcount);
-        if !buffer.is_empty() {
-            writer.write_all(buffer.as_slice()).unwrap();
-        } else {
-            break;
-        }
-    }
-
-    rename(out_file, target)?;
-
-    Ok(())
-}
-
-// Initial messy prototype, queries load alot of not needed stuff.
-// TODO: Refactor
-pub fn latest_dl(connection: &Database, limit: u32) -> Result<()> {
-    let pds = {
-        let tempdb = connection.lock().unwrap();
-        dbqueries::get_podcasts(&tempdb)?
-    };
-
-    let _: Vec<_> = pds.iter()
-        .map(|x| -> Result<()> {
-            let mut eps = {
-                let tempdb = connection.lock().unwrap();
-                if limit == 0 {
-                    dbqueries::get_pd_episodes(&tempdb, x)?
-                } else {
-                    dbqueries::get_pd_episodes_limit(&tempdb, x, limit)?
-                }
-            };
-
-            let download_fold = get_download_folder(x.title())?;
-
-            // Download the episodes
-            eps.iter_mut().for_each(|ep| {
-                let x = get_episode(connection, ep, &download_fold);
-                if let Err(err) = x {
-                    error!("An Error occured while downloading an episode.");
-                    error!("Error: {}", err);
-                };
-            });
-
-            Ok(())
-        })
-        .collect();
-
-    Ok(())
-}
-
-// TODO: Right unit test
+    ct_type.map(|x| info!("Content Type: {}", x));
+
+    let target = format!("{}/{}", dir, filename);
+    // let target = format!("{}{}",dir, filename, ext);
+    return save_io(&target, &mut resp, ct_len);
+
+    // Ok(String::from(""))
+    panic!("foo");
+}
+
+fn save_io(
+    target: &str,
+    resp: &mut reqwest::Response,
+    content_lenght: Option<u64>,
+) -> Result<String> {
+    info!("Downloading into: {}", target);
+
+    let chunk_size = match content_lenght {
+        Some(x) => x as usize / 99,
+        None => 1024 as usize, // default chunk size
+    };
+
+    let out_file = format!("{}.part", target);
+    let mut writer = BufWriter::new(File::create(&out_file)?);
+
+    loop {
+        let mut buffer = vec![0; chunk_size];
+        let bcount = resp.read(&mut buffer[..])?;
+        buffer.truncate(bcount);
+        if !buffer.is_empty() {
+            writer.write_all(buffer.as_slice())?;
+        } else {
+            break;
+        }
+    }
+
+    rename(out_file, target)?;
+    info!("Downloading of {} completed succesfully.", target);
+    Ok(target.to_string())
+}
+
 pub fn get_download_folder(pd_title: &str) -> Result<String> {
     // It might be better to make it a hash of the title
     let download_fold = format!("{}/{}", DL_DIR.to_str().unwrap(), pd_title);
@@ -116,33 +91,26 @@ pub fn get_episode(connection: &Database, ep: &mut Episode, download_folder: &st
     };
 
     // FIXME: Unreliable and hacky way to extract the file extension from the url.
+    // https://gitlab.gnome.org/alatiera/Hammond/issues/5
     let ext = ep.uri().split('.').last().unwrap().to_owned();
 
     // Construct the download path.
     // TODO: Check if its a valid path
-    let dlpath = format!(
-        "{}/{}.{}",
-        download_folder,
-        ep.title().unwrap().to_owned(),
-        ext
-    );
-
-    // info!("Downloading {:?} into: {}", y.title(), dlpath);
+    let file_name = format!("/{}.{}", ep.title().unwrap().to_owned(), ext);
 
     let uri = ep.uri().to_owned();
-    let res = download_to(&dlpath, uri.as_str());
-    if let Err(err) = res {
-        error!("Something whent wrong while downloading.");
-        error!("Error: {}", err);
-        return Err(err);
+    let res = download_to(download_folder, &file_name, uri.as_str());
+    if res.is_ok() {
+        // If download succedes set episode local_uri to dlpath.
+        let dlpath = res.unwrap();
+        ep.set_local_uri(Some(&dlpath));
+        ep.save(connection)?;
+        Ok(())
     } else {
-        info!("Download of {} finished.", uri);
-    };
-
-    // If download succedes set episode local_uri to dlpath.
-    ep.set_local_uri(Some(&dlpath));
-    ep.save(connection)?;
-    Ok(())
+        error!("Something whent wrong while downloading.");
+        Err(res.unwrap_err())
+    }
 }
 
 // pub fn cache_image(pd: &Podcast) -> Option<String> {
@@ -154,7 +122,7 @@ pub fn cache_image(title: &str, image_uri: Option<&str>) -> Option<String> {
         return None;
     }
 
-    // FIXME:
+    // FIXME: https://gitlab.gnome.org/alatiera/Hammond/issues/5
     let ext = url.split('.').last().unwrap();
 
     let download_fold = format!("{}{}", HAMMOND_CACHE.to_str().unwrap(), title);
@@ -162,13 +130,16 @@ pub fn cache_image(title: &str, image_uri: Option<&str>) -> Option<String> {
         .recursive(true)
         .create(&download_fold)
         .unwrap();
 
-    let dlpath = format!("{}/{}.{}", download_fold, title, ext);
+    let file_name = format!("cover.{}", ext);
+    // This will need rework once the #5 is completed.
+    let dlpath = format!("{}/{}", download_fold, file_name);
 
     if Path::new(&dlpath).exists() {
         return Some(dlpath);
     }
 
-    if let Err(err) = download_to(&dlpath, url) {
+    if let Err(err) = download_to(&download_fold, &file_name, url) {
         error!("Failed to get feed image.");
         error!("Error: {}", err);
         return None;
@@ -179,3 +150,27 @@ pub fn cache_image(title: &str, image_uri: Option<&str>) -> Option<String> {
     }
     None
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use hammond_data::{DL_DIR, HAMMOND_CACHE};
+
+    #[test]
+    fn test_get_dl_folder() {
+        let foo_ = format!("{}/{}", DL_DIR.to_str().unwrap(), "foo");
+        assert_eq!(get_download_folder("foo").unwrap(), foo_);
+    }
+
+    #[test]
+    fn test_cache_image() {
+        let img_path =
+            cache_image("New Rustacean", Some("http://newrustacean.coe/podcast.png")).unwrap();
+        let foo_ = format!(
+            "{}{}/cover.png",
+            HAMMOND_CACHE.to_str().unwrap(),
+            "New Rustacean"
+        );
+        assert_eq!(img_path, foo_);
+    }
+}
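
The new save_io() above keeps the old download strategy: stream the body into a "<target>.part" file and only rename it to the final path once everything has been written out. Below is a minimal, self-contained sketch of that pattern using only std (no reqwest); the function name and the .max(1) guard are assumptions of this sketch, not part of the commit - the guard matters because a length hint under 99 bytes would otherwise yield a zero-sized chunk and an empty read loop.

use std::fs::{rename, File};
use std::io::{BufWriter, Read, Result, Write};

// Stream `source` into "<target>.part", then rename it to `target`,
// mirroring what save_io() does with a reqwest::Response.
fn save_part_then_rename<R: Read>(target: &str, source: &mut R, len_hint: Option<u64>) -> Result<String> {
    // Derive a chunk size from the Content-Length hint, defaulting to 1 KiB.
    let chunk_size = match len_hint {
        Some(x) => (x as usize / 99).max(1),
        None => 1024,
    };

    // Write to a temporary ".part" file so an interrupted transfer never
    // leaves a truncated file at the final path.
    let out_file = format!("{}.part", target);
    let mut writer = BufWriter::new(File::create(&out_file)?);

    loop {
        let mut buffer = vec![0; chunk_size];
        let bcount = source.read(&mut buffer[..])?;
        buffer.truncate(bcount);
        if buffer.is_empty() {
            break;
        }
        writer.write_all(buffer.as_slice())?;
    }
    writer.flush()?;

    // The file only gets its real name once the whole body is on disk.
    rename(&out_file, target)?;
    Ok(target.to_string())
}

fn main() -> Result<()> {
    // Any Read works; a byte slice stands in for the HTTP response body.
    let mut body: &[u8] = b"hello world";
    let path = save_part_then_rename("/tmp/hammond-sketch.txt", &mut body, Some(11))?;
    println!("saved to {}", path);
    Ok(())
}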


@@ -7,6 +7,7 @@ extern crate hammond_data;
 extern crate hyper;
 #[macro_use]
 extern crate log;
+extern crate mime;
 extern crate reqwest;
 extern crate rss;


@@ -6,6 +6,8 @@ pub fn init() -> Result<(), Error> {
     let res_bytes = include_bytes!("../resources/resources.gresource");
 
     // Create Resource it will live as long the value lives.
+    // TODO: change it into Bytes::From_static once the fix lands
+    // https://bugzilla.gnome.org/show_bug.cgi?id=790030
     let gbytes = Bytes::from(&res_bytes.as_ref());
     let resource = Resource::new_from_data(&gbytes)?;
     // let resource = Resource::new_from_data(&res_bytes.as_ref().into())?;


@@ -46,7 +46,7 @@ fn create_flowbox_child(db: &Database, pd: &Podcast) -> gtk::FlowBoxChild {
     pd_title.set_text(pd.title());
 
-    let cover = get_pixbuf_from_path(pd.image_uri(), pd.title());
+    let cover = get_pixbuf_from_path(pd.title(), pd.image_uri());
     if let Some(img) = cover {
         pd_cover.set_from_pixbuf(&img);
     };


@@ -40,7 +40,7 @@ pub fn podcast_widget(db: &Database, stack: &gtk::Stack, pd: &Podcast) -> gtk::B
         buff.set_text(pd.description());
     }
 
-    let img = get_pixbuf_from_path(pd.image_uri(), pd.title());
+    let img = get_pixbuf_from_path(pd.title(), pd.image_uri());
     if let Some(i) = img {
         cover.set_from_pixbuf(&i);
     }
@@ -101,7 +101,7 @@ fn show_played_button(db: &Database, pd: &Podcast, played_button: &gtk::Button)
     }
 }
 
-pub fn get_pixbuf_from_path(img_path: Option<&str>, pd_title: &str) -> Option<Pixbuf> {
+pub fn get_pixbuf_from_path(pd_title: &str, img_path: Option<&str>) -> Option<Pixbuf> {
     let img_path = downloader::cache_image(pd_title, img_path);
     if let Some(i) = img_path {
         Pixbuf::new_from_file_at_scale(&i, 256, 256, true).ok()
@@ -127,3 +127,15 @@ pub fn update_podcast_widget(db: &Database, stack: &gtk::Stack, pd: &Podcast) {
     stack.set_visible_child_name(&vis);
     old.destroy();
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_get_pixbuf_from_path() {
+        let pxbuf =
+            get_pixbuf_from_path("New Rustacean", Some("http://newrustacean.com/podcast.png"));
+        assert!(pxbuf.is_some());
+    }
+}