Cleaned up the downloader module a bit and added some unit tests.

Jordan Petridis 2017-11-08 20:22:07 +02:00
parent 4e6ed416ee
commit 8c5cdf75a7
7 changed files with 96 additions and 84 deletions

Cargo.lock generated
View File

@@ -568,6 +568,7 @@ dependencies = [
"hammond-data 0.1.0",
"hyper 0.11.6 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
"mime 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"reqwest 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rss 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]

View File

@@ -8,6 +8,7 @@ workspace = "../"
hammond-data = {path = "../hammond-data"}
error-chain = "0.11"
log = "0.3"
mime = "0.3"
reqwest = "0.8"
hyper = "0.11"
diesel = { version = "0.16", features = ["sqlite"] }
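
The new mime dependency is not actually used yet (the use mime::Mime; import in the downloader is still commented out). A rough sketch of how it could eventually replace the URL-based extension guess flagged in the downloader's FIXME, assuming the extension gets derived from the response Content-Type; the helper name and the mapping are illustrative only:

// Hypothetical helper: map a response Content-Type to a file extension,
// returning None for types we do not recognise.
fn ext_from_mime(ct: &mime::Mime) -> Option<&'static str> {
    match (ct.type_().as_str(), ct.subtype().as_str()) {
        ("audio", "mpeg") => Some("mp3"),
        ("audio", "mp4") | ("audio", "x-m4a") => Some("m4a"),
        ("audio", "ogg") => Some("ogg"),
        ("image", "png") => Some("png"),
        ("image", "jpeg") => Some("jpg"),
        _ => None,
    }
}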

View File

@@ -1,13 +1,14 @@
use reqwest;
use hyper::header::*;
// use mime::Mime;
use std::fs::{rename, DirBuilder, File};
use std::io::{BufWriter, Read, Write};
use std::path::Path;
// use std::str::FromStr;
use errors::*;
use hammond_data::index_feed::Database;
use hammond_data::dbqueries;
use hammond_data::models::Episode;
use hammond_data::{DL_DIR, HAMMOND_CACHE};
@@ -17,7 +18,7 @@ use hammond_data::{DL_DIR, HAMMOND_CACHE};
// Would much rather use a crate,
// or bindings for a lib like youtube-dl(python),
// But can't seem to find one.
pub fn download_to(target: &str, url: &str) -> Result<()> {
pub fn download_to(dir: &str, filename: &str, url: &str) -> Result<String> {
info!("GET request to: {}", url);
let client = reqwest::Client::builder().referer(false).build()?;
let mut resp = client.get(url).send()?;
@@ -27,73 +28,47 @@ pub fn download_to(target: &str, url: &str) -> Result<()> {
let headers = resp.headers().clone();
let ct_len = headers.get::<ContentLength>().map(|ct_len| **ct_len);
let ct_type = headers.get::<ContentType>().unwrap();
let ct_type = headers.get::<ContentType>();
ct_len.map(|x| info!("File Length: {}", x));
info!("Content Type: {:?}", ct_type);
ct_type.map(|x| info!("Content Type: {}", x));
info!("Save destination: {}", target);
let chunk_size = match ct_len {
Some(x) => x as usize / 99,
None => 1024 as usize, // default chunk size
};
let out_file = format!("{}.part", target);
let mut writer = BufWriter::new(File::create(&out_file)?);
loop {
let mut buffer = vec![0; chunk_size];
let bcount = resp.read(&mut buffer[..]).unwrap();
buffer.truncate(bcount);
if !buffer.is_empty() {
writer.write_all(buffer.as_slice()).unwrap();
} else {
break;
}
}
rename(out_file, target)?;
let target = format!("{}/{}", dir, filename);
// let target = format!("{}{}",dir, filename, ext);
return save_io(&target, &mut resp, ct_len);
}
Ok(())
// Ok(String::from(""))
panic!("foo");
}
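// A hypothetical call with the new signature; the directory, file name, and URL
// below are illustrative only:
// let local_path = download_to("/tmp/hammond", "episode.mp3", "http://example.com/episode.mp3")?;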
// Initial messy prototype; the queries load a lot of data that is not needed.
// TODO: Refactor
pub fn latest_dl(connection: &Database, limit: u32) -> Result<()> {
let pds = {
let tempdb = connection.lock().unwrap();
dbqueries::get_podcasts(&tempdb)?
fn save_io(
target: &str,
resp: &mut reqwest::Response,
content_length: Option<u64>,
) -> Result<String> {
info!("Downloading into: {}", target);
let chunk_size = match content_length {
Some(x) => x as usize / 99,
None => 1024 as usize, // default chunk size
};
let _: Vec<_> = pds.iter()
.map(|x| -> Result<()> {
let mut eps = {
let tempdb = connection.lock().unwrap();
if limit == 0 {
dbqueries::get_pd_episodes(&tempdb, x)?
} else {
dbqueries::get_pd_episodes_limit(&tempdb, x, limit)?
}
};
let out_file = format!("{}.part", target);
let mut writer = BufWriter::new(File::create(&out_file)?);
let download_fold = get_download_folder(x.title())?;
// Download the episodes
eps.iter_mut().for_each(|ep| {
let x = get_episode(connection, ep, &download_fold);
if let Err(err) = x {
error!("An Error occured while downloading an episode.");
error!("Error: {}", err);
};
});
Ok(())
})
.collect();
Ok(())
loop {
let mut buffer = vec![0; chunk_size];
let bcount = resp.read(&mut buffer[..])?;
buffer.truncate(bcount);
if !buffer.is_empty() {
writer.write_all(buffer.as_slice())?;
} else {
break;
}
}
rename(out_file, target)?;
info!("Downloading of {} completed succesfully.", target);
Ok(target.to_string())
}
// TODO: Write unit tests
pub fn get_download_folder(pd_title: &str) -> Result<String> {
// It might be better to make it a hash of the title; see the sketch after this file's diff.
let download_fold = format!("{}/{}", DL_DIR.to_str().unwrap(), pd_title);
@@ -116,33 +91,26 @@ pub fn get_episode(connection: &Database, ep: &mut Episode, download_folder: &st
};
// FIXME: Unreliable and hacky way to extract the file extension from the url.
// https://gitlab.gnome.org/alatiera/Hammond/issues/5
let ext = ep.uri().split('.').last().unwrap().to_owned();
// Construct the download path.
// TODO: Check if it's a valid path
let dlpath = format!(
"{}/{}.{}",
download_folder,
ep.title().unwrap().to_owned(),
ext
);
// info!("Downloading {:?} into: {}", y.title(), dlpath);
let file_name = format!("/{}.{}", ep.title().unwrap().to_owned(), ext);
let uri = ep.uri().to_owned();
let res = download_to(&dlpath, uri.as_str());
let res = download_to(download_folder, &file_name, uri.as_str());
if let Err(err) = res {
error!("Something whent wrong while downloading.");
error!("Error: {}", err);
return Err(err);
if res.is_ok() {
// If the download succeeds, set the episode's local_uri to dlpath.
let dlpath = res.unwrap();
ep.set_local_uri(Some(&dlpath));
ep.save(connection)?;
Ok(())
} else {
info!("Download of {} finished.", uri);
};
// If the download succeeds, set the episode's local_uri to dlpath.
ep.set_local_uri(Some(&dlpath));
ep.save(connection)?;
Ok(())
error!("Something whent wrong while downloading.");
Err(res.unwrap_err())
}
}
// pub fn cache_image(pd: &Podcast) -> Option<String> {
@@ -154,7 +122,7 @@ pub fn cache_image(title: &str, image_uri: Option<&str>) -> Option<String> {
return None;
}
// FIXME:
// FIXME: https://gitlab.gnome.org/alatiera/Hammond/issues/5
let ext = url.split('.').last().unwrap();
let download_fold = format!("{}{}", HAMMOND_CACHE.to_str().unwrap(), title);
@@ -162,13 +130,16 @@ pub fn cache_image(title: &str, image_uri: Option<&str>) -> Option<String> {
.recursive(true)
.create(&download_fold)
.unwrap();
let dlpath = format!("{}/{}.{}", download_fold, title, ext);
let file_name = format!("cover.{}", ext);
// This will need rework once #5 is completed.
let dlpath = format!("{}/{}", download_fold, file_name);
if Path::new(&dlpath).exists() {
return Some(dlpath);
}
if let Err(err) = download_to(&dlpath, url) {
if let Err(err) = download_to(&download_fold, &file_name, url) {
error!("Failed to get feed image.");
error!("Error: {}", err);
return None;
@@ -179,3 +150,27 @@ pub fn cache_image(title: &str, image_uri: Option<&str>) -> Option<String> {
}
None
}
#[cfg(test)]
mod tests {
use super::*;
use hammond_data::{DL_DIR, HAMMOND_CACHE};
#[test]
fn test_get_dl_folder() {
let foo_ = format!("{}/{}", DL_DIR.to_str().unwrap(), "foo");
assert_eq!(get_download_folder("foo").unwrap(), foo_);
}
#[test]
fn test_cache_image() {
let img_path =
cache_image("New Rustacean", Some("http://newrustacean.coe/podcast.png")).unwrap();
let foo_ = format!(
"{}{}/cover.png",
HAMMOND_CACHE.to_str().unwrap(),
"New Rustacean"
);
assert_eq!(img_path, foo_);
}
}
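
The comment in get_download_folder suggests that a hash of the title might be preferable to the raw title. A minimal sketch of that idea, assuming it lives in this module (so DL_DIR and DirBuilder are in scope) and that std's DefaultHasher and the error-chain io conversion are acceptable; the function name is hypothetical:

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hypothetical variant that derives the directory name from a hash of the
// podcast title instead of the title itself.
pub fn get_hashed_download_folder(pd_title: &str) -> Result<String> {
    let mut hasher = DefaultHasher::new();
    pd_title.hash(&mut hasher);
    let download_fold = format!("{}/{:x}", DL_DIR.to_str().unwrap(), hasher.finish());

    DirBuilder::new().recursive(true).create(&download_fold)?;
    Ok(download_fold)
}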

View File

@@ -7,6 +7,7 @@ extern crate hammond_data;
extern crate hyper;
#[macro_use]
extern crate log;
extern crate mime;
extern crate reqwest;
extern crate rss;

View File

@@ -6,6 +6,8 @@ pub fn init() -> Result<(), Error> {
let res_bytes = include_bytes!("../resources/resources.gresource");
// Create the Resource; it will live as long as the value lives.
// TODO: change it into Bytes::from_static once the fix lands
// https://bugzilla.gnome.org/show_bug.cgi?id=790030
let gbytes = Bytes::from(&res_bytes.as_ref());
let resource = Resource::new_from_data(&gbytes)?;
// let resource = Resource::new_from_data(&res_bytes.as_ref().into())?;
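// Once the bug above is fixed, the intermediate copy could likely be dropped,
// e.g. (sketch only; assumes glib exposes Bytes::from_static for this case):
// let gbytes = Bytes::from_static(res_bytes.as_ref());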

View File

@@ -46,7 +46,7 @@ fn create_flowbox_child(db: &Database, pd: &Podcast) -> gtk::FlowBoxChild {
pd_title.set_text(pd.title());
let cover = get_pixbuf_from_path(pd.image_uri(), pd.title());
let cover = get_pixbuf_from_path(pd.title(), pd.image_uri());
if let Some(img) = cover {
pd_cover.set_from_pixbuf(&img);
};

View File

@@ -40,7 +40,7 @@ pub fn podcast_widget(db: &Database, stack: &gtk::Stack, pd: &Podcast) -> gtk::B
buff.set_text(pd.description());
}
let img = get_pixbuf_from_path(pd.image_uri(), pd.title());
let img = get_pixbuf_from_path(pd.title(), pd.image_uri());
if let Some(i) = img {
cover.set_from_pixbuf(&i);
}
@@ -101,7 +101,7 @@ fn show_played_button(db: &Database, pd: &Podcast, played_button: &gtk::Button)
}
}
pub fn get_pixbuf_from_path(img_path: Option<&str>, pd_title: &str) -> Option<Pixbuf> {
pub fn get_pixbuf_from_path(pd_title: &str, img_path: Option<&str>) -> Option<Pixbuf> {
let img_path = downloader::cache_image(pd_title, img_path);
if let Some(i) = img_path {
Pixbuf::new_from_file_at_scale(&i, 256, 256, true).ok()
@@ -127,3 +127,15 @@ pub fn update_podcast_widget(db: &Database, stack: &gtk::Stack, pd: &Podcast) {
stack.set_visible_child_name(&vis);
old.destroy();
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_get_pixbuf_from_path() {
let pxbuf =
get_pixbuf_from_path("New Rustacean", Some("http://newrustacean.com/podcast.png"));
assert!(pxbuf.is_some());
}
}