Added some comments into the downloader module.
parent 4bf84ed170
commit 1e45adc034
@@ -14,19 +14,27 @@ use hammond_data::index_feed::Database;
 use hammond_data::models::{Episode, Podcast};
 use hammond_data::{DL_DIR, HAMMOND_CACHE};
 
+// TODO: Replace path that are of type &str with std::path.
+// TODO: Have a convention/document absolute/relative paths, if they should end with / or not.
+
 // Adapted from https://github.com/mattgathu/rget .
 // I never wanted to write a custom downloader.
 // Sorry to those who will have to work with that code.
 // Would much rather use a crate,
 // or bindings for a lib like youtube-dl(python),
 // But cant seem to find one.
+// TODO: Write unit-tests.
 fn download_into(dir: &str, file_title: &str, url: &str) -> Result<String> {
     info!("GET request to: {}", url);
     let client = reqwest::Client::builder().referer(false).build()?;
     let mut resp = client.get(url).send()?;
     info!("Status Resp: {}", resp.status());
 
-    if resp.status().is_success() {
+    if !resp.status().is_success() {
+        // TODO: Return an error instead of panicking.
+        panic!("Bad request response.");
+    }
+
     let headers = resp.headers().clone();
 
     let ct_len = headers.get::<ContentLength>().map(|ct_len| **ct_len);
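
Note: the new TODO above asks for an error instead of the panic!. A minimal sketch of that shape, using only std and an illustrative error type; the module's actual Result alias and error handling may differ.

use std::fmt;

// Hypothetical error carrying the offending HTTP status code.
#[derive(Debug)]
struct BadStatus(u16);

impl fmt::Display for BadStatus {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "request failed with HTTP status {}", self.0)
    }
}

impl std::error::Error for BadStatus {}

// Guard that could replace the panic!: bail out with Err instead of aborting.
fn ensure_success(status: u16) -> Result<(), BadStatus> {
    if status < 200 || status >= 300 {
        return Err(BadStatus(status));
    }
    Ok(())
}
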
@@ -35,6 +43,7 @@ fn download_into(dir: &str, file_title: &str, url: &str) -> Result<String> {
     ct_type.map(|x| info!("Content Type: {}", x));
 
     // This could be prettier.
+    // Determine the file extension from the http content-type header.
     let ext = if let Some(t) = ct_type {
         let mime = mime_guess::get_extensions(t.type_().as_ref(), t.subtype().as_ref());
         if let Some(m) = mime {
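
Note: the added comment documents the `ext` selection below, which goes through mime_guess. A self-contained sketch of the same idea with a hand-rolled mapping; the MIME types listed and the fallback value are only illustrative.

// Map a content-type header value to a file extension for the downloaded episode.
fn extension_for(content_type: Option<&str>) -> &'static str {
    match content_type {
        Some("audio/mpeg") | Some("audio/mp3") => "mp3",
        Some("audio/ogg") | Some("application/ogg") => "ogg",
        Some("audio/x-m4a") | Some("audio/mp4") => "m4a",
        // Unknown or missing header: fall back to a generic extension.
        _ => "mp3",
    }
}
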
@@ -63,19 +72,19 @@ fn download_into(dir: &str, file_title: &str, url: &str) -> Result<String> {
         rng.gen::<usize>()
     );
 
+    // Save requested content into the file.
     save_io(&out_file, &mut resp, ct_len)?;
 
     // Construct the desired path.
     let target = format!("{}/{}.{}", dir, file_title, ext);
-    // Rename/move the tempfile into a permanent place.
+    // Rename/move the tempfile into a permanent place upon success.
     rename(out_file, &target)?;
     info!("Downloading of {} completed succesfully.", &target);
-    return Ok(target);
-    }
-    // Ok(String::from(""))
-    panic!("Bad request response.");
+    Ok(target)
 }
 
+// TODO: Write unit-tests.
+/// Handles the I/O of fetching a remote file and saving into a Buffer and A File.
 fn save_io(file: &str, resp: &mut reqwest::Response, content_lenght: Option<u64>) -> Result<()> {
     info!("Downloading into: {}", file);
     let chunk_size = match content_lenght {
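
Note: the new doc comment describes save_io as the I/O half of the download. A minimal std-only sketch of the chunked-copy idea behind it; the chunk-size heuristic and file handling here are illustrative, not the module's actual implementation.

use std::fs::File;
use std::io::{self, Read, Write};

// Copy a response body into a file in chunks sized from the reported length.
fn copy_chunked<R: Read>(path: &str, src: &mut R, content_length: Option<u64>) -> io::Result<()> {
    // Pick a buffer size from the content length, clamped to something sane.
    let chunk = content_length.unwrap_or(8 * 1024).min(128 * 1024) as usize;
    let mut file = File::create(path)?;
    let mut buf = vec![0u8; chunk.max(1)];
    loop {
        let n = src.read(&mut buf)?;
        if n == 0 {
            break;
        }
        file.write_all(&buf[..n])?;
    }
    Ok(())
}
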
@@ -116,6 +125,7 @@ pub fn get_episode(connection: &Database, ep: &mut Episode, download_folder: &st
             return Ok(());
         }
 
+        // If the path is not valid, then set it to None.
         ep.set_local_uri(None);
         ep.save(connection)?;
     };
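
Note: this comment ties into the earlier TODO about moving from &str to std::path. A sketch of the validity check the comment describes, assuming the stored URI is a plain filesystem path; the function name is illustrative.

use std::path::Path;

// Return the path only if it still exists on disk; otherwise None,
// mirroring the "set it to None" behaviour described above.
fn valid_local_path(uri: &str) -> Option<&str> {
    if Path::new(uri).exists() {
        Some(uri)
    } else {
        None
    }
}
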
@@ -134,7 +144,10 @@ pub fn get_episode(connection: &Database, ep: &mut Episode, download_folder: &st
 }
 
 pub fn cache_image(pd: &Podcast) -> Option<String> {
-    if pd.image_uri().is_some() {
+    if pd.image_uri().is_none() {
+        return None;
+    }
+
     let url = pd.image_uri().unwrap().to_owned();
     if url == "" {
         return None;
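
Note: the refactor above replaces the is_some() wrapper with an early return, but still pairs is_none() with unwrap(). A sketch of how the same guard could use the ? operator on Option instead, assuming the getter returns an Option; names here are illustrative.

// Extract a non-empty image URL, or bail out with None.
fn image_url(image_uri: Option<&str>) -> Option<String> {
    let url = image_uri?.to_owned();
    if url.is_empty() {
        return None;
    }
    Some(url)
}
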
@@ -170,14 +183,12 @@ pub fn cache_image(pd: &Podcast) -> Option<String> {
     let dlpath = download_into(&download_fold, "cover", &url);
     if let Ok(path) = dlpath {
         info!("Cached img into: {}", &path);
-        return Some(path);
+        Some(path)
     } else {
         error!("Failed to get feed image.");
         error!("Error: {}", dlpath.unwrap_err());
-        return None;
-    };
-    }
     None
+    }
 }
 
 #[cfg(test)]
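
Note: the Ok/Err branches above could also be written with map_err + ok, keeping the logging while producing an Option<String>. This sketch assumes download_into returns a Result whose error type implements Display, and uses eprintln! instead of the crate's error! macro to stay self-contained.

// Log a failed download and flatten the Result into an Option.
fn log_and_flatten<E: std::fmt::Display>(dlpath: Result<String, E>) -> Option<String> {
    dlpath
        .map_err(|err| {
            eprintln!("Failed to get feed image.");
            eprintln!("Error: {}", err);
        })
        .ok()
}
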