Initial local_uri support for the downloader.
This commit is contained in:
parent
d5c4b13b4d
commit
828a12054d
@ -4,11 +4,17 @@ use diesel::prelude::*;
|
||||
|
||||
use std::fs::{DirBuilder, File};
|
||||
use std::io::{BufWriter, Read, Write};
|
||||
use std::path::Path;
|
||||
|
||||
use errors::*;
|
||||
use dbqueries;
|
||||
|
||||
// Adapted from https://github.com/mattgathu/rget .
|
||||
/// I never wanted to write a custom downloader.
|
||||
/// Sorry to those who will have to work with that code.
|
||||
/// Would much rather use a crate,
|
||||
/// or bindings for a lib like youtube-dl(python),
|
||||
/// But can't seem to find one.
|
||||
pub fn download_to(target: &str, url: &str) -> Result<()> {
|
||||
let mut resp = reqwest::get(url)?;
|
||||
info!("GET request to: {}", url);
|
||||
@ -32,7 +38,6 @@ pub fn download_to(target: &str, url: &str) -> Result<()> {
|
||||
|
||||
let mut writer = BufWriter::new(File::create(target)?);
|
||||
|
||||
// FIXME: not running
|
||||
loop {
|
||||
let mut buffer = vec![0; chunk_size];
|
||||
let bcount = resp.read(&mut buffer[..]).unwrap();
|
||||
@ -49,11 +54,15 @@ pub fn download_to(target: &str, url: &str) -> Result<()> {
|
||||
|
||||
// Initial messy prototype; the queries load a lot of unneeded data.
|
||||
pub fn latest_dl(connection: &SqliteConnection, limit: u32) -> Result<()> {
|
||||
use models::Episode;
|
||||
|
||||
let pds = dbqueries::get_podcasts(connection)?;
|
||||
|
||||
pds.iter()
|
||||
// TODO when for_each reaches stable:
|
||||
// Remove all the ugly folds(_) and replace map() with for_each().
|
||||
.map(|x| -> Result<()> {
|
||||
let eps;
|
||||
let mut eps;
|
||||
if limit == 0 {
|
||||
eps = dbqueries::get_pd_episodes(connection, &x)?;
|
||||
} else {
|
||||
@ -67,12 +76,29 @@ pub fn latest_dl(connection: &SqliteConnection, limit: u32) -> Result<()> {
|
||||
DirBuilder::new().recursive(true).create(&dl_fold).unwrap();
|
||||
|
||||
// Download the episodes
|
||||
eps.iter()
|
||||
eps.iter_mut()
|
||||
.map(|y| -> Result<()> {
|
||||
let ext = y.uri().split(".").last().unwrap();
|
||||
let dlpath = format!("{}/{}.{}", dl_fold, y.title().unwrap(), ext);
|
||||
// Check if it's already downloaded
|
||||
if let Some(foo) = y.local_uri().clone(){
|
||||
if Path::new(foo).exists() {
|
||||
return Ok(());
|
||||
}
|
||||
y.save_changes::<Episode>(connection)?;
|
||||
()
|
||||
};
|
||||
|
||||
// Unreliable and hacky way to extract the file extension from the url.
|
||||
let ext = y.uri().split(".").last().unwrap().to_owned();
|
||||
|
||||
// Construct the download path.
|
||||
let dlpath = format!("{}/{}.{}", dl_fold, y.title().unwrap().to_owned(), ext);
|
||||
info!("Downloading {:?} into: {}", y.title(), dlpath);
|
||||
// TODO: implement .part files
|
||||
download_to(&dlpath, y.uri())?;
|
||||
|
||||
// If the download succeeds, set the episode's local_uri to dlpath.
|
||||
y.set_local_uri(Some(&dlpath));
|
||||
y.save_changes::<Episode>(connection)?;
|
||||
Ok(())
|
||||
})
|
||||
.fold((), |(), _| ());
|
||||
|
||||
@ -24,17 +24,13 @@ pub fn parse_episode<'a>(item: &'a Item, parent_id: i32) -> Result<models::NewEp
|
||||
let title = item.title();
|
||||
let description = item.description();
|
||||
let guid = item.guid().map(|x| x.value());
|
||||
let local_uri = None;
|
||||
|
||||
let mut uri = item.enclosure().map(|x| x.url());
|
||||
if uri == None {
|
||||
uri = item.link();
|
||||
}
|
||||
|
||||
// FIXME:
|
||||
// probably needs to be removed from NewEpisode,
|
||||
// and have separate logic to handle local_files
|
||||
let local_uri = None;
|
||||
|
||||
let date = parse_from_rfc2822_with_fallback(
|
||||
// Default to the rfc2822 representation of epoch 0.
|
||||
item.pub_date().unwrap_or("Thu, 1 Jan 1970 00:00:00 +0000"),
|
||||
|
||||
Loading…
Reference in New Issue
Block a user