Initial local_uri support for the downloader.

This commit is contained in:
Jordan Petridis 2017-10-03 13:49:32 +03:00
parent d5c4b13b4d
commit 828a12054d
No known key found for this signature in database
GPG Key ID: CEABAD9F5683B9A6
2 changed files with 32 additions and 10 deletions

View File

@ -4,11 +4,17 @@ use diesel::prelude::*;
use std::fs::{DirBuilder, File}; use std::fs::{DirBuilder, File};
use std::io::{BufWriter, Read, Write}; use std::io::{BufWriter, Read, Write};
use std::path::Path;
use errors::*; use errors::*;
use dbqueries; use dbqueries;
// Adapted from https://github.com/mattgathu/rget . // Adapted from https://github.com/mattgathu/rget .
/// I never wanted to write a custom downloader.
/// Sorry to those who will have to work with that code.
/// Would much rather use a crate,
/// or bindings for a lib like youtube-dl(python),
/// But cant seem to find one.
pub fn download_to(target: &str, url: &str) -> Result<()> { pub fn download_to(target: &str, url: &str) -> Result<()> {
let mut resp = reqwest::get(url)?; let mut resp = reqwest::get(url)?;
info!("GET request to: {}", url); info!("GET request to: {}", url);
@ -32,7 +38,6 @@ pub fn download_to(target: &str, url: &str) -> Result<()> {
let mut writer = BufWriter::new(File::create(target)?); let mut writer = BufWriter::new(File::create(target)?);
// FIXME: not running
loop { loop {
let mut buffer = vec![0; chunk_size]; let mut buffer = vec![0; chunk_size];
let bcount = resp.read(&mut buffer[..]).unwrap(); let bcount = resp.read(&mut buffer[..]).unwrap();
@ -49,11 +54,15 @@ pub fn download_to(target: &str, url: &str) -> Result<()> {
// Initial messy prototype, queries load alot of not needed stuff. // Initial messy prototype, queries load alot of not needed stuff.
pub fn latest_dl(connection: &SqliteConnection, limit: u32) -> Result<()> { pub fn latest_dl(connection: &SqliteConnection, limit: u32) -> Result<()> {
use models::Episode;
let pds = dbqueries::get_podcasts(connection)?; let pds = dbqueries::get_podcasts(connection)?;
pds.iter() pds.iter()
// TODO when for_each reaches stable:
// Remove all the ugly folds(_) and replace map() with for_each().
.map(|x| -> Result<()> { .map(|x| -> Result<()> {
let eps; let mut eps;
if limit == 0 { if limit == 0 {
eps = dbqueries::get_pd_episodes(connection, &x)?; eps = dbqueries::get_pd_episodes(connection, &x)?;
} else { } else {
@ -67,12 +76,29 @@ pub fn latest_dl(connection: &SqliteConnection, limit: u32) -> Result<()> {
DirBuilder::new().recursive(true).create(&dl_fold).unwrap(); DirBuilder::new().recursive(true).create(&dl_fold).unwrap();
// Download the episodes // Download the episodes
eps.iter() eps.iter_mut()
.map(|y| -> Result<()> { .map(|y| -> Result<()> {
let ext = y.uri().split(".").last().unwrap(); // Check if its alrdy downloaded
let dlpath = format!("{}/{}.{}", dl_fold, y.title().unwrap(), ext); if let Some(foo) = y.local_uri().clone(){
if Path::new(foo).exists() {
return Ok(());
}
y.save_changes::<Episode>(connection)?;
()
};
// Unreliable and hacky way to extract the file extension from the url.
let ext = y.uri().split(".").last().unwrap().to_owned();
// Construct the download path.
let dlpath = format!("{}/{}.{}", dl_fold, y.title().unwrap().to_owned(), ext);
info!("Downloading {:?} into: {}", y.title(), dlpath); info!("Downloading {:?} into: {}", y.title(), dlpath);
// TODO: implement .part files
download_to(&dlpath, y.uri())?; download_to(&dlpath, y.uri())?;
// If download succedes set episode local_uri to dlpath.
y.set_local_uri(Some(&dlpath));
y.save_changes::<Episode>(connection)?;
Ok(()) Ok(())
}) })
.fold((), |(), _| ()); .fold((), |(), _| ());

View File

@ -24,17 +24,13 @@ pub fn parse_episode<'a>(item: &'a Item, parent_id: i32) -> Result<models::NewEp
let title = item.title(); let title = item.title();
let description = item.description(); let description = item.description();
let guid = item.guid().map(|x| x.value()); let guid = item.guid().map(|x| x.value());
let local_uri = None;
let mut uri = item.enclosure().map(|x| x.url()); let mut uri = item.enclosure().map(|x| x.url());
if uri == None { if uri == None {
uri = item.link(); uri = item.link();
} }
// FIXME:
// probably needs to be removed from NewEpisode,
// and have seperate logic to handle local_files
let local_uri = None;
let date = parse_from_rfc2822_with_fallback( let date = parse_from_rfc2822_with_fallback(
// Default to rfc2822 represantation of epoch 0. // Default to rfc2822 represantation of epoch 0.
item.pub_date().unwrap_or("Thu, 1 Jan 1970 00:00:00 +0000"), item.pub_date().unwrap_or("Thu, 1 Jan 1970 00:00:00 +0000"),