From 828a12054d471cdb7a705b1bdeb6fed47a4a86f7 Mon Sep 17 00:00:00 2001 From: Jordan Petridis Date: Tue, 3 Oct 2017 13:49:32 +0300 Subject: [PATCH] Initial local_uri support for the downloader. --- src/downloader.rs | 36 +++++++++++++++++++++++++++++++----- src/feedparser.rs | 6 +----- 2 files changed, 32 insertions(+), 10 deletions(-) diff --git a/src/downloader.rs b/src/downloader.rs index 98f3131..08ec86f 100644 --- a/src/downloader.rs +++ b/src/downloader.rs @@ -4,11 +4,17 @@ use diesel::prelude::*; use std::fs::{DirBuilder, File}; use std::io::{BufWriter, Read, Write}; +use std::path::Path; use errors::*; use dbqueries; // Adapted from https://github.com/mattgathu/rget . +/// I never wanted to write a custom downloader. +/// Sorry to those who will have to work with that code. +/// Would much rather use a crate, +/// or bindings for a lib like youtube-dl(python), +/// But can't seem to find one. pub fn download_to(target: &str, url: &str) -> Result<()> { let mut resp = reqwest::get(url)?; info!("GET request to: {}", url); @@ -32,7 +38,6 @@ pub fn download_to(target: &str, url: &str) -> Result<()> { let mut writer = BufWriter::new(File::create(target)?); - // FIXME: not running loop { let mut buffer = vec![0; chunk_size]; let bcount = resp.read(&mut buffer[..]).unwrap(); @@ -49,11 +54,15 @@ pub fn download_to(target: &str, url: &str) -> Result<()> { // Initial messy prototype, queries load a lot of not needed stuff. pub fn latest_dl(connection: &SqliteConnection, limit: u32) -> Result<()> { + use models::Episode; + let pds = dbqueries::get_podcasts(connection)?; pds.iter() + // TODO when for_each reaches stable: + // Remove all the ugly folds(_) and replace map() with for_each(). 
.map(|x| -> Result<()> { - let eps; + let mut eps; if limit == 0 { eps = dbqueries::get_pd_episodes(connection, &x)?; } else { @@ -67,12 +76,29 @@ pub fn latest_dl(connection: &SqliteConnection, limit: u32) -> Result<()> { DirBuilder::new().recursive(true).create(&dl_fold).unwrap(); // Download the episodes - eps.iter() + eps.iter_mut() .map(|y| -> Result<()> { - let ext = y.uri().split(".").last().unwrap(); - let dlpath = format!("{}/{}.{}", dl_fold, y.title().unwrap(), ext); + // Check if it's already downloaded + if let Some(foo) = y.local_uri().clone(){ + if Path::new(foo).exists() { + return Ok(()); + } + y.save_changes::(connection)?; + () + }; + + // Unreliable and hacky way to extract the file extension from the url. + let ext = y.uri().split(".").last().unwrap().to_owned(); + + // Construct the download path. + let dlpath = format!("{}/{}.{}", dl_fold, y.title().unwrap().to_owned(), ext); info!("Downloading {:?} into: {}", y.title(), dlpath); + // TODO: implement .part files download_to(&dlpath, y.uri())?; + + // If download succeeds set episode local_uri to dlpath. + y.set_local_uri(Some(&dlpath)); + y.save_changes::(connection)?; Ok(()) }) .fold((), |(), _| ()); diff --git a/src/feedparser.rs b/src/feedparser.rs index 73d42d2..ed080ee 100644 --- a/src/feedparser.rs +++ b/src/feedparser.rs @@ -24,17 +24,13 @@ pub fn parse_episode<'a>(item: &'a Item, parent_id: i32) -> Result