Replaced some map/fold with for_each now that it hit stable!

Jordan Petridis 2017-10-13 02:49:14 +03:00
parent b176f334c3
commit e39a89d63d
No known key found for this signature in database
GPG Key ID: CEABAD9F5683B9A6
2 changed files with 22 additions and 28 deletions
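
For context: Iterator::for_each was stabilized in Rust 1.21 (October 2017), so the old trick of driving a lazy map() with a do-nothing fold() is no longer needed for side-effect-only loops. A minimal standalone sketch of the two forms being swapped in this commit, using a made-up `names` slice rather than the crate's real data:

    fn main() {
        let names = ["alice", "bob"];

        // Pre-1.21 workaround: map() is lazy, so an empty fold() was chained on
        // purely to consume the iterator and force the closure to run.
        names.iter()
            .map(|n| println!("hello {}", n))
            .fold((), |(), _| ());

        // Since Rust 1.21: for_each() consumes the iterator and runs the closure
        // eagerly, with no throwaway accumulator.
        names.iter().for_each(|n| println!("hello {}", n));
    }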


@@ -262,11 +262,9 @@ mod tests {
             "http://feeds.feedburner.com/linuxunplugged",
         ];
-        inpt.iter()
-            .map(|feed| {
-                index_source(&db, &NewSource::new_with_uri(feed)).unwrap()
-            })
-            .fold((), |(), _| ());
+        inpt.iter().for_each(|feed| {
+            index_source(&db, &NewSource::new_with_uri(feed)).unwrap()
+        });
 
         index_loop(db, true).unwrap();

@@ -303,22 +301,20 @@ mod tests {
             ),
         ];
 
-        urls.iter()
-            .map(|&(path, url)| {
-                let tempdb = m.lock().unwrap();
-                // Create and insert a Source into db
-                let s = insert_return_source(&tempdb, url).unwrap();
-                drop(tempdb);
+        urls.iter().for_each(|&(path, url)| {
+            let tempdb = m.lock().unwrap();
+            // Create and insert a Source into db
+            let s = insert_return_source(&tempdb, url).unwrap();
+            drop(tempdb);
 
-                // open the xml file
-                let feed = fs::File::open(path).unwrap();
-                // parse it into a channel
-                let chan = rss::Channel::read_from(BufReader::new(feed)).unwrap();
+            // open the xml file
+            let feed = fs::File::open(path).unwrap();
+            // parse it into a channel
+            let chan = rss::Channel::read_from(BufReader::new(feed)).unwrap();
 
-                // Index the channel
-                complete_index(m.clone(), &chan, &s).unwrap();
-            })
-            .fold((), |(), _| ());
+            // Index the channel
+            complete_index(m.clone(), &chan, &s).unwrap();
+        });
 
         // Assert the index rows equal the controlled results
         let tempdb = m.lock().unwrap();


@@ -55,12 +55,12 @@ pub fn download_to(target: &str, url: &str) -> Result<()> {
 }
 
 // Initial messy prototype, queries load alot of not needed stuff.
+// TODO: Refactor
 pub fn latest_dl(connection: &SqliteConnection, limit: u32) -> Result<()> {
     let pds = dbqueries::get_podcasts(connection)?;
 
-    pds.iter()
-        // TODO when for_each reaches stable:
-        // Remove all the ugly folds(_) and replace map() with for_each().
+    let _: Vec<_> = pds.iter()
+        // This could be for_each instead of map.
         .map(|x| -> Result<()> {
             let mut eps = if limit == 0 {
                 dbqueries::get_pd_episodes(connection, x)?

@@ -75,17 +75,15 @@ pub fn latest_dl(connection: &SqliteConnection, limit: u32) -> Result<()> {
             DirBuilder::new().recursive(true).create(&dl_fold).unwrap();
 
             // Download the episodes
-            eps.iter_mut()
+            let _ :Vec<_>= eps.iter_mut()
                 .map(|y| -> Result<()> {
                     // Check if its alrdy downloaded
                     if y.local_uri().is_some() {
-                        // Not idiomatic but I am still fighting the borrow-checker.
-                        if Path::new(y.local_uri().unwrap().to_owned().as_str()).exists() {
+                        if Path::new(y.local_uri().unwrap()).exists() {
                             return Ok(());
                         }
                         y.set_local_uri(None);
                         y.save_changes::<Episode>(connection)?;
-                        ()
                     };
 
                     // Unreliable and hacky way to extract the file extension from the url.

@@ -101,11 +99,11 @@ pub fn latest_dl(connection: &SqliteConnection, limit: u32) -> Result<()> {
                     y.save_changes::<Episode>(connection)?;
                     Ok(())
                 })
-                .fold((), |(), _| ());
+                .collect();
 
             Ok(())
         })
-        .fold((), |(), _| ());
+        .collect();
 
     Ok(())
 }
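
Note on the map() calls that remain in latest_dl: the closures there return Result<()>, which for_each would simply discard, so the commit keeps map() and drains the iterator with a throwaway `let _: Vec<_> = ... .collect()` instead, which is presumably what the new // TODO: Refactor points at. A rough standalone sketch of that pattern, plus one possible refactor that collects into a Result so the first error short-circuits; the `check` helper and the items are invented for illustration and are not part of the crate:

    fn check(n: i32) -> Result<(), String> {
        if n < 0 {
            Err(format!("negative value: {}", n))
        } else {
            Ok(())
        }
    }

    fn main() {
        let items = [1, 2, 3];

        // As in the commit: run the fallible closure for every item and throw the
        // Results away by collecting them into an ignored Vec.
        let _: Vec<_> = items.iter().map(|&n| check(n)).collect();

        // Possible refactor: collecting into a Result short-circuits on the first
        // Err and hands it back, so the caller could use `?` on the whole loop.
        let outcome: Result<(), String> = items.iter().map(|&n| check(n)).collect();
        outcome.unwrap();
    }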