Refactored refresh_feed.
parent 7a1b272d9e
commit a9dec8dbe8
@@ -90,9 +90,9 @@ fn insert_return_episode(con: &SqliteConnection, ep: &NewEpisode) -> Result<Epis
 }
 
 pub fn full_index_loop(db: &Database) -> Result<()> {
-    let mut f = fetch_feeds(db)?;
+    let mut f = fetch_all_feeds(db)?;
 
-    index_feed(&db, &mut f);
+    index_feed(db, &mut f);
     info!("Indexing done.");
     Ok(())
 }
@@ -166,12 +166,17 @@ fn index_channel_items(db: &Database, it: &[rss::Item], pd: &Podcast) {
 }
 
 // Maybe this can be refactored into an Iterator for lazy evaluation.
-pub fn fetch_feeds(db: &Database) -> Result<Vec<Feed>> {
+pub fn fetch_all_feeds(db: &Database) -> Result<Vec<Feed>> {
     let mut feeds = {
         let conn = db.lock().unwrap();
         dbqueries::get_sources(&conn)?
     };
 
+    let results = fetch_feeds(db, &mut feeds);
+    Ok(results)
+}
+
+pub fn fetch_feeds(db: &Database, feeds: &mut [Source]) -> Vec<Feed> {
     let results: Vec<Feed> = feeds
         .par_iter_mut()
         .filter_map(|x| {
@@ -186,7 +191,7 @@ pub fn fetch_feeds(db: &Database) -> Result<Vec<Feed>> {
         })
         .collect();
 
-    Ok(results)
+    results
 }
 
 pub fn refresh_source(db: &Database, feed: &mut Source) -> Result<Feed> {
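For context on the split above: fetch_all_feeds now loads every Source from the database itself, while fetch_feeds takes an explicit slice so callers can fetch only a subset. A minimal caller sketch under that assumption follows; the wrapper functions index_everything and index_some are illustrative, not part of this commit, and assume the crate's Database, Source, Feed and Result types are in scope.

    // Hypothetical callers sketching the new split API.
    fn index_everything(db: &Database) -> Result<()> {
        // Loads every Source via dbqueries::get_sources, then fetches them all.
        let mut feeds = fetch_all_feeds(db)?;
        index_feed(db, &mut feeds);
        Ok(())
    }

    fn index_some(db: &Database, sources: &mut [Source]) {
        // Fetches only the given sources; per the filter_map above,
        // sources that fail to fetch are skipped rather than aborting.
        let mut feeds = fetch_feeds(db, sources);
        index_feed(db, &mut feeds);
    }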
@@ -64,7 +64,7 @@ fn on_add_bttn_clicked(db: &Database, stack: &gtk::Stack, url: &str) {
 
     if let Ok(s) = source {
         // update the db
-        utils::refresh_feed(db, stack, Some(Box::new(vec![s])));
+        utils::refresh_feed(db, stack, Some(vec![s]));
     } else {
         error!("Expected Error, feed probably already exists.");
         error!("Error: {:?}", source.unwrap_err());
@@ -12,8 +12,9 @@ extern crate hammond_downloader;
 extern crate log;
 extern crate loggerv;
 extern crate open;
-extern crate rayon;
+// extern crate rayon;
 
+// use rayon::prelude::*;
 use log::LogLevel;
 use hammond_data::dbcheckup;
 
@@ -1,6 +1,5 @@
 use glib;
 use gtk;
-use rayon::prelude::*;
 
 use hammond_data::index_feed;
 use hammond_data::models::Source;
@@ -18,26 +17,24 @@ thread_local!(
     gtk::Stack,
     Receiver<bool>)>> = RefCell::new(None));
 
-pub fn refresh_feed(db: &Database, stack: &gtk::Stack, source: Option<Box<Vec<Source>>>) {
+/// Update the rss feed(s) originating from `Source`.
+/// If `source` is None, Fetches all the `Source` entries in the database and updates them.
+/// When It's done,it queues up a `podcast_view` refresh.
+pub fn refresh_feed(db: &Database, stack: &gtk::Stack, source: Option<Vec<Source>>) {
     // Create a async channel.
     let (sender, receiver) = channel();
 
     // Pass the desired arguments into the Local Thread Storage.
     GLOBAL.with(clone!(db, stack => move |global| {
         *global.borrow_mut() = Some((db, stack, receiver));
     }));
 
     // TODO: add timeout option and error reporting.
     thread::spawn(clone!(db => move || {
         let feeds = {
-            if let Some(mut boxed_vec) = source {
-                let f = boxed_vec
-                    .par_iter_mut()
-                    .filter_map(|mut s| {
-                        index_feed::refresh_source(&db, &mut s).ok()
-                    })
-                    .collect();
-                Ok(f)
+            if let Some(mut vec) = source {
+                Ok(index_feed::fetch_feeds(&db, &mut vec))
             } else {
-                index_feed::fetch_feeds(&db)
+                index_feed::fetch_all_feeds(&db)
             }
         };
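For reference, a hedged sketch of the two call patterns this refactor leaves GTK-side callers with, now that the Box wrapper around the Vec<Source> is gone. The variables db, stack and new_source are assumed to be in scope as in on_add_bttn_clicked; only the refresh_feed signature and the None behaviour documented above are taken from the diff itself.

    // Refresh a single, newly added source, as on_add_bttn_clicked now does:
    utils::refresh_feed(db, stack, Some(vec![new_source]));

    // Refresh everything: with None, the worker thread falls back to
    // index_feed::fetch_all_feeds and updates every Source in the database.
    utils::refresh_feed(db, stack, None);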