Changed Arc<T> arguments to be passed by value; cloning an Arc just copies the reference anyway.
This commit is contained in:
parent 7b0a8f0e25
commit 2077215491
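Rationale: cloning an Arc only copies the smart pointer and bumps an atomic reference count, so taking Arc<Mutex<T>> by value costs no more than passing &Arc<Mutex<T>>, and it keeps the signatures simpler. A minimal standalone sketch (not code from this repository; use_db is a hypothetical helper) of the pattern this diff adopts:

use std::sync::{Arc, Mutex};

// Hypothetical helper, analogous to index_loop: takes the Arc by value.
// Arc::clone only increments an atomic refcount; the Mutex and the data
// behind it are never copied.
fn use_db(db: Arc<Mutex<Vec<String>>>) {
    db.lock().unwrap().push("episode".to_string());
}

fn main() {
    let db = Arc::new(Mutex::new(Vec::new()));
    use_db(db.clone()); // cheap: copies the pointer, bumps the count
    use_db(db);         // last use can move the Arc instead of cloning
}

This style is also why the diff adds the needless_pass_by_value clippy allow alongside the existing clone_on_ref_ptr allow in the crate roots: both lints would otherwise flag it.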
@@ -51,7 +51,7 @@ fn run() -> Result<()> {
     if args.up {
         let db = hammond_data::establish_connection();
         let db = Arc::new(Mutex::new(db));
-        index_feed::index_loop(&db.clone(), false)?;
+        index_feed::index_loop(db.clone(), false)?;
     }
 
     if args.dl >= 0 {
@@ -1,5 +1,6 @@
 #![allow(dead_code)]
 #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
+#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
 
 use diesel::prelude::*;
 use diesel;
@@ -81,12 +82,12 @@ fn insert_return_episode(con: &SqliteConnection, ep: &NewEpisode) -> Result<Epis
     Ok(dbqueries::load_episode(con, ep.uri.unwrap())?)
 }
 
-pub fn index_loop(db: &Arc<Mutex<SqliteConnection>>, force: bool) -> Result<()> {
-    let mut f = fetch_feeds(db, force)?;
+pub fn index_loop(db: Arc<Mutex<SqliteConnection>>, force: bool) -> Result<()> {
+    let mut f = fetch_feeds(db.clone(), force)?;
 
     f.par_iter_mut()
         .for_each(|&mut Feed(ref mut req, ref source)| {
-            let e = complete_index_from_source(req, source, db);
+            let e = complete_index_from_source(req, source, db.clone());
             if e.is_err() {
                 error!("Error While trying to update the database.");
                 error!("Error msg: {}", e.unwrap_err());
@@ -99,7 +100,7 @@ pub fn index_loop(db: &Arc<Mutex<SqliteConnection>>, force: bool) -> Result<()>
 pub fn complete_index_from_source(
     req: &mut reqwest::Response,
     source: &Source,
-    mutex: &Arc<Mutex<SqliteConnection>>,
+    mutex: Arc<Mutex<SqliteConnection>>,
 ) -> Result<()> {
     use std::io::Read;
     use std::str::FromStr;
@@ -114,13 +115,15 @@ pub fn complete_index_from_source(
 }
 
 fn complete_index(
-    connection: &Arc<Mutex<SqliteConnection>>,
+    connection: Arc<Mutex<SqliteConnection>>,
     chan: &rss::Channel,
     parent: &Source,
 ) -> Result<()> {
-    let tempdb = connection.lock().unwrap();
-    let pd = index_channel(&tempdb, chan, parent)?;
-    drop(tempdb);
+    let pd = {
+        let db = connection.clone();
+        let db = db.lock().unwrap();
+        index_channel(&db, chan, parent)?
+    };
 
     index_channel_items(connection, chan.items(), &pd);
 
@@ -134,11 +137,12 @@ fn index_channel(db: &SqliteConnection, chan: &rss::Channel, parent: &Source) ->
     Ok(pd)
 }
 
-fn index_channel_items(connection: &Arc<Mutex<SqliteConnection>>, it: &[rss::Item], pd: &Podcast) {
+fn index_channel_items(connection: Arc<Mutex<SqliteConnection>>, it: &[rss::Item], pd: &Podcast) {
     it.par_iter()
         .map(|x| feedparser::parse_episode(x, pd.id()))
         .for_each(|x| {
-            let db = connection.lock().unwrap();
+            let db = connection.clone();
+            let db = db.lock().unwrap();
             let e = index_episode(&db, &x);
             if let Err(err) = e {
                 error!("Failed to index episode: {:?}.", x);
@@ -148,7 +152,7 @@ fn index_channel_items(connection: &Arc<Mutex<SqliteConnection>>, it: &[rss::Ite
 }
 
 // Maybe this can be refactored into an Iterator for lazy evaluation.
-pub fn fetch_feeds(connection: &Arc<Mutex<SqliteConnection>>, force: bool) -> Result<Vec<Feed>> {
+pub fn fetch_feeds(connection: Arc<Mutex<SqliteConnection>>, force: bool) -> Result<Vec<Feed>> {
     let tempdb = connection.lock().unwrap();
     let mut feeds = dbqueries::get_sources(&tempdb)?;
     drop(tempdb);
@@ -268,10 +272,10 @@ mod tests {
             index_source(&tempdb, &NewSource::new_with_uri(feed)).unwrap()
         });
 
-        index_loop(&db, true).unwrap();
+        index_loop(db.clone(), true).unwrap();
 
         // Run again to cover Unique constrains erros.
-        index_loop(&db, true).unwrap();
+        index_loop(db.clone(), true).unwrap();
     }
 
     #[test]
@@ -312,7 +316,7 @@ mod tests {
             let chan = rss::Channel::read_from(BufReader::new(feed)).unwrap();
 
             // Index the channel
-            complete_index(&m, &chan, &s).unwrap();
+            complete_index(m.clone(), &chan, &s).unwrap();
         });
 
         // Assert the index rows equal the controlled results
@@ -1,4 +1,5 @@
 #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
+#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
 
 use gtk;
 use gtk::prelude::*;
@@ -40,7 +41,7 @@ pub fn get_headerbar(db: Arc<Mutex<SqliteConnection>>, stack: gtk::Stack) -> gtk
         info!("{:?} feed added", url);
         if let Ok(mut source) = f {
             // update the db
-            utils::refresh_feed(&db_clone, &stack_clone, &mut source);
+            utils::refresh_feed(db_clone.clone(), &stack_clone, &mut source);
         } else {
             error!("Expected Error, feed probably already exists.");
             error!("Error: {:?}", f.unwrap_err());
@@ -1,4 +1,5 @@
 #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
+#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
 
 extern crate gdk;
 extern crate gdk_pixbuf;
@@ -42,7 +43,7 @@ fn build_ui(app: &gtk::Application) {
     window.set_default_size(1050, 600);
     app.add_window(&window);
     // Setup the Stack that will magane the switche between podcasts_view and podcast_widget.
-    let stack = podcasts_view::setup_stack(&db);
+    let stack = podcasts_view::setup_stack(db.clone());
     window.add(&stack);
 
     // FIXME:
@@ -1,4 +1,5 @@
 #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
+#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
 
 use glib;
 
@@ -33,7 +34,7 @@ pub fn refresh_db(db: Arc<Mutex<SqliteConnection>>, stack: gtk::Stack) {
     // The implementation of how this is done is probably terrible but it works!.
     let db_clone = db.clone();
     thread::spawn(move || {
-        let t = hammond_data::index_feed::index_loop(&db_clone, false);
+        let t = hammond_data::index_feed::index_loop(db_clone, false);
         if t.is_err() {
             error!("Error While trying to update the database.");
             error!("Error msg: {}", t.unwrap_err());
@@ -44,18 +45,19 @@ pub fn refresh_db(db: Arc<Mutex<SqliteConnection>>, stack: gtk::Stack) {
     });
 }
 
-pub fn refresh_feed(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack, source: &mut Source) {
+pub fn refresh_feed(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack, source: &mut Source) {
     let db_clone = db.clone();
     let mut source_ = source.clone();
     // TODO: add timeout option and error reporting.
     let handle = thread::spawn(move || {
-        let db_ = db_clone.lock().unwrap();
+        let db_ = db_clone.clone();
+        let db_ = db_.lock().unwrap();
         let foo_ = hammond_data::index_feed::refresh_source(&db_, &mut source_, false);
         drop(db_);
 
         if let Ok(x) = foo_ {
             let Feed(mut req, s) = x;
-            let s = hammond_data::index_feed::complete_index_from_source(&mut req, &s, &db_clone);
+            let s = hammond_data::index_feed::complete_index_from_source(&mut req, &s, db_clone);
             if s.is_err() {
                 error!("Error While trying to update the database.");
                 error!("Error msg: {}", s.unwrap_err());
@@ -93,7 +95,7 @@ fn receive() -> glib::Continue {
     GLOBAL.with(|global| {
         if let Some((ref db, ref stack, ref reciever)) = *global.borrow() {
             if let Ok(_) = reciever.try_recv() {
-                podcasts_view::update_podcasts_view(db, stack);
+                podcasts_view::update_podcasts_view(db.clone(), stack);
             }
         }
     });
@@ -11,7 +11,7 @@ use std::sync::{Arc, Mutex};
 use widgets::podcast::*;
 
 pub fn populate_podcasts_flowbox(
-    db: &Arc<Mutex<SqliteConnection>>,
+    db: Arc<Mutex<SqliteConnection>>,
     stack: &gtk::Stack,
     flowbox: &gtk::FlowBox,
 ) {
@@ -51,7 +51,7 @@ pub fn populate_podcasts_flowbox(
         f.connect_activate(move |_| {
             let old = stack_clone.get_child_by_name("pdw").unwrap();
             let pdw = podcast_widget(
-                &db_clone,
+                db_clone.clone(),
                 Some(title.as_str()),
                 description.as_ref().map(|x| x.as_str()),
                 pixbuf.clone(),
@@ -71,12 +71,12 @@ pub fn populate_podcasts_flowbox(
     flowbox.show_all();
 }
 
-fn setup_podcast_widget(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
+fn setup_podcast_widget(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
     let pd_widget = podcast_widget(db, None, None, None);
     stack.add_named(&pd_widget, "pdw");
 }
 
-fn setup_podcasts_grid(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
+fn setup_podcasts_grid(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
     let builder = include_str!("../../gtk/podcasts_view.ui");
     let builder = gtk::Builder::new_from_string(builder);
     let grid: gtk::Grid = builder.get_object("grid").unwrap();
@@ -90,19 +90,14 @@ fn setup_podcasts_grid(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
     populate_podcasts_flowbox(db, stack, &flowbox);
 }
 
-pub fn setup_stack(db: &Arc<Mutex<SqliteConnection>>) -> gtk::Stack {
+pub fn setup_stack(db: Arc<Mutex<SqliteConnection>>) -> gtk::Stack {
     let stack = gtk::Stack::new();
-    // let _st_clone = stack.clone();
-    setup_podcast_widget(db, &stack);
+    setup_podcast_widget(db.clone(), &stack);
     setup_podcasts_grid(db, &stack);
-    // stack.connect("update_grid", true, move |_| {
-    //     update_podcasts_view(&db_clone, &st_clone);
-    //     None
-    // });
     stack
 }
 
-pub fn update_podcasts_view(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
+pub fn update_podcasts_view(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
     let builder = include_str!("../../gtk/podcasts_view.ui");
     let builder = gtk::Builder::new_from_string(builder);
     let grid: gtk::Grid = builder.get_object("grid").unwrap();
@@ -1,5 +1,6 @@
 
 #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
+#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
 
 use open;
 use diesel::prelude::SqliteConnection;
@@ -22,7 +23,7 @@ use gtk::ContainerExt;
 // use utils;
 
 fn epidose_widget(
-    connection: &Arc<Mutex<SqliteConnection>>,
+    connection: Arc<Mutex<SqliteConnection>>,
     episode: &mut Episode,
     pd_title: &str,
 ) -> gtk::Box {
@@ -101,7 +102,7 @@ fn epidose_widget(
 }
 
 
-pub fn episodes_listbox(connection: &Arc<Mutex<SqliteConnection>>, pd_title: &str) -> gtk::ListBox {
+pub fn episodes_listbox(connection: Arc<Mutex<SqliteConnection>>, pd_title: &str) -> gtk::ListBox {
     // TODO: handle unwraps.
     let m = connection.lock().unwrap();
     let pd = dbqueries::load_podcast(&m, pd_title).unwrap();
@@ -110,7 +111,7 @@ pub fn episodes_listbox(connection: &Arc<Mutex<SqliteConnection>>, pd_title: &st
 
     let list = gtk::ListBox::new();
     episodes.iter_mut().for_each(|ep| {
-        let w = epidose_widget(connection, ep, pd_title);
+        let w = epidose_widget(connection.clone(), ep, pd_title);
         list.add(&w)
     });
 
@@ -12,7 +12,7 @@ use std::sync::{Arc, Mutex};
 use widgets::episode::episodes_listbox;
 
 pub fn podcast_widget(
-    connection: &Arc<Mutex<SqliteConnection>>,
+    connection: Arc<Mutex<SqliteConnection>>,
     title: Option<&str>,
     description: Option<&str>,
     image: Option<Pixbuf>,
@@ -29,7 +29,7 @@ pub fn podcast_widget(
 
     if let Some(t) = title {
         title_label.set_text(t);
-        let listbox = episodes_listbox(&connection, t);
+        let listbox = episodes_listbox(connection, t);
         view.add(&listbox);
     }
 
@@ -108,7 +108,7 @@ pub fn podcast_liststore(connection: &SqliteConnection) -> gtk::ListStore {
 //         stack.set_visible_child_full("pdw", StackTransitionType::None);
 //     }
 
-pub fn pd_widget_from_diesel_model(db: &Arc<Mutex<SqliteConnection>>, pd: &Podcast) -> gtk::Box {
+pub fn pd_widget_from_diesel_model(db: Arc<Mutex<SqliteConnection>>, pd: &Podcast) -> gtk::Box {
     let img = get_pixbuf_from_path(pd.image_uri(), pd.title());
     podcast_widget(db, Some(pd.title()), Some(pd.description()), img)
 }