Final2, I swear: when dealing with Arc/Rc, always use references.

Jordan Petridis 2017-10-22 04:51:34 +03:00
parent cbe50c9163
commit 6ffacaa6d8
No known key found for this signature in database
GPG Key ID: CEABAD9F5683B9A6
9 changed files with 49 additions and 54 deletions
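The change is mechanical across all nine files: functions that used to take Arc<Mutex<SqliteConnection>> by value (forcing a .clone() at every call site) now borrow it as &Arc<Mutex<SqliteConnection>>, and a clone happens only where ownership genuinely has to move, e.g. into a spawned thread or a GTK closure. Below is a minimal, hypothetical sketch of that pattern; it is not taken from the hammond codebase, and a plain u32 counter stands in for the real SqliteConnection.

// Illustrative only: a u32 counter stands in for the real SqliteConnection.
use std::sync::{Arc, Mutex};
use std::thread;

// Borrowing the Arc: no refcount bump and no clone at the call site.
fn read(db: &Arc<Mutex<u32>>) -> u32 {
    *db.lock().unwrap()
}

// Clone only where ownership has to cross a boundary, e.g. into a thread.
fn bump_in_background(db: &Arc<Mutex<u32>>) -> thread::JoinHandle<()> {
    let db = Arc::clone(db);
    thread::spawn(move || {
        *db.lock().unwrap() += 1;
    })
}

fn main() {
    let db = Arc::new(Mutex::new(0));
    bump_in_background(&db).join().unwrap();
    assert_eq!(read(&db), 1);
}

This is also what lets the commit drop one of the clippy allow attributes at the top of each touched file.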

View File

@ -51,13 +51,13 @@ fn run() -> Result<()> {
if args.up {
let db = hammond_data::establish_connection();
let db = Arc::new(Mutex::new(db));
index_feed::index_loop(db.clone(), false)?;
index_feed::index_loop(&db, false)?;
}
if args.dl >= 0 {
let db = hammond_data::establish_connection();
let db = Arc::new(Mutex::new(db));
downloader::latest_dl(db, args.dl as u32).unwrap();
downloader::latest_dl(&db, args.dl as u32).unwrap();
}
if args.latest {

View File

@ -1,6 +1,5 @@
#![allow(dead_code)]
#![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
use diesel::prelude::*;
use diesel;
@ -82,12 +81,12 @@ fn insert_return_episode(con: &SqliteConnection, ep: &NewEpisode) -> Result<Epis
Ok(dbqueries::load_episode(con, ep.uri.unwrap())?)
}
pub fn index_loop(db: Arc<Mutex<SqliteConnection>>, force: bool) -> Result<()> {
let mut f = fetch_feeds(db.clone(), force)?;
pub fn index_loop(db: &Arc<Mutex<SqliteConnection>>, force: bool) -> Result<()> {
let mut f = fetch_feeds(db, force)?;
f.par_iter_mut()
.for_each(|&mut Feed(ref mut req, ref source)| {
let e = complete_index_from_source(req, source, db.clone());
let e = complete_index_from_source(req, source, db);
if e.is_err() {
error!("Error While trying to update the database.");
error!("Error msg: {}", e.unwrap_err());
@ -100,7 +99,7 @@ pub fn index_loop(db: Arc<Mutex<SqliteConnection>>, force: bool) -> Result<()> {
pub fn complete_index_from_source(
req: &mut reqwest::Response,
source: &Source,
mutex: Arc<Mutex<SqliteConnection>>,
mutex: &Arc<Mutex<SqliteConnection>>,
) -> Result<()> {
use std::io::Read;
use std::str::FromStr;
@ -115,7 +114,7 @@ pub fn complete_index_from_source(
}
fn complete_index(
connection: Arc<Mutex<SqliteConnection>>,
connection: &Arc<Mutex<SqliteConnection>>,
chan: &rss::Channel,
parent: &Source,
) -> Result<()> {
@ -137,7 +136,7 @@ fn index_channel(db: &SqliteConnection, chan: &rss::Channel, parent: &Source) ->
Ok(pd)
}
fn index_channel_items(connection: Arc<Mutex<SqliteConnection>>, it: &[rss::Item], pd: &Podcast) {
fn index_channel_items(connection: &Arc<Mutex<SqliteConnection>>, it: &[rss::Item], pd: &Podcast) {
it.par_iter()
.map(|x| feedparser::parse_episode(x, pd.id()))
.for_each(|x| {
@ -152,7 +151,7 @@ fn index_channel_items(connection: Arc<Mutex<SqliteConnection>>, it: &[rss::Item
}
// Maybe this can be refactored into an Iterator for lazy evaluation.
pub fn fetch_feeds(connection: Arc<Mutex<SqliteConnection>>, force: bool) -> Result<Vec<Feed>> {
pub fn fetch_feeds(connection: &Arc<Mutex<SqliteConnection>>, force: bool) -> Result<Vec<Feed>> {
let tempdb = connection.lock().unwrap();
let mut feeds = dbqueries::get_sources(&tempdb)?;
drop(tempdb);
@ -272,10 +271,10 @@ mod tests {
index_source(&tempdb, &NewSource::new_with_uri(feed)).unwrap()
});
index_loop(db.clone(), true).unwrap();
index_loop(&db, true).unwrap();
// Run again to cover unique constraint errors.
index_loop(db.clone(), true).unwrap();
index_loop(&db, true).unwrap();
}
#[test]
@ -316,7 +315,7 @@ mod tests {
let chan = rss::Channel::read_from(BufReader::new(feed)).unwrap();
// Index the channel
complete_index(m.clone(), &chan, &s).unwrap();
complete_index(&m, &chan, &s).unwrap();
});
// Assert the index rows equal the controlled results

View File

@ -1,5 +1,4 @@
#![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
use reqwest;
use hyper::header::*;
@ -61,7 +60,7 @@ pub fn download_to(target: &str, url: &str) -> Result<()> {
// Initial messy prototype; the queries load a lot of data that isn't needed.
// TODO: Refactor
pub fn latest_dl(connection: Arc<Mutex<SqliteConnection>>, limit: u32) -> Result<()> {
pub fn latest_dl(connection: &Arc<Mutex<SqliteConnection>>, limit: u32) -> Result<()> {
let pds = {
let tempdb = connection.lock().unwrap();
dbqueries::get_podcasts(&tempdb)?
@ -82,7 +81,7 @@ pub fn latest_dl(connection: Arc<Mutex<SqliteConnection>>, limit: u32) -> Result
// Download the episodes
eps.iter_mut().for_each(|ep| {
let x = get_episode(connection.clone(), ep, &dl_fold);
let x = get_episode(connection, ep, &dl_fold);
if let Err(err) = x {
error!("An Error occured while downloading an episode.");
error!("Error: {}", err);
@ -108,7 +107,7 @@ pub fn get_dl_folder(pd_title: &str) -> Result<String> {
// TODO: Refactor
pub fn get_episode(
connection: Arc<Mutex<SqliteConnection>>,
connection: &Arc<Mutex<SqliteConnection>>,
ep: &mut Episode,
dl_folder: &str,
) -> Result<()> {

View File

@ -1,5 +1,4 @@
#![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
use gtk;
use gtk::prelude::*;
@ -10,7 +9,7 @@ use utils;
use std::sync::{Arc, Mutex};
pub fn get_headerbar(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) -> gtk::HeaderBar {
pub fn get_headerbar(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) -> gtk::HeaderBar {
let builder = include_str!("../gtk/headerbar.ui");
let builder = gtk::Builder::new_from_string(builder);
@ -34,7 +33,7 @@ pub fn get_headerbar(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) -> gt
add_button.connect_clicked(move |_| {
let url = new_url.get_text().unwrap_or_default();
on_add_bttn_clicked(db_clone.clone(), &stack_clone, &url);
on_add_bttn_clicked(&db_clone, &stack_clone, &url);
// TODO: lock the button instead of hiding and add notification of feed added.
// TODO: map the spinner
@ -55,22 +54,22 @@ pub fn get_headerbar(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) -> gt
let db_clone = db.clone();
// FIXME: There appears to be a memory leak here.
refresh_button.connect_clicked(move |_| {
utils::refresh_db(db_clone.clone(), &stack_clone);
utils::refresh_db(&db_clone, &stack_clone);
});
header
}
fn on_add_bttn_clicked(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack, url: &str) {
fn on_add_bttn_clicked(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack, url: &str) {
let source = {
let tempdb = db.lock().unwrap();
index_feed::insert_return_source(&tempdb, &url)
index_feed::insert_return_source(&tempdb, url)
};
info!("{:?} feed added", url);
if let Ok(mut s) = source {
// update the db
utils::refresh_feed(db.clone(), &stack, &mut s);
utils::refresh_feed(db, stack, &mut s);
} else {
error!("Expected Error, feed probably already exists.");
error!("Error: {:?}", source.unwrap_err());

View File

@ -1,5 +1,4 @@
#![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
extern crate gdk;
extern crate gdk_pixbuf;
@ -43,7 +42,7 @@ fn build_ui(app: &gtk::Application) {
window.set_default_size(1050, 600);
app.add_window(&window);
// Set up the Stack that will manage the switch between podcasts_view and podcast_widget.
let stack = podcasts_view::setup_stack(db.clone());
let stack = podcasts_view::setup_stack(&db);
window.add(&stack);
// FIXME:
@ -55,7 +54,7 @@ fn build_ui(app: &gtk::Application) {
});
// Get the headerbar
let header = headerbar::get_headerbar(db.clone(), &stack);
let header = headerbar::get_headerbar(&db, &stack);
// TODO: add a delay, because otherwise there's lock contention for the db object.
// utils::refresh_db(db.clone(), stack.clone());
window.set_titlebar(&header);

View File

@ -1,5 +1,4 @@
#![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
use glib;
@ -24,7 +23,7 @@ thread_local!(
gtk::Stack,
Receiver<bool>)>> = RefCell::new(None));
pub fn refresh_db(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
pub fn refresh_db(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
// Create an async channel.
let (sender, receiver) = channel();
@ -38,7 +37,7 @@ pub fn refresh_db(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
// TODO: add timeout option and error reporting.
let db_clone = db.clone();
thread::spawn(move || {
let t = hammond_data::index_feed::index_loop(db_clone, false);
let t = hammond_data::index_feed::index_loop(&db_clone, false);
if t.is_err() {
error!("Error While trying to update the database.");
error!("Error msg: {}", t.unwrap_err());
@ -50,7 +49,7 @@ pub fn refresh_db(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
});
}
pub fn refresh_feed(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack, source: &mut Source) {
pub fn refresh_feed(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack, source: &mut Source) {
let (sender, receiver) = channel();
let db_clone = db.clone();
@ -69,7 +68,7 @@ pub fn refresh_feed(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack, source
if let Ok(x) = foo_ {
let Feed(mut req, s) = x;
let s = hammond_data::index_feed::complete_index_from_source(&mut req, &s, db_clone);
let s = hammond_data::index_feed::complete_index_from_source(&mut req, &s, &db_clone);
if s.is_err() {
error!("Error While trying to update the database.");
error!("Error msg: {}", s.unwrap_err());
@ -85,7 +84,7 @@ fn refresh_podcasts_view() -> glib::Continue {
GLOBAL.with(|global| {
if let Some((ref db, ref stack, ref reciever)) = *global.borrow() {
if reciever.try_recv().is_ok() {
podcasts_view::update_podcasts_view(db.clone(), stack);
podcasts_view::update_podcasts_view(db, stack);
}
}
});

View File

@ -13,7 +13,7 @@ use widgets::podcast::*;
// NOT IN USE.
// TRYING OUT STORELESS ATM.
pub fn populate_podcasts_flowbox(
db: Arc<Mutex<SqliteConnection>>,
db: &Arc<Mutex<SqliteConnection>>,
stack: &gtk::Stack,
flowbox: &gtk::FlowBox,
) {
@ -53,7 +53,7 @@ pub fn populate_podcasts_flowbox(
f.connect_activate(move |_| {
let old = stack_clone.get_child_by_name("pdw").unwrap();
let pdw = podcast_widget(
db_clone.clone(),
&db_clone,
Some(title.as_str()),
description.as_ref().map(|x| x.as_str()),
pixbuf.clone(),
@ -85,7 +85,7 @@ fn show_empty_view(stack: &gtk::Stack) {
}
pub fn pop_flowbox_no_store(
db: Arc<Mutex<SqliteConnection>>,
db: &Arc<Mutex<SqliteConnection>>,
stack: &gtk::Stack,
flowbox: &gtk::FlowBox,
) {
@ -105,21 +105,21 @@ pub fn pop_flowbox_no_store(
let stack = stack.clone();
let parent = parent.clone();
f.connect_activate(move |_| {
on_flowbox_child_activate(db.clone(), &stack, &parent, pixbuf.clone());
on_flowbox_child_activate(&db, &stack, &parent, pixbuf.clone());
});
flowbox.add(&f);
});
} else {
show_empty_view(&stack);
show_empty_view(stack);
}
}
fn setup_podcast_widget(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
fn setup_podcast_widget(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
let pd_widget = podcast_widget(db, None, None, None);
stack.add_named(&pd_widget, "pdw");
}
fn setup_podcasts_grid(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
fn setup_podcasts_grid(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
let builder = include_str!("../../gtk/podcasts_view.ui");
let builder = gtk::Builder::new_from_string(builder);
let grid: gtk::Grid = builder.get_object("grid").unwrap();
@ -134,14 +134,14 @@ fn setup_podcasts_grid(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
pop_flowbox_no_store(db, stack, &flowbox);
}
pub fn setup_stack(db: Arc<Mutex<SqliteConnection>>) -> gtk::Stack {
pub fn setup_stack(db: &Arc<Mutex<SqliteConnection>>) -> gtk::Stack {
let stack = gtk::Stack::new();
setup_podcast_widget(db.clone(), &stack);
setup_podcast_widget(db, &stack);
setup_podcasts_grid(db, &stack);
stack
}
pub fn update_podcasts_view(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
pub fn update_podcasts_view(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
let builder = include_str!("../../gtk/podcasts_view.ui");
let builder = gtk::Builder::new_from_string(builder);
let grid: gtk::Grid = builder.get_object("grid").unwrap();

View File

@ -1,6 +1,5 @@
#![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
use open;
use diesel::prelude::SqliteConnection;
@ -27,7 +26,7 @@ thread_local!(
// TODO: REFACTOR AND MODULATE ME.
fn epidose_widget(
connection: Arc<Mutex<SqliteConnection>>,
connection: &Arc<Mutex<SqliteConnection>>,
episode: &mut Episode,
pd_title: &str,
) -> gtk::Box {
@ -83,7 +82,7 @@ fn epidose_widget(
let dl_button_clone = dl_button.clone();
dl_button.connect_clicked(move |_| {
on_dl_clicked(
db.clone(),
&db,
&pd_title_clone,
&mut ep_clone.clone(),
dl_button_clone.clone(),
@ -96,7 +95,7 @@ fn epidose_widget(
// TODO: show notification when dl is finished and block play_bttn till then.
fn on_dl_clicked(
db: Arc<Mutex<SqliteConnection>>,
db: &Arc<Mutex<SqliteConnection>>,
pd_title: &str,
ep: &mut Episode,
dl_bttn: gtk::Button,
@ -112,9 +111,10 @@ fn on_dl_clicked(
let pd_title = pd_title.to_owned();
let mut ep = ep.clone();
let db = db.clone();
thread::spawn(move || {
let dl_fold = downloader::get_dl_folder(&pd_title).unwrap();
let e = downloader::get_episode(db, &mut ep, dl_fold.as_str());
let e = downloader::get_episode(&db, &mut ep, dl_fold.as_str());
if let Err(err) = e {
error!("Error while trying to download: {}", ep.uri());
error!("Error: {}", err);
@ -136,7 +136,7 @@ fn receive() -> glib::Continue {
glib::Continue(false)
}
pub fn episodes_listbox(connection: Arc<Mutex<SqliteConnection>>, pd_title: &str) -> gtk::ListBox {
pub fn episodes_listbox(connection: &Arc<Mutex<SqliteConnection>>, pd_title: &str) -> gtk::ListBox {
// TODO: handle unwraps.
let m = connection.lock().unwrap();
let pd = dbqueries::load_podcast(&m, pd_title).unwrap();
@ -145,7 +145,7 @@ pub fn episodes_listbox(connection: Arc<Mutex<SqliteConnection>>, pd_title: &str
let list = gtk::ListBox::new();
episodes.iter_mut().for_each(|ep| {
let w = epidose_widget(connection.clone(), ep, pd_title);
let w = epidose_widget(connection, ep, pd_title);
list.add(&w)
});

View File

@ -12,7 +12,7 @@ use std::sync::{Arc, Mutex};
use widgets::episode::episodes_listbox;
pub fn podcast_widget(
connection: Arc<Mutex<SqliteConnection>>,
connection: &Arc<Mutex<SqliteConnection>>,
title: Option<&str>,
description: Option<&str>,
image: Option<Pixbuf>,
@ -73,7 +73,7 @@ pub fn create_flowbox_child(title: &str, cover: Option<Pixbuf>) -> gtk::FlowBoxC
}
pub fn on_flowbox_child_activate(
db: Arc<Mutex<SqliteConnection>>,
db: &Arc<Mutex<SqliteConnection>>,
stack: &gtk::Stack,
parent: &Podcast,
pixbuf: Option<Pixbuf>,
@ -115,7 +115,7 @@ pub fn podcast_liststore(connection: &SqliteConnection) -> gtk::ListStore {
podcast_model
}
// pub fn update_podcast_widget(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack, pd:
// pub fn update_podcast_widget(db: &&Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack, pd:
// &Podcast){
// let old = stack.get_child_by_name("pdw").unwrap();
// let pdw = pd_widget_from_diesel_model(&db.clone(), pd, &stack.clone());
@ -126,7 +126,7 @@ pub fn podcast_liststore(connection: &SqliteConnection) -> gtk::ListStore {
// stack.set_visible_child_name(&vis);
// }
pub fn pd_widget_from_diesel_model(db: Arc<Mutex<SqliteConnection>>, pd: &Podcast) -> gtk::Box {
pub fn pd_widget_from_diesel_model(db: &Arc<Mutex<SqliteConnection>>, pd: &Podcast) -> gtk::Box {
let img = get_pixbuf_from_path(pd.image_uri(), pd.title());
podcast_widget(db, Some(pd.title()), Some(pd.description()), img)
}