Final2, I swear: when dealing with Arc/Rc, always use references.
This commit is contained in:
parent cbe50c9163
commit 6ffacaa6d8
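
The whole diff applies one pattern: functions that only use the shared connection now borrow it as &Arc<Mutex<SqliteConnection>> instead of taking the Arc by value, so call sites pass &db rather than db.clone(). Deref coercion means .lock() works unchanged through the reference, and the per-file allow for clippy's needless_pass_by_value lint can be dropped. A minimal sketch of the idea, with a stand-in Db type rather than the real diesel connection and illustrative function names:

use std::sync::{Arc, Mutex};

// Stand-in for the real diesel SqliteConnection.
type Db = Vec<String>;

// Borrowing the Arc is enough for anything that only uses the connection:
// no refcount traffic, and `needless_pass_by_value` has nothing to flag.
fn insert_feed(db: &Arc<Mutex<Db>>, feed: &str) {
    // Deref coercion: `.lock()` works the same through the reference.
    db.lock().unwrap().push(feed.to_owned());
}

// The reference is forwarded down the call chain without any clone,
// the way index_loop -> fetch_feeds -> complete_index_from_source now do.
fn index_all(db: &Arc<Mutex<Db>>, feeds: &[&str]) {
    for feed in feeds {
        insert_feed(db, feed);
    }
}

fn main() {
    let db = Arc::new(Mutex::new(Db::new()));
    index_all(&db, &["feed-a", "feed-b"]);
    assert_eq!(db.lock().unwrap().len(), 2);
}
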
@@ -51,13 +51,13 @@ fn run() -> Result<()> {
     if args.up {
         let db = hammond_data::establish_connection();
         let db = Arc::new(Mutex::new(db));
-        index_feed::index_loop(db.clone(), false)?;
+        index_feed::index_loop(&db, false)?;
     }

     if args.dl >= 0 {
         let db = hammond_data::establish_connection();
         let db = Arc::new(Mutex::new(db));
-        downloader::latest_dl(db, args.dl as u32).unwrap();
+        downloader::latest_dl(&db, args.dl as u32).unwrap();
     }

     if args.latest {

@@ -1,6 +1,5 @@
 #![allow(dead_code)]
 #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
-#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]

 use diesel::prelude::*;
 use diesel;

@@ -82,12 +81,12 @@ fn insert_return_episode(con: &SqliteConnection, ep: &NewEpisode) -> Result<Epis
     Ok(dbqueries::load_episode(con, ep.uri.unwrap())?)
 }

-pub fn index_loop(db: Arc<Mutex<SqliteConnection>>, force: bool) -> Result<()> {
-    let mut f = fetch_feeds(db.clone(), force)?;
+pub fn index_loop(db: &Arc<Mutex<SqliteConnection>>, force: bool) -> Result<()> {
+    let mut f = fetch_feeds(db, force)?;

     f.par_iter_mut()
         .for_each(|&mut Feed(ref mut req, ref source)| {
-            let e = complete_index_from_source(req, source, db.clone());
+            let e = complete_index_from_source(req, source, db);
             if e.is_err() {
                 error!("Error While trying to update the database.");
                 error!("Error msg: {}", e.unwrap_err());

@@ -100,7 +99,7 @@ pub fn index_loop(db: Arc<Mutex<SqliteConnection>>, force: bool) -> Result<()> {
 pub fn complete_index_from_source(
     req: &mut reqwest::Response,
     source: &Source,
-    mutex: Arc<Mutex<SqliteConnection>>,
+    mutex: &Arc<Mutex<SqliteConnection>>,
 ) -> Result<()> {
     use std::io::Read;
     use std::str::FromStr;

@@ -115,7 +114,7 @@ pub fn complete_index_from_source(
 }

 fn complete_index(
-    connection: Arc<Mutex<SqliteConnection>>,
+    connection: &Arc<Mutex<SqliteConnection>>,
     chan: &rss::Channel,
     parent: &Source,
 ) -> Result<()> {

@@ -137,7 +136,7 @@ fn index_channel(db: &SqliteConnection, chan: &rss::Channel, parent: &Source) ->
     Ok(pd)
 }

-fn index_channel_items(connection: Arc<Mutex<SqliteConnection>>, it: &[rss::Item], pd: &Podcast) {
+fn index_channel_items(connection: &Arc<Mutex<SqliteConnection>>, it: &[rss::Item], pd: &Podcast) {
     it.par_iter()
         .map(|x| feedparser::parse_episode(x, pd.id()))
         .for_each(|x| {

@@ -152,7 +151,7 @@ fn index_channel_items(connection: Arc<Mutex<SqliteConnection>>, it: &[rss::Item
 }

 // Maybe this can be refactored into an Iterator for lazy evaluation.
-pub fn fetch_feeds(connection: Arc<Mutex<SqliteConnection>>, force: bool) -> Result<Vec<Feed>> {
+pub fn fetch_feeds(connection: &Arc<Mutex<SqliteConnection>>, force: bool) -> Result<Vec<Feed>> {
     let tempdb = connection.lock().unwrap();
     let mut feeds = dbqueries::get_sources(&tempdb)?;
     drop(tempdb);

@@ -272,10 +271,10 @@ mod tests {
             index_source(&tempdb, &NewSource::new_with_uri(feed)).unwrap()
         });

-        index_loop(db.clone(), true).unwrap();
+        index_loop(&db, true).unwrap();

         // Run again to cover Unique constrains erros.
-        index_loop(db.clone(), true).unwrap();
+        index_loop(&db, true).unwrap();
     }

     #[test]

@@ -316,7 +315,7 @@ mod tests {
             let chan = rss::Channel::read_from(BufReader::new(feed)).unwrap();

             // Index the channel
-            complete_index(m.clone(), &chan, &s).unwrap();
+            complete_index(&m, &chan, &s).unwrap();
         });

         // Assert the index rows equal the controlled results
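
In the indexing hunks above, the same borrowed handle is forwarded through index_loop -> fetch_feeds -> complete_index_from_source and captured by the rayon closure without a single clone; the Mutex makes the shared handle Sync, so borrowing suffices. A std-only sketch of that shape using scoped threads instead of rayon (names and types are illustrative, not from this repository):

use std::sync::{Arc, Mutex};
use std::thread;

type Db = Vec<u32>;

// Every worker borrows the same handle; no per-worker Arc clone is needed
// because scoped threads may hold non-'static references.
// (The Arc itself only matters for handles that must outlive the caller,
// e.g. detached threads or GTK callbacks.)
fn index_parallel(db: &Arc<Mutex<Db>>, items: &[u32]) {
    thread::scope(|s| {
        for &item in items {
            s.spawn(move || {
                // The guard drops at the end of the statement, so workers
                // only contend on the Mutex briefly.
                db.lock().unwrap().push(item * 2);
            });
        }
    });
}

fn main() {
    let db = Arc::new(Mutex::new(Db::new()));
    index_parallel(&db, &[1, 2, 3]);
    assert_eq!(db.lock().unwrap().len(), 3);
}
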
@@ -1,5 +1,4 @@
 #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
-#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]

 use reqwest;
 use hyper::header::*;

@@ -61,7 +60,7 @@ pub fn download_to(target: &str, url: &str) -> Result<()> {

 // Initial messy prototype, queries load alot of not needed stuff.
 // TODO: Refactor
-pub fn latest_dl(connection: Arc<Mutex<SqliteConnection>>, limit: u32) -> Result<()> {
+pub fn latest_dl(connection: &Arc<Mutex<SqliteConnection>>, limit: u32) -> Result<()> {
     let pds = {
         let tempdb = connection.lock().unwrap();
         dbqueries::get_podcasts(&tempdb)?

@@ -82,7 +81,7 @@ pub fn latest_dl(connection: Arc<Mutex<SqliteConnection>>, limit: u32) -> Result

     // Download the episodes
     eps.iter_mut().for_each(|ep| {
-        let x = get_episode(connection.clone(), ep, &dl_fold);
+        let x = get_episode(connection, ep, &dl_fold);
         if let Err(err) = x {
             error!("An Error occured while downloading an episode.");
             error!("Error: {}", err);

@@ -108,7 +107,7 @@ pub fn get_dl_folder(pd_title: &str) -> Result<String> {

 // TODO: Refactor
 pub fn get_episode(
-    connection: Arc<Mutex<SqliteConnection>>,
+    connection: &Arc<Mutex<SqliteConnection>>,
     ep: &mut Episode,
     dl_folder: &str,
 ) -> Result<()> {

@@ -1,5 +1,4 @@
 #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
-#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]

 use gtk;
 use gtk::prelude::*;

@@ -10,7 +9,7 @@ use utils;

 use std::sync::{Arc, Mutex};

-pub fn get_headerbar(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) -> gtk::HeaderBar {
+pub fn get_headerbar(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) -> gtk::HeaderBar {
     let builder = include_str!("../gtk/headerbar.ui");
     let builder = gtk::Builder::new_from_string(builder);

@@ -34,7 +33,7 @@ pub fn get_headerbar(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) -> gt

     add_button.connect_clicked(move |_| {
         let url = new_url.get_text().unwrap_or_default();
-        on_add_bttn_clicked(db_clone.clone(), &stack_clone, &url);
+        on_add_bttn_clicked(&db_clone, &stack_clone, &url);

         // TODO: lock the button instead of hiding and add notification of feed added.
         // TODO: map the spinner

@@ -55,22 +54,22 @@ pub fn get_headerbar(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) -> gt
     let db_clone = db.clone();
     // FIXME: There appears to be a memmory leak here.
     refresh_button.connect_clicked(move |_| {
-        utils::refresh_db(db_clone.clone(), &stack_clone);
+        utils::refresh_db(&db_clone, &stack_clone);
     });

     header
 }

-fn on_add_bttn_clicked(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack, url: &str) {
+fn on_add_bttn_clicked(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack, url: &str) {
     let source = {
         let tempdb = db.lock().unwrap();
-        index_feed::insert_return_source(&tempdb, &url)
+        index_feed::insert_return_source(&tempdb, url)
     };
     info!("{:?} feed added", url);

     if let Ok(mut s) = source {
         // update the db
-        utils::refresh_feed(db.clone(), &stack, &mut s);
+        utils::refresh_feed(db, stack, &mut s);
     } else {
         error!("Expected Error, feed probably already exists.");
         error!("Error: {:?}", source.unwrap_err());
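
The one clone the headerbar keeps per signal handler is deliberate: a connect_clicked closure must be 'static, so a single owned Arc is moved in up front and everything inside the handler borrows that clone. A GTK-free sketch of the same shape; the Button type below is a stand-in, not the gtk-rs API, and the names are illustrative:

use std::sync::{Arc, Mutex};

type Db = Vec<String>;

// Minimal stand-in for a GTK button: it just stores 'static callbacks.
struct Button {
    handlers: Vec<Box<dyn Fn() + 'static>>,
}

impl Button {
    fn new() -> Self {
        Button { handlers: Vec::new() }
    }

    // Like a GTK signal, the callback must be 'static, so it cannot hold
    // a borrow of the caller's locals; it has to own what it captures.
    fn connect_clicked<F: Fn() + 'static>(&mut self, f: F) {
        self.handlers.push(Box::new(f));
    }

    fn click(&self) {
        for handler in &self.handlers {
            handler();
        }
    }
}

fn on_add_clicked(db: &Arc<Mutex<Db>>, url: &str) {
    // Inside the handler everything borrows the single owned clone.
    db.lock().unwrap().push(url.to_owned());
}

fn main() {
    let db = Arc::new(Mutex::new(Db::new()));
    let mut add_button = Button::new();

    // One explicit clone moves into the closure; nothing else is cloned.
    let db_clone = Arc::clone(&db);
    add_button.connect_clicked(move || {
        on_add_clicked(&db_clone, "https://example.com/feed.xml");
    });

    add_button.click();
    assert_eq!(db.lock().unwrap().len(), 1);
}
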
@@ -1,5 +1,4 @@
 #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
-#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]

 extern crate gdk;
 extern crate gdk_pixbuf;

@@ -43,7 +42,7 @@ fn build_ui(app: &gtk::Application) {
     window.set_default_size(1050, 600);
     app.add_window(&window);
     // Setup the Stack that will magane the switche between podcasts_view and podcast_widget.
-    let stack = podcasts_view::setup_stack(db.clone());
+    let stack = podcasts_view::setup_stack(&db);
     window.add(&stack);

     // FIXME:

@@ -55,7 +54,7 @@ fn build_ui(app: &gtk::Application) {
     });

     // Get the headerbar
-    let header = headerbar::get_headerbar(db.clone(), &stack);
+    let header = headerbar::get_headerbar(&db, &stack);
     // TODO: add delay, cause else theres lock contention for the db obj.
     // utils::refresh_db(db.clone(), stack.clone());
     window.set_titlebar(&header);

@@ -1,5 +1,4 @@
 #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
-#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]

 use glib;

@@ -24,7 +23,7 @@ thread_local!(
     gtk::Stack,
     Receiver<bool>)>> = RefCell::new(None));

-pub fn refresh_db(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
+pub fn refresh_db(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
     // Create a async channel.
     let (sender, receiver) = channel();

@@ -38,7 +37,7 @@ pub fn refresh_db(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
     // TODO: add timeout option and error reporting.
     let db_clone = db.clone();
     thread::spawn(move || {
-        let t = hammond_data::index_feed::index_loop(db_clone, false);
+        let t = hammond_data::index_feed::index_loop(&db_clone, false);
         if t.is_err() {
             error!("Error While trying to update the database.");
             error!("Error msg: {}", t.unwrap_err());

@@ -50,7 +49,7 @@ pub fn refresh_db(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
     });
 }

-pub fn refresh_feed(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack, source: &mut Source) {
+pub fn refresh_feed(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack, source: &mut Source) {
     let (sender, receiver) = channel();

     let db_clone = db.clone();

@@ -69,7 +68,7 @@ pub fn refresh_feed(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack, source

         if let Ok(x) = foo_ {
             let Feed(mut req, s) = x;
-            let s = hammond_data::index_feed::complete_index_from_source(&mut req, &s, db_clone);
+            let s = hammond_data::index_feed::complete_index_from_source(&mut req, &s, &db_clone);
             if s.is_err() {
                 error!("Error While trying to update the database.");
                 error!("Error msg: {}", s.unwrap_err());

@@ -85,7 +84,7 @@ fn refresh_podcasts_view() -> glib::Continue {
     GLOBAL.with(|global| {
         if let Some((ref db, ref stack, ref reciever)) = *global.borrow() {
             if reciever.try_recv().is_ok() {
-                podcasts_view::update_podcasts_view(db.clone(), stack);
+                podcasts_view::update_podcasts_view(db, stack);
             }
         }
     });

@@ -13,7 +13,7 @@ use widgets::podcast::*;
 // NOT IN USE.
 // TRYING OUT STORELESS ATM.
 pub fn populate_podcasts_flowbox(
-    db: Arc<Mutex<SqliteConnection>>,
+    db: &Arc<Mutex<SqliteConnection>>,
     stack: &gtk::Stack,
     flowbox: &gtk::FlowBox,
 ) {

@@ -53,7 +53,7 @@ pub fn populate_podcasts_flowbox(
         f.connect_activate(move |_| {
             let old = stack_clone.get_child_by_name("pdw").unwrap();
             let pdw = podcast_widget(
-                db_clone.clone(),
+                &db_clone,
                 Some(title.as_str()),
                 description.as_ref().map(|x| x.as_str()),
                 pixbuf.clone(),

@@ -85,7 +85,7 @@ fn show_empty_view(stack: &gtk::Stack) {
 }

 pub fn pop_flowbox_no_store(
-    db: Arc<Mutex<SqliteConnection>>,
+    db: &Arc<Mutex<SqliteConnection>>,
     stack: &gtk::Stack,
     flowbox: &gtk::FlowBox,
 ) {

@@ -105,21 +105,21 @@ pub fn pop_flowbox_no_store(
             let stack = stack.clone();
             let parent = parent.clone();
             f.connect_activate(move |_| {
-                on_flowbox_child_activate(db.clone(), &stack, &parent, pixbuf.clone());
+                on_flowbox_child_activate(&db, &stack, &parent, pixbuf.clone());
             });
             flowbox.add(&f);
         });
     } else {
-        show_empty_view(&stack);
+        show_empty_view(stack);
     }
 }

-fn setup_podcast_widget(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
+fn setup_podcast_widget(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
     let pd_widget = podcast_widget(db, None, None, None);
     stack.add_named(&pd_widget, "pdw");
 }

-fn setup_podcasts_grid(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
+fn setup_podcasts_grid(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
     let builder = include_str!("../../gtk/podcasts_view.ui");
     let builder = gtk::Builder::new_from_string(builder);
     let grid: gtk::Grid = builder.get_object("grid").unwrap();

@@ -134,14 +134,14 @@ fn setup_podcasts_grid(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
     pop_flowbox_no_store(db, stack, &flowbox);
 }

-pub fn setup_stack(db: Arc<Mutex<SqliteConnection>>) -> gtk::Stack {
+pub fn setup_stack(db: &Arc<Mutex<SqliteConnection>>) -> gtk::Stack {
     let stack = gtk::Stack::new();
-    setup_podcast_widget(db.clone(), &stack);
+    setup_podcast_widget(db, &stack);
     setup_podcasts_grid(db, &stack);
     stack
 }

-pub fn update_podcasts_view(db: Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
+pub fn update_podcasts_view(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack) {
     let builder = include_str!("../../gtk/podcasts_view.ui");
     let builder = gtk::Builder::new_from_string(builder);
     let grid: gtk::Grid = builder.get_object("grid").unwrap();

@@ -1,6 +1,5 @@

 #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
-#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]

 use open;
 use diesel::prelude::SqliteConnection;

@@ -27,7 +26,7 @@ thread_local!(

 // TODO: REFACTOR AND MODULATE ME.
 fn epidose_widget(
-    connection: Arc<Mutex<SqliteConnection>>,
+    connection: &Arc<Mutex<SqliteConnection>>,
     episode: &mut Episode,
     pd_title: &str,
 ) -> gtk::Box {

@@ -83,7 +82,7 @@ fn epidose_widget(
     let dl_button_clone = dl_button.clone();
     dl_button.connect_clicked(move |_| {
         on_dl_clicked(
-            db.clone(),
+            &db,
             &pd_title_clone,
             &mut ep_clone.clone(),
             dl_button_clone.clone(),

@@ -96,7 +95,7 @@ fn epidose_widget(

 // TODO: show notification when dl is finished and block play_bttn till then.
 fn on_dl_clicked(
-    db: Arc<Mutex<SqliteConnection>>,
+    db: &Arc<Mutex<SqliteConnection>>,
     pd_title: &str,
     ep: &mut Episode,
     dl_bttn: gtk::Button,

@@ -112,9 +111,10 @@ fn on_dl_clicked(

     let pd_title = pd_title.to_owned();
     let mut ep = ep.clone();
+    let db = db.clone();
     thread::spawn(move || {
         let dl_fold = downloader::get_dl_folder(&pd_title).unwrap();
-        let e = downloader::get_episode(db, &mut ep, dl_fold.as_str());
+        let e = downloader::get_episode(&db, &mut ep, dl_fold.as_str());
         if let Err(err) = e {
             error!("Error while trying to download: {}", ep.uri());
             error!("Error: {}", err);
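
The added `let db = db.clone();` above is the counterpart case: thread::spawn needs a 'static closure, so the function clones the Arc it was lent right at the thread boundary, shadowing db with an owned handle. A reduced sketch of that shape (the download logic is a placeholder and the signatures are simplified, not the real ones):

use std::sync::{Arc, Mutex};
use std::thread;

type Db = Vec<String>;

fn get_episode(db: &Arc<Mutex<Db>>, uri: &str) {
    // Placeholder for the real download + database update.
    db.lock().unwrap().push(uri.to_owned());
}

// The caller only lends the handle ...
fn on_dl_clicked(db: &Arc<Mutex<Db>>, uri: &str) -> thread::JoinHandle<()> {
    let uri = uri.to_owned();
    // ... and the one clone happens right at the thread boundary,
    // shadowing `db` with an owned Arc the 'static closure can move.
    let db = db.clone();
    thread::spawn(move || {
        get_episode(&db, &uri);
    })
}

fn main() {
    let db = Arc::new(Mutex::new(Db::new()));
    on_dl_clicked(&db, "https://example.com/episode.mp3").join().unwrap();
    assert_eq!(db.lock().unwrap().len(), 1);
}
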
@@ -136,7 +136,7 @@ fn receive() -> glib::Continue {
     glib::Continue(false)
 }

-pub fn episodes_listbox(connection: Arc<Mutex<SqliteConnection>>, pd_title: &str) -> gtk::ListBox {
+pub fn episodes_listbox(connection: &Arc<Mutex<SqliteConnection>>, pd_title: &str) -> gtk::ListBox {
     // TODO: handle unwraps.
     let m = connection.lock().unwrap();
     let pd = dbqueries::load_podcast(&m, pd_title).unwrap();

@@ -145,7 +145,7 @@ pub fn episodes_listbox(connection: Arc<Mutex<SqliteConnection>>, pd_title: &str

     let list = gtk::ListBox::new();
     episodes.iter_mut().for_each(|ep| {
-        let w = epidose_widget(connection.clone(), ep, pd_title);
+        let w = epidose_widget(connection, ep, pd_title);
         list.add(&w)
     });

@@ -12,7 +12,7 @@ use std::sync::{Arc, Mutex};
 use widgets::episode::episodes_listbox;

 pub fn podcast_widget(
-    connection: Arc<Mutex<SqliteConnection>>,
+    connection: &Arc<Mutex<SqliteConnection>>,
     title: Option<&str>,
     description: Option<&str>,
     image: Option<Pixbuf>,

@@ -73,7 +73,7 @@ pub fn create_flowbox_child(title: &str, cover: Option<Pixbuf>) -> gtk::FlowBoxC
 }

 pub fn on_flowbox_child_activate(
-    db: Arc<Mutex<SqliteConnection>>,
+    db: &Arc<Mutex<SqliteConnection>>,
     stack: &gtk::Stack,
     parent: &Podcast,
     pixbuf: Option<Pixbuf>,

@@ -115,7 +115,7 @@ pub fn podcast_liststore(connection: &SqliteConnection) -> gtk::ListStore {
     podcast_model
 }

-// pub fn update_podcast_widget(db: &Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack, pd:
+// pub fn update_podcast_widget(db: &&Arc<Mutex<SqliteConnection>>, stack: &gtk::Stack, pd:
 // &Podcast){
 // let old = stack.get_child_by_name("pdw").unwrap();
 // let pdw = pd_widget_from_diesel_model(&db.clone(), pd, &stack.clone());

@@ -126,7 +126,7 @@ pub fn podcast_liststore(connection: &SqliteConnection) -> gtk::ListStore {
 // stack.set_visible_child_name(&vis);
 // }

-pub fn pd_widget_from_diesel_model(db: Arc<Mutex<SqliteConnection>>, pd: &Podcast) -> gtk::Box {
+pub fn pd_widget_from_diesel_model(db: &Arc<Mutex<SqliteConnection>>, pd: &Podcast) -> gtk::Box {
     let img = get_pixbuf_from_path(pd.image_uri(), pd.title());
     podcast_widget(db, Some(pd.title()), Some(pd.description()), img)
 }