Merge branch 'db-cleanup' into 'master'

Database cleanup

See merge request World/hammond!41

commit ae11084f48

hammond-data/diesel.toml | 6 ++++++ (new file)
@@ -0,0 +1,6 @@
+# For documentation on how to configure this file,
+# see diesel.rs/guides/configuring-diesel-cli
+
+[print_schema]
+file = "src/schema.rs"
+patch_file = "src/schema.patch"
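The `[print_schema]` block points the Diesel CLI at `src/schema.rs` and at a `src/schema.patch` that is applied on top of the generated code. As a rough, hypothetical sketch only (the regenerated `schema.rs` itself is not included in this merge request), the renamed tables defined by the migrations below would come out roughly like this; the `rowid` column is an assumption based on `schema.patch` and the `.select((rowid, ...))` queries later in the diff:

// Hypothetical sketch of the regenerated src/schema.rs; not part of this
// merge request. Column names and types are inferred from the migration SQL
// below, and `rowid` is assumed to be exposed via src/schema.patch.
#[macro_use]
extern crate diesel;

table! {
    shows (id) {
        id -> Integer,
        title -> Text,
        link -> Text,
        description -> Text,
        image_uri -> Nullable<Text>,
        source_id -> Integer,
    }
}

table! {
    episodes (title, show_id) {
        rowid -> Integer,
        title -> Text,
        uri -> Nullable<Text>,
        local_uri -> Nullable<Text>,
        description -> Nullable<Text>,
        epoch -> Integer,
        length -> Nullable<Integer>,
        duration -> Nullable<Integer>,
        guid -> Nullable<Text>,
        played -> Nullable<Integer>,
        show_id -> Integer,
    }
}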
@@ -0,0 +1,53 @@
+ALTER TABLE episode RENAME TO old_table;
+
+CREATE TABLE episode (
+    title TEXT NOT NULL,
+    uri TEXT,
+    local_uri TEXT,
+    description TEXT,
+    epoch INTEGER NOT NULL DEFAULT 0,
+    length INTEGER,
+    duration INTEGER,
+    guid TEXT,
+    played INTEGER,
+    podcast_id INTEGER NOT NULL,
+    favorite INTEGER DEFAULT 0,
+    archive INTEGER DEFAULT 0,
+    PRIMARY KEY (title, podcast_id)
+);
+
+INSERT INTO episode (title, uri, local_uri, description, epoch, length, duration, guid, played, podcast_id, favorite, archive)
+SELECT title, uri, local_uri, description, epoch, length, duration, guid, played, podcast_id, 0, 0
+FROM old_table;
+
+Drop table old_table;
+
+ALTER TABLE podcast RENAME TO old_table;
+CREATE TABLE `podcast` (
+    `id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
+    `title` TEXT NOT NULL,
+    `link` TEXT NOT NULL,
+    `description` TEXT NOT NULL,
+    `image_uri` TEXT,
+    `source_id` INTEGER NOT NULL UNIQUE,
+    `favorite` INTEGER NOT NULL DEFAULT 0,
+    `archive` INTEGER NOT NULL DEFAULT 0,
+    `always_dl` INTEGER NOT NULL DEFAULT 0
+);
+
+INSERT INTO podcast (
+    id,
+    title,
+    link,
+    description,
+    image_uri,
+    source_id
+) SELECT id,
+    title,
+    link,
+    description,
+    image_uri,
+    source_id
+FROM old_table;
+
+Drop table old_table;
@@ -0,0 +1,66 @@
+ALTER TABLE episode RENAME TO old_table;
+
+CREATE TABLE episode (
+    title TEXT NOT NULL,
+    uri TEXT,
+    local_uri TEXT,
+    description TEXT,
+    epoch INTEGER NOT NULL DEFAULT 0,
+    length INTEGER,
+    duration INTEGER,
+    guid TEXT,
+    played INTEGER,
+    podcast_id INTEGER NOT NULL,
+    PRIMARY KEY (title, podcast_id)
+);
+
+INSERT INTO episode (
+    title,
+    uri,
+    local_uri,
+    description,
+    epoch,
+    length,
+    duration,
+    guid,
+    played,
+    podcast_id
+) SELECT title,
+    uri,
+    local_uri,
+    description,
+    epoch, length,
+    duration,
+    guid,
+    played,
+    podcast_id
+FROM old_table;
+
+Drop table old_table;
+
+ALTER TABLE podcast RENAME TO old_table;
+CREATE TABLE `podcast` (
+    `id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
+    `title` TEXT NOT NULL,
+    `link` TEXT NOT NULL,
+    `description` TEXT NOT NULL,
+    `image_uri` TEXT,
+    `source_id` INTEGER NOT NULL UNIQUE
+);
+
+INSERT INTO podcast (
+    id,
+    title,
+    link,
+    description,
+    image_uri,
+    source_id
+) SELECT id,
+    title,
+    link,
+    description,
+    image_uri,
+    source_id
+FROM old_table;
+
+Drop table old_table;
@@ -0,0 +1,40 @@
+ALTER TABLE episodes RENAME TO old_table;
+ALTER TABLE shows RENAME TO podcast;
+
+CREATE TABLE episode (
+    title TEXT NOT NULL,
+    uri TEXT,
+    local_uri TEXT,
+    description TEXT,
+    epoch INTEGER NOT NULL DEFAULT 0,
+    length INTEGER,
+    duration INTEGER,
+    guid TEXT,
+    played INTEGER,
+    podcast_id INTEGER NOT NULL,
+    PRIMARY KEY (title, podcast_id)
+);
+
+INSERT INTO episode (
+    title,
+    uri,
+    local_uri,
+    description,
+    epoch,
+    length,
+    duration,
+    guid,
+    played,
+    podcast_id
+) SELECT title,
+    uri,
+    local_uri,
+    description,
+    epoch, length,
+    duration,
+    guid,
+    played,
+    show_id
+FROM old_table;
+
+Drop table old_table;
@@ -0,0 +1,40 @@
+ALTER TABLE episode RENAME TO old_table;
+ALTER TABLE podcast RENAME TO shows;
+
+CREATE TABLE episodes (
+    title TEXT NOT NULL,
+    uri TEXT,
+    local_uri TEXT,
+    description TEXT,
+    epoch INTEGER NOT NULL DEFAULT 0,
+    length INTEGER,
+    duration INTEGER,
+    guid TEXT,
+    played INTEGER,
+    show_id INTEGER NOT NULL,
+    PRIMARY KEY (title, show_id)
+);
+
+INSERT INTO episodes (
+    title,
+    uri,
+    local_uri,
+    description,
+    epoch,
+    length,
+    duration,
+    guid,
+    played,
+    show_id
+) SELECT title,
+    uri,
+    local_uri,
+    description,
+    epoch, length,
+    duration,
+    guid,
+    played,
+    podcast_id
+FROM old_table;
+
+Drop table old_table;
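All four migrations above use SQLite's rename-copy-drop pattern, since SQLite of that era could not rename or drop columns in place. They are applied through the crate's migration runner (`run_migration_on`, visible in the next hunk). As a minimal, hypothetical sketch of how embedded Diesel 1.x migrations are typically wired up — none of this code is part of the merge request, and the `"migrations"` path and error handling are assumptions:

// Minimal sketch, not taken from this merge request: running embedded
// migrations with the diesel_migrations 1.x API.
#[macro_use]
extern crate diesel_migrations;
extern crate diesel;

use diesel::sqlite::SqliteConnection;

// Embeds every migration found in the crate's migrations/ directory at
// compile time, including the episode/podcast -> episodes/shows renames above.
embed_migrations!("migrations");

fn run_pending_migrations(conn: &SqliteConnection) {
    // Applies only the migrations not yet recorded in
    // __diesel_schema_migrations, so re-running is a no-op.
    embedded_migrations::run(conn).expect("failed to run pending migrations");
}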
@@ -68,8 +68,8 @@ fn run_migration_on(connection: &SqliteConnection) -> Result<(), DataError> {
 pub fn truncate_db() -> Result<(), DataError> {
     let db = connection();
     let con = db.get()?;
-    con.execute("DELETE FROM episode")?;
-    con.execute("DELETE FROM podcast")?;
+    con.execute("DELETE FROM episodes")?;
+    con.execute("DELETE FROM shows")?;
     con.execute("DELETE FROM source")?;
     Ok(())
 }
@@ -23,106 +23,106 @@ pub fn get_sources() -> Result<Vec<Source>, DataError> {
         .map_err(From::from)
 }

-pub fn get_podcasts() -> Result<Vec<Podcast>, DataError> {
-    use schema::podcast::dsl::*;
+pub fn get_podcasts() -> Result<Vec<Show>, DataError> {
+    use schema::shows::dsl::*;
     let db = connection();
     let con = db.get()?;

-    podcast
+    shows
         .order(title.asc())
-        .load::<Podcast>(&con)
+        .load::<Show>(&con)
         .map_err(From::from)
 }

-pub fn get_podcasts_filter(filter_ids: &[i32]) -> Result<Vec<Podcast>, DataError> {
-    use schema::podcast::dsl::*;
+pub fn get_podcasts_filter(filter_ids: &[i32]) -> Result<Vec<Show>, DataError> {
+    use schema::shows::dsl::*;
     let db = connection();
     let con = db.get()?;

-    podcast
+    shows
         .order(title.asc())
         .filter(id.ne_all(filter_ids))
-        .load::<Podcast>(&con)
+        .load::<Show>(&con)
         .map_err(From::from)
 }

 pub fn get_episodes() -> Result<Vec<Episode>, DataError> {
-    use schema::episode::dsl::*;
+    use schema::episodes::dsl::*;
     let db = connection();
     let con = db.get()?;

-    episode
+    episodes
         .order(epoch.desc())
         .load::<Episode>(&con)
         .map_err(From::from)
 }

-pub(crate) fn get_downloaded_episodes() -> Result<Vec<EpisodeCleanerQuery>, DataError> {
-    use schema::episode::dsl::*;
+pub(crate) fn get_downloaded_episodes() -> Result<Vec<EpisodeCleanerModel>, DataError> {
+    use schema::episodes::dsl::*;
     let db = connection();
     let con = db.get()?;

-    episode
+    episodes
         .select((rowid, local_uri, played))
         .filter(local_uri.is_not_null())
-        .load::<EpisodeCleanerQuery>(&con)
+        .load::<EpisodeCleanerModel>(&con)
         .map_err(From::from)
 }

 // pub(crate) fn get_played_episodes() -> Result<Vec<Episode>, DataError> {
-// use schema::episode::dsl::*;
+// use schema::episodes::dsl::*;

 // let db = connection();
 // let con = db.get()?;
-// episode
+// episodes
 // .filter(played.is_not_null())
 // .load::<Episode>(&con)
 // .map_err(From::from)
 // }

-pub(crate) fn get_played_cleaner_episodes() -> Result<Vec<EpisodeCleanerQuery>, DataError> {
-    use schema::episode::dsl::*;
+pub(crate) fn get_played_cleaner_episodes() -> Result<Vec<EpisodeCleanerModel>, DataError> {
+    use schema::episodes::dsl::*;
     let db = connection();
     let con = db.get()?;

-    episode
+    episodes
         .select((rowid, local_uri, played))
         .filter(played.is_not_null())
-        .load::<EpisodeCleanerQuery>(&con)
+        .load::<EpisodeCleanerModel>(&con)
         .map_err(From::from)
 }

 pub fn get_episode_from_rowid(ep_id: i32) -> Result<Episode, DataError> {
-    use schema::episode::dsl::*;
+    use schema::episodes::dsl::*;
     let db = connection();
     let con = db.get()?;

-    episode
+    episodes
         .filter(rowid.eq(ep_id))
         .get_result::<Episode>(&con)
         .map_err(From::from)
 }

-pub fn get_episode_widget_from_rowid(ep_id: i32) -> Result<EpisodeWidgetQuery, DataError> {
-    use schema::episode::dsl::*;
+pub fn get_episode_widget_from_rowid(ep_id: i32) -> Result<EpisodeWidgetModel, DataError> {
+    use schema::episodes::dsl::*;
     let db = connection();
     let con = db.get()?;

-    episode
+    episodes
         .select((
-            rowid, title, uri, local_uri, epoch, length, duration, played, podcast_id,
+            rowid, title, uri, local_uri, epoch, length, duration, played, show_id,
         ))
         .filter(rowid.eq(ep_id))
-        .get_result::<EpisodeWidgetQuery>(&con)
+        .get_result::<EpisodeWidgetModel>(&con)
         .map_err(From::from)
 }

 pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result<Option<String>, DataError> {
-    use schema::episode::dsl::*;
+    use schema::episodes::dsl::*;
     let db = connection();
     let con = db.get()?;

-    episode
+    episodes
         .filter(rowid.eq(ep_id))
         .select(local_uri)
         .get_result::<Option<String>>(&con)
@@ -132,48 +132,48 @@ pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result<Option<String>, DataError> {
 pub fn get_episodes_widgets_filter_limit(
     filter_ids: &[i32],
     limit: u32,
-) -> Result<Vec<EpisodeWidgetQuery>, DataError> {
-    use schema::episode::dsl::*;
+) -> Result<Vec<EpisodeWidgetModel>, DataError> {
+    use schema::episodes::dsl::*;
     let db = connection();
     let con = db.get()?;
     let columns = (
-        rowid, title, uri, local_uri, epoch, length, duration, played, podcast_id,
+        rowid, title, uri, local_uri, epoch, length, duration, played, show_id,
     );

-    episode
+    episodes
         .select(columns)
         .order(epoch.desc())
-        .filter(podcast_id.ne_all(filter_ids))
+        .filter(show_id.ne_all(filter_ids))
         .limit(i64::from(limit))
-        .load::<EpisodeWidgetQuery>(&con)
+        .load::<EpisodeWidgetModel>(&con)
         .map_err(From::from)
 }

-pub fn get_podcast_from_id(pid: i32) -> Result<Podcast, DataError> {
-    use schema::podcast::dsl::*;
+pub fn get_podcast_from_id(pid: i32) -> Result<Show, DataError> {
+    use schema::shows::dsl::*;
     let db = connection();
     let con = db.get()?;

-    podcast
+    shows
         .filter(id.eq(pid))
-        .get_result::<Podcast>(&con)
+        .get_result::<Show>(&con)
         .map_err(From::from)
 }

-pub fn get_podcast_cover_from_id(pid: i32) -> Result<PodcastCoverQuery, DataError> {
-    use schema::podcast::dsl::*;
+pub fn get_podcast_cover_from_id(pid: i32) -> Result<ShowCoverModel, DataError> {
+    use schema::shows::dsl::*;
     let db = connection();
     let con = db.get()?;

-    podcast
+    shows
         .select((id, title, image_uri))
         .filter(id.eq(pid))
-        .get_result::<PodcastCoverQuery>(&con)
+        .get_result::<ShowCoverModel>(&con)
         .map_err(From::from)
 }

-pub fn get_pd_episodes(parent: &Podcast) -> Result<Vec<Episode>, DataError> {
-    use schema::episode::dsl::*;
+pub fn get_pd_episodes(parent: &Show) -> Result<Vec<Episode>, DataError> {
+    use schema::episodes::dsl::*;
     let db = connection();
     let con = db.get()?;

@@ -183,7 +183,7 @@ pub fn get_pd_episodes(parent: &Podcast) -> Result<Vec<Episode>, DataError> {
         .map_err(From::from)
 }

-pub fn get_pd_episodes_count(parent: &Podcast) -> Result<i64, DataError> {
+pub fn get_pd_episodes_count(parent: &Show) -> Result<i64, DataError> {
     let db = connection();
     let con = db.get()?;

@@ -193,24 +193,24 @@ pub fn get_pd_episodes_count(parent: &Podcast) -> Result<i64, DataError> {
         .map_err(From::from)
 }

-pub fn get_pd_episodeswidgets(parent: &Podcast) -> Result<Vec<EpisodeWidgetQuery>, DataError> {
-    use schema::episode::dsl::*;
+pub fn get_pd_episodeswidgets(parent: &Show) -> Result<Vec<EpisodeWidgetModel>, DataError> {
+    use schema::episodes::dsl::*;
     let db = connection();
     let con = db.get()?;
     let columns = (
-        rowid, title, uri, local_uri, epoch, length, duration, played, podcast_id,
+        rowid, title, uri, local_uri, epoch, length, duration, played, show_id,
     );

-    episode
+    episodes
         .select(columns)
-        .filter(podcast_id.eq(parent.id()))
+        .filter(show_id.eq(parent.id()))
         .order(epoch.desc())
-        .load::<EpisodeWidgetQuery>(&con)
+        .load::<EpisodeWidgetModel>(&con)
         .map_err(From::from)
 }

-pub fn get_pd_unplayed_episodes(parent: &Podcast) -> Result<Vec<Episode>, DataError> {
-    use schema::episode::dsl::*;
+pub fn get_pd_unplayed_episodes(parent: &Show) -> Result<Vec<Episode>, DataError> {
+    use schema::episodes::dsl::*;
     let db = connection();
     let con = db.get()?;

@@ -221,8 +221,8 @@ pub fn get_pd_unplayed_episodes(parent: &Podcast) -> Result<Vec<Episode>, DataError> {
         .map_err(From::from)
 }

-// pub(crate) fn get_pd_episodes_limit(parent: &Podcast, limit: u32) ->
-// Result<Vec<Episode>, DataError> { use schema::episode::dsl::*;
+// pub(crate) fn get_pd_episodes_limit(parent: &Show, limit: u32) ->
+// Result<Vec<Episode>, DataError> { use schema::episodes::dsl::*;

 // let db = connection();
 // let con = db.get()?;
@@ -256,25 +256,25 @@ pub fn get_source_from_id(id_: i32) -> Result<Source, DataError> {
         .map_err(From::from)
 }

-pub fn get_podcast_from_source_id(sid: i32) -> Result<Podcast, DataError> {
-    use schema::podcast::dsl::*;
+pub fn get_podcast_from_source_id(sid: i32) -> Result<Show, DataError> {
+    use schema::shows::dsl::*;
     let db = connection();
     let con = db.get()?;

-    podcast
+    shows
         .filter(source_id.eq(sid))
-        .get_result::<Podcast>(&con)
+        .get_result::<Show>(&con)
         .map_err(From::from)
 }

 pub fn get_episode_from_pk(title_: &str, pid: i32) -> Result<Episode, DataError> {
-    use schema::episode::dsl::*;
+    use schema::episodes::dsl::*;
     let db = connection();
     let con = db.get()?;

-    episode
+    episodes
         .filter(title.eq(title_))
-        .filter(podcast_id.eq(pid))
+        .filter(show_id.eq(pid))
         .get_result::<Episode>(&con)
         .map_err(From::from)
 }
@@ -283,19 +283,19 @@ pub(crate) fn get_episode_minimal_from_pk(
     title_: &str,
     pid: i32,
 ) -> Result<EpisodeMinimal, DataError> {
-    use schema::episode::dsl::*;
+    use schema::episodes::dsl::*;
     let db = connection();
     let con = db.get()?;

-    episode
-        .select((rowid, title, uri, epoch, duration, guid, podcast_id))
+    episodes
+        .select((rowid, title, uri, epoch, duration, guid, show_id))
         .filter(title.eq(title_))
-        .filter(podcast_id.eq(pid))
+        .filter(show_id.eq(pid))
         .get_result::<EpisodeMinimal>(&con)
         .map_err(From::from)
 }

-pub(crate) fn remove_feed(pd: &Podcast) -> Result<(), DataError> {
+pub(crate) fn remove_feed(pd: &Show) -> Result<(), DataError> {
     let db = connection();
     let con = db.get()?;

@@ -314,16 +314,16 @@ fn delete_source(con: &SqliteConnection, source_id: i32) -> QueryResult<usize> {
     diesel::delete(source.filter(id.eq(source_id))).execute(con)
 }

-fn delete_podcast(con: &SqliteConnection, podcast_id: i32) -> QueryResult<usize> {
-    use schema::podcast::dsl::*;
+fn delete_podcast(con: &SqliteConnection, show_id: i32) -> QueryResult<usize> {
+    use schema::shows::dsl::*;

-    diesel::delete(podcast.filter(id.eq(podcast_id))).execute(con)
+    diesel::delete(shows.filter(id.eq(show_id))).execute(con)
 }

 fn delete_podcast_episodes(con: &SqliteConnection, parent_id: i32) -> QueryResult<usize> {
-    use schema::episode::dsl::*;
+    use schema::episodes::dsl::*;

-    diesel::delete(episode.filter(podcast_id.eq(parent_id))).execute(con)
+    diesel::delete(episodes.filter(show_id.eq(parent_id))).execute(con)
 }

 pub fn source_exists(url: &str) -> Result<bool, DataError> {
@@ -338,70 +338,70 @@ pub fn source_exists(url: &str) -> Result<bool, DataError> {
 }

 pub(crate) fn podcast_exists(source_id_: i32) -> Result<bool, DataError> {
-    use schema::podcast::dsl::*;
+    use schema::shows::dsl::*;

     let db = connection();
     let con = db.get()?;

-    select(exists(podcast.filter(source_id.eq(source_id_))))
+    select(exists(shows.filter(source_id.eq(source_id_))))
         .get_result(&con)
         .map_err(From::from)
 }

 #[cfg_attr(rustfmt, rustfmt_skip)]
-pub(crate) fn episode_exists(title_: &str, podcast_id_: i32) -> Result<bool, DataError> {
-    use schema::episode::dsl::*;
+pub(crate) fn episode_exists(title_: &str, show_id_: i32) -> Result<bool, DataError> {
+    use schema::episodes::dsl::*;

     let db = connection();
     let con = db.get()?;

-    select(exists(episode.filter(podcast_id.eq(podcast_id_)).filter(title.eq(title_))))
+    select(exists(episodes.filter(show_id.eq(show_id_)).filter(title.eq(title_))))
         .get_result(&con)
         .map_err(From::from)
 }

-/// Check if the `episode table contains any rows
+/// Check if the `episodes table contains any rows
 ///
-/// Return true if `episode` table is populated.
+/// Return true if `episodes` table is populated.
 pub fn is_episodes_populated() -> Result<bool, DataError> {
-    use schema::episode::dsl::*;
+    use schema::episodes::dsl::*;

     let db = connection();
     let con = db.get()?;

-    select(exists(episode.as_query()))
+    select(exists(episodes.as_query()))
         .get_result(&con)
         .map_err(From::from)
 }

-/// Check if the `podcast` table contains any rows
+/// Check if the `shows` table contains any rows
 ///
-/// Return true if `podcast table is populated.
+/// Return true if `shows table is populated.
 pub fn is_podcasts_populated(filter_ids: &[i32]) -> Result<bool, DataError> {
-    use schema::podcast::dsl::*;
+    use schema::shows::dsl::*;

     let db = connection();
     let con = db.get()?;

-    select(exists(podcast.filter(id.ne_all(filter_ids))))
+    select(exists(shows.filter(id.ne_all(filter_ids))))
         .get_result(&con)
         .map_err(From::from)
 }

 pub(crate) fn index_new_episodes(eps: &[NewEpisode]) -> Result<(), DataError> {
-    use schema::episode::dsl::*;
+    use schema::episodes::dsl::*;
     let db = connection();
     let con = db.get()?;

-    diesel::insert_into(episode)
+    diesel::insert_into(episodes)
         .values(eps)
         .execute(&*con)
         .map_err(From::from)
         .map(|_| ())
 }

-pub fn update_none_to_played_now(parent: &Podcast) -> Result<usize, DataError> {
-    use schema::episode::dsl::*;
+pub fn update_none_to_played_now(parent: &Show) -> Result<usize, DataError> {
+    use schema::episodes::dsl::*;
     let db = connection();
     let con = db.get()?;

@@ -9,7 +9,7 @@ use rss;
 use dbqueries;
 use errors::DataError;
 use models::{Index, IndexState, Update};
-use models::{NewEpisode, NewEpisodeMinimal, NewPodcast, Podcast};
+use models::{NewEpisode, NewEpisodeMinimal, NewShow, Show};

 /// Wrapper struct that hold a `Source` id and the `rss::Channel`
 /// that corresponds to the `Source.uri` field.
@@ -31,15 +31,15 @@ impl Feed {
             .and_then(move |pd| self.index_channel_items(pd))
     }

-    fn parse_podcast(&self) -> NewPodcast {
-        NewPodcast::new(&self.channel, self.source_id)
+    fn parse_podcast(&self) -> NewShow {
+        NewShow::new(&self.channel, self.source_id)
     }

-    fn parse_podcast_async(&self) -> impl Future<Item = NewPodcast, Error = DataError> + Send {
+    fn parse_podcast_async(&self) -> impl Future<Item = NewShow, Error = DataError> + Send {
         ok(self.parse_podcast())
     }

-    fn index_channel_items(self, pd: Podcast) -> impl Future<Item = (), Error = DataError> + Send {
+    fn index_channel_items(self, pd: Show) -> impl Future<Item = (), Error = DataError> + Send {
         let stream = stream::iter_ok::<_, DataError>(self.channel.into_items());

         // Parse the episodes
@@ -65,12 +65,12 @@ fn determine_ep_state(
     item: &rss::Item,
 ) -> Result<IndexState<NewEpisode>, DataError> {
     // Check if feed exists
-    let exists = dbqueries::episode_exists(ep.title(), ep.podcast_id())?;
+    let exists = dbqueries::episode_exists(ep.title(), ep.show_id())?;

     if !exists {
         Ok(IndexState::Index(ep.into_new_episode(item)))
     } else {
-        let old = dbqueries::get_episode_minimal_from_pk(ep.title(), ep.podcast_id())?;
+        let old = dbqueries::get_episode_minimal_from_pk(ep.title(), ep.show_id())?;
         let rowid = old.rowid();

         if ep != old {
@@ -204,7 +204,7 @@ mod tests {
         let file = fs::File::open(path).unwrap();
         let channel = Channel::read_from(BufReader::new(file)).unwrap();

-        let pd = NewPodcast::new(&channel, 42);
+        let pd = NewShow::new(&channel, 42);
         assert_eq!(feed.parse_podcast(), pd);
     }

@@ -81,7 +81,7 @@ pub mod utils;

 pub use feed::{Feed, FeedBuilder};
 pub use models::Save;
-pub use models::{Episode, EpisodeWidgetQuery, Podcast, PodcastCoverQuery, Source};
+pub use models::{Episode, EpisodeWidgetModel, Show, ShowCoverModel, Source};

 // Set the user agent, See #53 for more
 // Keep this in sync with Tor-browser releases
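The crate root now re-exports the renamed models, so code that consumes hammond-data switches over with a type rename only; the `dbqueries` functions keep their existing names. A minimal, hypothetical caller-side sketch — the `title()` getter and the public visibility of `dbqueries` are assumptions, not something shown in this diff:

// Hypothetical usage sketch; assumes `dbqueries` is public and that `Show`
// exposes a `title()` getter, neither of which appears in this diff.
extern crate hammond_data;

use hammond_data::dbqueries;
use hammond_data::Show;

fn print_show_titles() {
    // get_podcasts() now yields Vec<Show> instead of Vec<Podcast>.
    let shows: Vec<Show> = dbqueries::get_podcasts().expect("failed to query shows");
    for show in &shows {
        println!("{}", show.title());
    }
}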
@@ -5,14 +5,14 @@ use diesel::SaveChangesDsl;

 use database::connection;
 use errors::DataError;
-use models::{Podcast, Save};
-use schema::episode;
+use models::{Save, Show};
+use schema::episodes;

 #[derive(Queryable, Identifiable, AsChangeset, Associations, PartialEq)]
-#[table_name = "episode"]
+#[table_name = "episodes"]
 #[changeset_options(treat_none_as_null = "true")]
-#[primary_key(title, podcast_id)]
-#[belongs_to(Podcast, foreign_key = "podcast_id")]
+#[primary_key(title, show_id)]
+#[belongs_to(Show, foreign_key = "show_id")]
 #[derive(Debug, Clone)]
 /// Diesel Model of the episode table.
 pub struct Episode {
@@ -26,9 +26,7 @@ pub struct Episode {
     duration: Option<i32>,
     guid: Option<String>,
     played: Option<i32>,
-    favorite: bool,
-    archive: bool,
-    podcast_id: i32,
+    show_id: i32,
 }

 impl Save<Episode> for Episode {
@@ -154,32 +152,9 @@ impl Episode {
         self.played = value;
     }

-    /// Represents the archiving policy for the episode.
-    pub fn archive(&self) -> bool {
-        self.archive
-    }
-
-    /// Set the `archive` policy.
-    ///
-    /// If true, the download cleanr will ignore the episode
-    /// and the corresponding media value will never be automaticly deleted.
-    pub fn set_archive(&mut self, b: bool) {
-        self.archive = b
-    }
-
-    /// Get the `favorite` status of the `Episode`.
-    pub fn favorite(&self) -> bool {
-        self.favorite
-    }
-
-    /// Set `favorite` status.
-    pub fn set_favorite(&mut self, b: bool) {
-        self.favorite = b
-    }
-
-    /// `Podcast` table foreign key.
-    pub fn podcast_id(&self) -> i32 {
-        self.podcast_id
+    /// `Show` table foreign key.
+    pub fn show_id(&self) -> i32 {
+        self.show_id
     }

     /// Sets the `played` value with the current `epoch` timestap and save it.
@@ -191,12 +166,12 @@ impl Episode {
 }

 #[derive(Queryable, AsChangeset, PartialEq)]
-#[table_name = "episode"]
+#[table_name = "episodes"]
 #[changeset_options(treat_none_as_null = "true")]
-#[primary_key(title, podcast_id)]
+#[primary_key(title, show_id)]
 #[derive(Debug, Clone)]
 /// Diesel Model to be used for constructing `EpisodeWidgets`.
-pub struct EpisodeWidgetQuery {
+pub struct EpisodeWidgetModel {
     rowid: i32,
     title: String,
     uri: Option<String>,
@@ -205,14 +180,12 @@ pub struct EpisodeWidgetQuery {
     length: Option<i32>,
     duration: Option<i32>,
     played: Option<i32>,
-    // favorite: bool,
-    // archive: bool,
-    podcast_id: i32,
+    show_id: i32,
 }

-impl From<Episode> for EpisodeWidgetQuery {
-    fn from(e: Episode) -> EpisodeWidgetQuery {
-        EpisodeWidgetQuery {
+impl From<Episode> for EpisodeWidgetModel {
+    fn from(e: Episode) -> EpisodeWidgetModel {
+        EpisodeWidgetModel {
             rowid: e.rowid,
             title: e.title,
             uri: e.uri,
@@ -221,30 +194,30 @@ impl From<Episode> for EpisodeWidgetQuery {
             length: e.length,
             duration: e.duration,
             played: e.played,
-            podcast_id: e.podcast_id,
+            show_id: e.show_id,
         }
     }
 }

-impl Save<usize> for EpisodeWidgetQuery {
+impl Save<usize> for EpisodeWidgetModel {
     type Error = DataError;

     /// Helper method to easily save/"sync" current state of self to the
     /// Database.
     fn save(&self) -> Result<usize, Self::Error> {
-        use schema::episode::dsl::*;
+        use schema::episodes::dsl::*;

         let db = connection();
         let tempdb = db.get()?;

-        diesel::update(episode.filter(rowid.eq(self.rowid)))
+        diesel::update(episodes.filter(rowid.eq(self.rowid)))
             .set(self)
             .execute(&*tempdb)
             .map_err(From::from)
     }
 }

-impl EpisodeWidgetQuery {
+impl EpisodeWidgetModel {
     /// Get the value of the sqlite's `ROW_ID`
     pub fn rowid(&self) -> i32 {
         self.rowid
@@ -319,32 +292,9 @@ impl EpisodeWidgetQuery {
         self.played = value;
     }

-    // /// Represents the archiving policy for the episode.
-    // pub fn archive(&self) -> bool {
-    // self.archive
-    // }
-
-    // /// Set the `archive` policy.
-    // ///
-    // /// If true, the download cleanr will ignore the episode
-    // /// and the corresponding media value will never be automaticly deleted.
-    // pub fn set_archive(&mut self, b: bool) {
-    // self.archive = b
-    // }
-
-    // /// Get the `favorite` status of the `Episode`.
-    // pub fn favorite(&self) -> bool {
-    // self.favorite
-    // }
-
-    // /// Set `favorite` status.
-    // pub fn set_favorite(&mut self, b: bool) {
-    // self.favorite = b
-    // }
-
-    /// `Podcast` table foreign key.
-    pub fn podcast_id(&self) -> i32 {
-        self.podcast_id
+    /// `Show` table foreign key.
+    pub fn show_id(&self) -> i32 {
+        self.show_id
     }

     /// Sets the `played` value with the current `epoch` timestap and save it.
@@ -356,38 +306,38 @@ impl EpisodeWidgetQuery {
 }

 #[derive(Queryable, AsChangeset, PartialEq)]
-#[table_name = "episode"]
+#[table_name = "episodes"]
 #[changeset_options(treat_none_as_null = "true")]
-#[primary_key(title, podcast_id)]
+#[primary_key(title, show_id)]
 #[derive(Debug, Clone)]
 /// Diesel Model to be used internal with the `utils::checkup` function.
-pub struct EpisodeCleanerQuery {
+pub struct EpisodeCleanerModel {
     rowid: i32,
     local_uri: Option<String>,
     played: Option<i32>,
 }

-impl Save<usize> for EpisodeCleanerQuery {
+impl Save<usize> for EpisodeCleanerModel {
     type Error = DataError;

     /// Helper method to easily save/"sync" current state of self to the
     /// Database.
     fn save(&self) -> Result<usize, Self::Error> {
-        use schema::episode::dsl::*;
+        use schema::episodes::dsl::*;

         let db = connection();
         let tempdb = db.get()?;

-        diesel::update(episode.filter(rowid.eq(self.rowid)))
+        diesel::update(episodes.filter(rowid.eq(self.rowid)))
             .set(self)
             .execute(&*tempdb)
             .map_err(From::from)
     }
 }

-impl From<Episode> for EpisodeCleanerQuery {
-    fn from(e: Episode) -> EpisodeCleanerQuery {
-        EpisodeCleanerQuery {
+impl From<Episode> for EpisodeCleanerModel {
+    fn from(e: Episode) -> EpisodeCleanerModel {
+        EpisodeCleanerModel {
             rowid: e.rowid(),
             local_uri: e.local_uri,
             played: e.played,
@@ -395,7 +345,7 @@ impl From<Episode> for EpisodeCleanerQuery {
     }
 }

-impl EpisodeCleanerQuery {
+impl EpisodeCleanerModel {
     /// Get the value of the sqlite's `ROW_ID`
     pub fn rowid(&self) -> i32 {
         self.rowid
@@ -428,9 +378,9 @@ impl EpisodeCleanerQuery {
 }

 #[derive(Queryable, AsChangeset, PartialEq)]
-#[table_name = "episode"]
+#[table_name = "episodes"]
 #[changeset_options(treat_none_as_null = "true")]
-#[primary_key(title, podcast_id)]
+#[primary_key(title, show_id)]
 #[derive(Debug, Clone)]
 /// Diesel Model to be used for FIXME.
 pub struct EpisodeMinimal {
@@ -440,7 +390,7 @@ pub struct EpisodeMinimal {
     epoch: i32,
     duration: Option<i32>,
     guid: Option<String>,
-    podcast_id: i32,
+    show_id: i32,
 }

 impl From<Episode> for EpisodeMinimal {
@@ -452,7 +402,7 @@ impl From<Episode> for EpisodeMinimal {
             guid: e.guid,
             epoch: e.epoch,
             duration: e.duration,
-            podcast_id: e.podcast_id,
+            show_id: e.show_id,
         }
     }
 }
@@ -495,8 +445,8 @@ impl EpisodeMinimal {
         self.duration
     }

-    /// `Podcast` table foreign key.
-    pub fn podcast_id(&self) -> i32 {
-        self.podcast_id
+    /// `Show` table foreign key.
+    pub fn show_id(&self) -> i32 {
+        self.show_id
     }
 }
@@ -1,26 +1,26 @@
 mod new_episode;
-mod new_podcast;
+mod new_show;
 mod new_source;

 mod episode;
-mod podcast;
+mod show;
 mod source;

 // use futures::prelude::*;
 // use futures::future::*;

-pub(crate) use self::episode::EpisodeCleanerQuery;
+pub(crate) use self::episode::EpisodeCleanerModel;
 pub(crate) use self::new_episode::{NewEpisode, NewEpisodeMinimal};
-pub(crate) use self::new_podcast::NewPodcast;
+pub(crate) use self::new_show::NewShow;
 pub(crate) use self::new_source::NewSource;

 #[cfg(test)]
 pub(crate) use self::new_episode::NewEpisodeBuilder;
 #[cfg(test)]
-pub(crate) use self::new_podcast::NewPodcastBuilder;
+pub(crate) use self::new_show::NewShowBuilder;

-pub use self::episode::{Episode, EpisodeMinimal, EpisodeWidgetQuery};
-pub use self::podcast::{Podcast, PodcastCoverQuery};
+pub use self::episode::{Episode, EpisodeMinimal, EpisodeWidgetModel};
+pub use self::show::{Show, ShowCoverModel};
 pub use self::source::Source;

 #[derive(Debug, Clone, PartialEq)]
@@ -9,11 +9,11 @@ use dbqueries;
 use errors::DataError;
 use models::{Episode, EpisodeMinimal, Index, Insert, Update};
 use parser;
-use schema::episode;
+use schema::episodes;
 use utils::url_cleaner;

 #[derive(Insertable, AsChangeset)]
-#[table_name = "episode"]
+#[table_name = "episodes"]
 #[derive(Debug, Clone, Default, Builder, PartialEq)]
 #[builder(default)]
 #[builder(derive(Debug))]
@@ -26,7 +26,7 @@ pub(crate) struct NewEpisode {
     duration: Option<i32>,
     guid: Option<String>,
     epoch: i32,
-    podcast_id: i32,
+    show_id: i32,
 }

 impl From<NewEpisodeMinimal> for NewEpisode {
@@ -36,7 +36,7 @@ impl From<NewEpisodeMinimal> for NewEpisode {
             .uri(e.uri)
             .duration(e.duration)
             .epoch(e.epoch)
-            .podcast_id(e.podcast_id)
+            .show_id(e.show_id)
             .guid(e.guid)
             .build()
             .unwrap()
@@ -47,12 +47,12 @@ impl Insert<()> for NewEpisode {
     type Error = DataError;

     fn insert(&self) -> Result<(), DataError> {
-        use schema::episode::dsl::*;
+        use schema::episodes::dsl::*;
         let db = connection();
         let con = db.get()?;

         info!("Inserting {:?}", self.title);
-        diesel::insert_into(episode)
+        diesel::insert_into(episodes)
             .values(self)
             .execute(&con)
             .map_err(From::from)
@@ -64,12 +64,12 @@ impl Update<()> for NewEpisode {
     type Error = DataError;

     fn update(&self, episode_id: i32) -> Result<(), DataError> {
-        use schema::episode::dsl::*;
+        use schema::episodes::dsl::*;
         let db = connection();
         let con = db.get()?;

         info!("Updating {:?}", self.title);
-        diesel::update(episode.filter(rowid.eq(episode_id)))
+        diesel::update(episodes.filter(rowid.eq(episode_id)))
             .set(self)
             .execute(&con)
             .map_err(From::from)
@@ -83,10 +83,10 @@ impl Index<()> for NewEpisode {
     // Does not update the episode description if it's the only thing that has
     // changed.
     fn index(&self) -> Result<(), DataError> {
-        let exists = dbqueries::episode_exists(self.title(), self.podcast_id())?;
+        let exists = dbqueries::episode_exists(self.title(), self.show_id())?;

         if exists {
-            let other = dbqueries::get_episode_minimal_from_pk(self.title(), self.podcast_id())?;
+            let other = dbqueries::get_episode_minimal_from_pk(self.title(), self.show_id())?;

             if self != &other {
                 self.update(other.rowid())
@@ -106,7 +106,7 @@ impl PartialEq<EpisodeMinimal> for NewEpisode {
             && (self.duration() == other.duration())
             && (self.epoch() == other.epoch())
             && (self.guid() == other.guid())
-            && (self.podcast_id() == other.podcast_id())
+            && (self.show_id() == other.show_id())
     }
 }

@@ -117,7 +117,7 @@ impl PartialEq<Episode> for NewEpisode {
             && (self.duration() == other.duration())
             && (self.epoch() == other.epoch())
             && (self.guid() == other.guid())
-            && (self.podcast_id() == other.podcast_id())
+            && (self.show_id() == other.show_id())
             && (self.description() == other.description())
             && (self.length() == other.length())
     }
@@ -126,14 +126,14 @@ impl PartialEq<Episode> for NewEpisode {
 impl NewEpisode {
     /// Parses an `rss::Item` into a `NewEpisode` Struct.
     #[allow(dead_code)]
-    pub(crate) fn new(item: &rss::Item, podcast_id: i32) -> Result<Self, DataError> {
-        NewEpisodeMinimal::new(item, podcast_id).map(|ep| ep.into_new_episode(item))
+    pub(crate) fn new(item: &rss::Item, show_id: i32) -> Result<Self, DataError> {
+        NewEpisodeMinimal::new(item, show_id).map(|ep| ep.into_new_episode(item))
     }

     #[allow(dead_code)]
     pub(crate) fn to_episode(&self) -> Result<Episode, DataError> {
         self.index()?;
-        dbqueries::get_episode_from_pk(&self.title, self.podcast_id).map_err(From::from)
+        dbqueries::get_episode_from_pk(&self.title, self.show_id).map_err(From::from)
     }
 }

@@ -167,13 +167,13 @@ impl NewEpisode {
         self.length
     }

-    pub(crate) fn podcast_id(&self) -> i32 {
-        self.podcast_id
+    pub(crate) fn show_id(&self) -> i32 {
+        self.show_id
     }
 }

 #[derive(Insertable, AsChangeset)]
-#[table_name = "episode"]
+#[table_name = "episodes"]
 #[derive(Debug, Clone, Builder, PartialEq)]
 #[builder(derive(Debug))]
 #[builder(setter(into))]
@@ -183,7 +183,7 @@ pub(crate) struct NewEpisodeMinimal {
     duration: Option<i32>,
     epoch: i32,
     guid: Option<String>,
-    podcast_id: i32,
+    show_id: i32,
 }

 impl PartialEq<EpisodeMinimal> for NewEpisodeMinimal {
@@ -193,7 +193,7 @@ impl PartialEq<EpisodeMinimal> for NewEpisodeMinimal {
             && (self.duration() == other.duration())
             && (self.epoch() == other.epoch())
             && (self.guid() == other.guid())
-            && (self.podcast_id() == other.podcast_id())
+            && (self.show_id() == other.show_id())
     }
 }

@@ -241,7 +241,7 @@ impl NewEpisodeMinimal {
             .duration(duration)
             .epoch(epoch)
             .guid(guid)
-            .podcast_id(parent_id)
+            .show_id(parent_id)
             .build()
             .map_err(From::from)
     }
@@ -263,7 +263,7 @@ impl NewEpisodeMinimal {
             .uri(self.uri)
             .duration(self.duration)
             .epoch(self.epoch)
-            .podcast_id(self.podcast_id)
+            .show_id(self.show_id)
             .guid(self.guid)
             .length(length)
             .description(description)
@@ -294,8 +294,8 @@ impl NewEpisodeMinimal {
         self.epoch
     }

-    pub(crate) fn podcast_id(&self) -> i32 {
-        self.podcast_id
+    pub(crate) fn show_id(&self) -> i32 {
+        self.show_id
     }
 }
 #[cfg(test)]
@@ -324,7 +324,7 @@ mod tests {
                .guid(Some(String::from("7df4070a-9832-11e7-adac-cb37b05d5e24")))
                .epoch(1505296800)
                .duration(Some(4171))
-               .podcast_id(42)
+               .show_id(42)
                .build()
                .unwrap()
        };
@@ -337,7 +337,7 @@ mod tests {
                .guid(Some(String::from("7c207a24-e33f-11e6-9438-eb45dcf36a1d")))
                .epoch(1502272800)
                .duration(Some(4415))
-               .podcast_id(42)
+               .show_id(42)
                .build()
                .unwrap()
        };
@@ -358,7 +358,7 @@ mod tests {
                .length(Some(66738886))
                .epoch(1505296800)
                .duration(Some(4171))
-               .podcast_id(42)
+               .show_id(42)
                .build()
                .unwrap()
        };
@@ -382,7 +382,7 @@ mod tests {
                .length(Some(67527575))
                .epoch(1502272800)
                .duration(Some(4415))
-               .podcast_id(42)
+               .show_id(42)
                .build()
                .unwrap()
        };
@@ -397,7 +397,7 @@ mod tests {
                .length(Some(66738886))
                .epoch(1505296800)
                .duration(Some(424242))
-               .podcast_id(42)
+               .show_id(42)
                .build()
                .unwrap()
        };
@@ -410,7 +410,7 @@ mod tests {
                .guid(Some(String::from("78A682B4-73E8-47B8-88C0-1BE62DD4EF9D")))
                .epoch(1505280282)
                .duration(Some(5733))
-               .podcast_id(42)
+               .show_id(42)
                .build()
                .unwrap()
        };
@@ -423,7 +423,7 @@ mod tests {
                .guid(Some(String::from("1CE57548-B36C-4F14-832A-5D5E0A24E35B")))
                .epoch(1504670247)
                .duration(Some(4491))
-               .podcast_id(42)
+               .show_id(42)
                .build()
                .unwrap()
        };
@@ -443,7 +443,7 @@ mod tests {
                .length(Some(46479789))
                .epoch(1505280282)
                .duration(Some(5733))
-               .podcast_id(42)
+               .show_id(42)
                .build()
                .unwrap()
        };
@@ -465,7 +465,7 @@ mod tests {
                .length(Some(36544272))
                .epoch(1504670247)
                .duration(Some(4491))
-               .podcast_id(42)
+               .show_id(42)
                .build()
                .unwrap()
        };
@@ -558,7 +558,7 @@ mod tests {
         let episode = channel.items().iter().nth(14).unwrap();
         let new_ep = NewEpisode::new(&episode, 42).unwrap();
         new_ep.insert().unwrap();
-        let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.podcast_id()).unwrap();
+        let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.show_id()).unwrap();

         assert_eq!(new_ep, ep);
         assert_eq!(&new_ep, &*EXPECTED_INTERCEPTED_1);
@@ -567,7 +567,7 @@ mod tests {
         let episode = channel.items().iter().nth(15).unwrap();
         let new_ep = NewEpisode::new(&episode, 42).unwrap();
         new_ep.insert().unwrap();
-        let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.podcast_id()).unwrap();
+        let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.show_id()).unwrap();

         assert_eq!(new_ep, ep);
         assert_eq!(&new_ep, &*EXPECTED_INTERCEPTED_2);
@@ -581,21 +581,15 @@ mod tests {

         let updated = &*UPDATED_DURATION_INTERCEPTED_1;
         updated.update(old.rowid()).unwrap();
-        let mut new = dbqueries::get_episode_from_pk(old.title(), old.podcast_id()).unwrap();
+        let new = dbqueries::get_episode_from_pk(old.title(), old.show_id()).unwrap();

-        // Assert that updating does not change the rowid and podcast_id
+        // Assert that updating does not change the rowid and show_id
         assert_ne!(old, new);
         assert_eq!(old.rowid(), new.rowid());
-        assert_eq!(old.podcast_id(), new.podcast_id());
+        assert_eq!(old.show_id(), new.show_id());

         assert_eq!(updated, &new);
         assert_ne!(updated, &old);
-
-        new.set_archive(true);
-        new.save().unwrap();
-
-        let new2 = dbqueries::get_episode_from_pk(old.title(), old.podcast_id()).unwrap();
-        assert_eq!(true, new2.archive());
     }

     #[test]
@@ -608,7 +602,7 @@ mod tests {
         // Second identical, This should take the early return path
         assert!(expected.index().is_ok());
         // Get the episode
-        let old = dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap();
+        let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap();
         // Assert that NewPodcast is equal to the Indexed one
         assert_eq!(*expected, old);

@@ -617,23 +611,22 @@ mod tests {
         // Update the podcast
         assert!(updated.index().is_ok());
         // Get the new Podcast
-        let new = dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap();
+        let new = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap();
         // Assert it's diff from the old one.
         assert_ne!(new, old);
         assert_eq!(*updated, new);
         assert_eq!(new.rowid(), old.rowid());
-        assert_eq!(new.podcast_id(), old.podcast_id());
+        assert_eq!(new.show_id(), old.show_id());
     }

     #[test]
     fn test_new_episode_to_episode() {
         let expected = &*EXPECTED_INTERCEPTED_1;
-        let updated = &*UPDATED_DURATION_INTERCEPTED_1;

         // Assert insert() produces the same result that you would get with to_podcast()
         truncate_db().unwrap();
         expected.insert().unwrap();
-        let old = dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap();
+        let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap();
         let ep = expected.to_episode().unwrap();
         assert_eq!(old, ep);

@ -642,17 +635,7 @@ mod tests {
|
|||||||
let ep = expected.to_episode().unwrap();
|
let ep = expected.to_episode().unwrap();
|
||||||
// This should error as a unique constrain violation
|
// This should error as a unique constrain violation
|
||||||
assert!(expected.insert().is_err());
|
assert!(expected.insert().is_err());
|
||||||
let mut old =
|
let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap();
|
||||||
dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap();
|
|
||||||
assert_eq!(old, ep);
|
assert_eq!(old, ep);
|
||||||
|
|
||||||
old.set_archive(true);
|
|
||||||
old.save().unwrap();
|
|
||||||
|
|
||||||
// Assert that it does not mess with user preferences
|
|
||||||
let ep = updated.to_episode().unwrap();
|
|
||||||
let old = dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap();
|
|
||||||
assert_eq!(old, ep);
|
|
||||||
assert_eq!(old.archive(), true);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -4,21 +4,21 @@ use diesel::prelude::*;
use rss;

use errors::DataError;
-use models::Podcast;
+use models::Show;
use models::{Index, Insert, Update};
-use schema::podcast;
+use schema::shows;

use database::connection;
use dbqueries;
use utils::url_cleaner;

#[derive(Insertable, AsChangeset)]
-#[table_name = "podcast"]
+#[table_name = "shows"]
#[derive(Debug, Clone, Default, Builder, PartialEq)]
#[builder(default)]
#[builder(derive(Debug))]
#[builder(setter(into))]
-pub(crate) struct NewPodcast {
+pub(crate) struct NewShow {
title: String,
link: String,
description: String,
@ -26,15 +26,15 @@ pub(crate) struct NewPodcast {
source_id: i32,
}

-impl Insert<()> for NewPodcast {
+impl Insert<()> for NewShow {
type Error = DataError;

fn insert(&self) -> Result<(), Self::Error> {
-use schema::podcast::dsl::*;
+use schema::shows::dsl::*;
let db = connection();
let con = db.get()?;

-diesel::insert_into(podcast)
+diesel::insert_into(shows)
.values(self)
.execute(&con)
.map(|_| ())
@ -42,16 +42,16 @@ impl Insert<()> for NewPodcast {
}
}

-impl Update<()> for NewPodcast {
+impl Update<()> for NewShow {
type Error = DataError;

-fn update(&self, podcast_id: i32) -> Result<(), Self::Error> {
-use schema::podcast::dsl::*;
+fn update(&self, show_id: i32) -> Result<(), Self::Error> {
+use schema::shows::dsl::*;
let db = connection();
let con = db.get()?;

info!("Updating {}", self.title);
-diesel::update(podcast.filter(id.eq(podcast_id)))
+diesel::update(shows.filter(id.eq(show_id)))
.set(self)
.execute(&con)
.map(|_| ())
@ -61,7 +61,7 @@ impl Update<()> for NewPodcast {

// TODO: Maybe return an Enum<Action(Resut)> Instead.
// It would make unti testing better too.
-impl Index<()> for NewPodcast {
+impl Index<()> for NewShow {
type Error = DataError;

fn index(&self) -> Result<(), DataError> {
@ -81,8 +81,8 @@ impl Index<()> for NewPodcast {
}
}

-impl PartialEq<Podcast> for NewPodcast {
-fn eq(&self, other: &Podcast) -> bool {
+impl PartialEq<Show> for NewShow {
+fn eq(&self, other: &Show) -> bool {
(self.link() == other.link())
&& (self.title() == other.title())
&& (self.image_uri() == other.image_uri())
@ -91,9 +91,9 @@ impl PartialEq<Podcast> for NewPodcast {
}
}

-impl NewPodcast {
-/// Parses a `rss::Channel` into a `NewPodcast` Struct.
-pub(crate) fn new(chan: &rss::Channel, source_id: i32) -> NewPodcast {
+impl NewShow {
+/// Parses a `rss::Channel` into a `NewShow` Struct.
+pub(crate) fn new(chan: &rss::Channel, source_id: i32) -> NewShow {
let title = chan.title().trim();
let link = url_cleaner(chan.link().trim());

@ -111,7 +111,7 @@ impl NewPodcast {
// If itunes is None, try to get the channel.image from the rss spec
let image_uri = itunes_img.or_else(|| chan.image().map(|s| s.url().trim().to_owned()));

-NewPodcastBuilder::default()
+NewShowBuilder::default()
.title(title)
.description(description)
.link(link)
@ -122,14 +122,14 @@ impl NewPodcast {
}

// Look out for when tryinto lands into stable.
-pub(crate) fn to_podcast(&self) -> Result<Podcast, DataError> {
+pub(crate) fn to_podcast(&self) -> Result<Show, DataError> {
self.index()?;
dbqueries::get_podcast_from_source_id(self.source_id).map_err(From::from)
}
}

// Ignore the following geters. They are used in unit tests mainly.
-impl NewPodcast {
+impl NewShow {
#[allow(dead_code)]
pub(crate) fn source_id(&self) -> i32 {
self.source_id
@ -160,14 +160,14 @@ mod tests {
use rss::Channel;

use database::truncate_db;
-use models::{NewPodcastBuilder, Save};
+use models::NewShowBuilder;

use std::fs::File;
use std::io::BufReader;

-// Pre-built expected NewPodcast structs.
+// Pre-built expected NewShow structs.
lazy_static! {
-static ref EXPECTED_INTERCEPTED: NewPodcast = {
+static ref EXPECTED_INTERCEPTED: NewShow = {
let descr = "The people behind The Intercept’s fearless reporting and incisive \
commentary—Jeremy Scahill, Glenn Greenwald, Betsy Reed and \
others—discuss the crucial issues of our time: national security, civil \
@ -175,7 +175,7 @@ mod tests {
artists, thinkers, and newsmakers who challenge our preconceptions about \
the world we live in.";

-NewPodcastBuilder::default()
+NewShowBuilder::default()
.title("Intercepted with Jeremy Scahill")
.link("https://theintercept.com/podcasts")
.description(descr)
@ -188,12 +188,12 @@ mod tests {
.build()
.unwrap()
};
-static ref EXPECTED_LUP: NewPodcast = {
+static ref EXPECTED_LUP: NewShow = {
let descr = "An open show powered by community LINUX Unplugged takes the best \
attributes of open collaboration and focuses them into a weekly \
lifestyle show about Linux.";

-NewPodcastBuilder::default()
+NewShowBuilder::default()
.title("LINUX Unplugged Podcast")
.link("http://www.jupiterbroadcasting.com/")
.description(descr)
@ -204,7 +204,7 @@ mod tests {
.build()
.unwrap()
};
-static ref EXPECTED_TIPOFF: NewPodcast = {
+static ref EXPECTED_TIPOFF: NewShow = {
let desc = "<p>Welcome to The Tip Off- the podcast where we take you behind the \
scenes of some of the best investigative journalism from recent years. \
Each episode we’ll be digging into an investigative scoop- hearing from \
@ -215,7 +215,7 @@ mod tests {
complicated detective work that goes into doing great investigative \
journalism- then this is the podcast for you.</p>";

-NewPodcastBuilder::default()
+NewShowBuilder::default()
.title("The Tip Off")
.link("http://www.acast.com/thetipoff")
.description(desc)
@ -227,7 +227,7 @@ mod tests {
.build()
.unwrap()
};
-static ref EXPECTED_STARS: NewPodcast = {
+static ref EXPECTED_STARS: NewShow = {
let descr = "<p>The first audio drama from Tor Labs and Gideon Media, Steal the Stars \
is a gripping noir science fiction thriller in 14 episodes: Forbidden \
love, a crashed UFO, an alien body, and an impossible heist unlike any \
@ -237,7 +237,7 @@ mod tests {
b183-7311d2e436c3/b3a4aa57a576bb662191f2a6bc2a436c8c4ae256ecffaff5c4c54fd42e\
923914941c264d01efb1833234b52c9530e67d28a8cebbe3d11a4bc0fbbdf13ecdf1c3.jpeg";

-NewPodcastBuilder::default()
+NewShowBuilder::default()
.title("Steal the Stars")
.link("http://tor-labs.com/")
.description(descr)
@ -246,12 +246,12 @@ mod tests {
.build()
.unwrap()
};
-static ref EXPECTED_CODE: NewPodcast = {
+static ref EXPECTED_CODE: NewShow = {
let descr = "A podcast about humans and technology. Panelists: Coraline Ada Ehmke, \
David Brady, Jessica Kerr, Jay Bobo, Astrid Countee and Sam \
Livingston-Gray. Brought to you by @therubyrep.";

-NewPodcastBuilder::default()
+NewShowBuilder::default()
.title("Greater Than Code")
.link("https://www.greaterthancode.com/")
.description(descr)
@ -262,8 +262,8 @@ mod tests {
.build()
.unwrap()
};
-static ref EXPECTED_ELLINOFRENEIA: NewPodcast = {
-NewPodcastBuilder::default()
+static ref EXPECTED_ELLINOFRENEIA: NewShow = {
+NewShowBuilder::default()
.title("Ελληνοφρένεια")
.link("https://ellinofreneia.sealabs.net/feed.rss")
.description("Ανεπίσημο feed της Ελληνοφρένειας")
@ -272,8 +272,8 @@ mod tests {
.build()
.unwrap()
};
-static ref UPDATED_DESC_INTERCEPTED: NewPodcast = {
-NewPodcastBuilder::default()
+static ref UPDATED_DESC_INTERCEPTED: NewShow = {
+NewShowBuilder::default()
.title("Intercepted with Jeremy Scahill")
.link("https://theintercept.com/podcasts")
.description("New Description")
@ -293,7 +293,7 @@ mod tests {
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();

-let pd = NewPodcast::new(&channel, 42);
+let pd = NewShow::new(&channel, 42);
assert_eq!(*EXPECTED_INTERCEPTED, pd);
}

@ -302,7 +302,7 @@ mod tests {
let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();

-let pd = NewPodcast::new(&channel, 42);
+let pd = NewShow::new(&channel, 42);
assert_eq!(*EXPECTED_LUP, pd);
}

@ -311,7 +311,7 @@ mod tests {
let file = File::open("tests/feeds/2018-01-20-TheTipOff.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();

-let pd = NewPodcast::new(&channel, 42);
+let pd = NewShow::new(&channel, 42);
assert_eq!(*EXPECTED_TIPOFF, pd);
}

@ -320,7 +320,7 @@ mod tests {
let file = File::open("tests/feeds/2018-01-20-StealTheStars.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();

-let pd = NewPodcast::new(&channel, 42);
+let pd = NewShow::new(&channel, 42);
assert_eq!(*EXPECTED_STARS, pd);
}

@ -329,7 +329,7 @@ mod tests {
let file = File::open("tests/feeds/2018-01-20-GreaterThanCode.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();

-let pd = NewPodcast::new(&channel, 42);
+let pd = NewShow::new(&channel, 42);
assert_eq!(*EXPECTED_CODE, pd);
}

@ -338,7 +338,7 @@ mod tests {
let file = File::open("tests/feeds/2018-03-28-Ellinofreneia.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();

-let pd = NewPodcast::new(&channel, 42);
+let pd = NewShow::new(&channel, 42);
assert_eq!(*EXPECTED_ELLINOFRENEIA, pd);
}

@ -349,7 +349,7 @@ mod tests {
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
let channel = Channel::read_from(BufReader::new(file)).unwrap();

-let npd = NewPodcast::new(&channel, 42);
+let npd = NewShow::new(&channel, 42);
npd.insert().unwrap();
let pd = dbqueries::get_podcast_from_source_id(42).unwrap();

@ -369,20 +369,13 @@ mod tests {

let updated = &*UPDATED_DESC_INTERCEPTED;
updated.update(old.id()).unwrap();
-let mut new = dbqueries::get_podcast_from_source_id(42).unwrap();
+let new = dbqueries::get_podcast_from_source_id(42).unwrap();

assert_ne!(old, new);
assert_eq!(old.id(), new.id());
assert_eq!(old.source_id(), new.source_id());
assert_eq!(updated, &new);
assert_ne!(updated, &old);
-
-// Chech that the update does not override user preferences.
-new.set_archive(true);
-new.save().unwrap();
-
-let new2 = dbqueries::get_podcast_from_source_id(42).unwrap();
-assert_eq!(true, new2.archive());
}

#[test]
@ -395,14 +388,14 @@ mod tests {
assert!(EXPECTED_INTERCEPTED.index().is_ok());
// Get the podcast
let old = dbqueries::get_podcast_from_source_id(42).unwrap();
-// Assert that NewPodcast is equal to the Indexed one
+// Assert that NewShow is equal to the Indexed one
assert_eq!(&*EXPECTED_INTERCEPTED, &old);

let updated = &*UPDATED_DESC_INTERCEPTED;

// Update the podcast
assert!(updated.index().is_ok());
-// Get the new Podcast
+// Get the new Show
let new = dbqueries::get_podcast_from_source_id(42).unwrap();
// Assert it's diff from the old one.
assert_ne!(new, old);
@ -424,16 +417,7 @@ mod tests {
let pd = EXPECTED_INTERCEPTED.to_podcast().unwrap();
// This should error as a unique constrain violation
assert!(EXPECTED_INTERCEPTED.insert().is_err());
-let mut old = dbqueries::get_podcast_from_source_id(42).unwrap();
-assert_eq!(old, pd);
-
-old.set_archive(true);
-old.save().unwrap();
-
-// Assert that it does not mess with user preferences
-let pd = UPDATED_DESC_INTERCEPTED.to_podcast().unwrap();
let old = dbqueries::get_podcast_from_source_id(42).unwrap();
assert_eq!(old, pd);
-assert_eq!(old.archive(), true);
}
}

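For orientation, here is a minimal sketch of how the renamed types above fit together after this change. It is not part of the merge request: it only mirrors calls visible in the hunks (NewShow::new, to_podcast, dbqueries::get_podcast_from_source_id), the helper name is made up, and the import paths are assumptions; since NewShow is pub(crate), something like this would have to live inside hammond-data itself.

// Sketch only: crate-internal helper, assuming the re-exports shown in the diff above.
use rss::Channel;

use errors::DataError;
use models::{NewShow, Show};

/// Parse an RSS channel, make sure it is indexed, and return the stored Show row.
fn index_channel(chan: &Channel, source_id: i32) -> Result<Show, DataError> {
    // NewShow::new builds the insertable struct from the feed metadata.
    let new_show = NewShow::new(chan, source_id);
    // to_podcast() runs index() (insert or update) and then reads the row back.
    new_show.to_podcast()
}
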
@ -3,42 +3,37 @@ use diesel::SaveChangesDsl;
use database::connection;
use errors::DataError;
use models::{Save, Source};
-use schema::podcast;
+use schema::shows;

-use std::sync::Arc;
-

#[derive(Queryable, Identifiable, AsChangeset, Associations, PartialEq)]
#[belongs_to(Source, foreign_key = "source_id")]
#[changeset_options(treat_none_as_null = "true")]
-#[table_name = "podcast"]
+#[table_name = "shows"]
#[derive(Debug, Clone)]
-/// Diesel Model of the podcast table.
-pub struct Podcast {
+/// Diesel Model of the shows table.
+pub struct Show {
id: i32,
title: String,
link: String,
description: String,
image_uri: Option<String>,
-favorite: bool,
-archive: bool,
-always_dl: bool,
source_id: i32,
}

-impl Save<Podcast> for Podcast {
+impl Save<Show> for Show {
type Error = DataError;

/// Helper method to easily save/"sync" current state of self to the
/// Database.
-fn save(&self) -> Result<Podcast, Self::Error> {
+fn save(&self) -> Result<Show, Self::Error> {
let db = connection();
let tempdb = db.get()?;

-self.save_changes::<Podcast>(&*tempdb).map_err(From::from)
+self.save_changes::<Show>(&*tempdb).map_err(From::from)
}
}

-impl Podcast {
+impl Show {
/// Get the Feed `id`.
pub fn id(&self) -> i32 {
self.id
@ -56,7 +51,7 @@ impl Podcast {
&self.link
}

-/// Set the Podcast/Feed `link`.
+/// Set the Show/Feed `link`.
pub fn set_link(&mut self, value: &str) {
self.link = value.to_string();
}
@ -83,41 +78,6 @@ impl Podcast {
self.image_uri = value.map(|x| x.to_string());
}

-/// Represents the archiving policy for the episode.
-pub fn archive(&self) -> bool {
-self.archive
-}
-
-/// Set the `archive` policy.
-pub fn set_archive(&mut self, b: bool) {
-self.archive = b
-}
-
-/// Get the `favorite` status of the `Podcast` Feed.
-pub fn favorite(&self) -> bool {
-self.favorite
-}
-
-/// Set `favorite` status.
-pub fn set_favorite(&mut self, b: bool) {
-self.favorite = b
-}
-
-/// Represents the download policy for the `Podcast` Feed.
-///
-/// Reserved for the use with a Download manager, yet to be implemented.
-///
-/// If true Podcast Episode should be downloaded automaticly/skipping
-/// the selection queue.
-pub fn always_download(&self) -> bool {
-self.always_dl
-}
-
-/// Set the download policy.
-pub fn set_always_download(&mut self, b: bool) {
-self.always_dl = b
-}
-
/// `Source` table foreign key.
pub fn source_id(&self) -> i32 {
self.source_id
@ -125,17 +85,17 @@ impl Podcast {
}

#[derive(Queryable, Debug, Clone)]
-/// Diesel Model of the podcast cover query.
-/// Used for fetching information about a Podcast's cover.
-pub struct PodcastCoverQuery {
+/// Diesel Model of the Show cover query.
+/// Used for fetching information about a Show's cover.
+pub struct ShowCoverModel {
id: i32,
title: String,
image_uri: Option<String>,
}

-impl From<Podcast> for PodcastCoverQuery {
-fn from(p: Podcast) -> PodcastCoverQuery {
-PodcastCoverQuery {
+impl From<Show> for ShowCoverModel {
+fn from(p: Show) -> ShowCoverModel {
+ShowCoverModel {
id: p.id(),
title: p.title,
image_uri: p.image_uri,
@ -143,17 +103,7 @@ impl From<Podcast> for PodcastCoverQuery {
}
}

-impl From<Arc<Podcast>> for PodcastCoverQuery {
-fn from(p: Arc<Podcast>) -> PodcastCoverQuery {
-PodcastCoverQuery {
-id: p.id(),
-title: p.title.clone(),
-image_uri: p.image_uri.clone(),
-}
-}
-}
-
-impl PodcastCoverQuery {
+impl ShowCoverModel {
/// Get the Feed `id`.
pub fn id(&self) -> i32 {
self.id

29
hammond-data/src/schema.patch
Normal file
@ -0,0 +1,29 @@
diff --git a/hammond-data/src/schema.rs b/hammond-data/src/schema.rs
index 03cbed0..88f1622 100644
--- a/hammond-data/src/schema.rs
+++ b/hammond-data/src/schema.rs
@@ -1,8 +1,11 @@
+#![allow(warnings)]
+
table! {
episodes (title, show_id) {
+ rowid -> Integer,
title -> Text,
uri -> Nullable<Text>,
local_uri -> Nullable<Text>,
description -> Nullable<Text>,
epoch -> Integer,
length -> Nullable<Integer>,
@@ -30,11 +33,7 @@ table! {
uri -> Text,
last_modified -> Nullable<Text>,
http_etag -> Nullable<Text>,
}
}

-allow_tables_to_appear_in_same_query!(
- episodes,
- shows,
- source,
-);
+allow_tables_to_appear_in_same_query!(episodes, shows, source);

@ -1,5 +1,7 @@
+#![allow(warnings)]
+
table! {
-episode (title, podcast_id) {
+episodes (title, show_id) {
rowid -> Integer,
title -> Text,
uri -> Nullable<Text>,
@ -10,22 +12,17 @@ table! {
duration -> Nullable<Integer>,
guid -> Nullable<Text>,
played -> Nullable<Integer>,
-favorite -> Bool,
-archive -> Bool,
-podcast_id -> Integer,
+show_id -> Integer,
}
}

table! {
-podcast (id) {
+shows (id) {
id -> Integer,
title -> Text,
link -> Text,
description -> Text,
image_uri -> Nullable<Text>,
-favorite -> Bool,
-archive -> Bool,
-always_dl -> Bool,
source_id -> Integer,
}
}
@ -39,4 +36,4 @@ table! {
}
}

-allow_tables_to_appear_in_same_query!(episode, podcast, source,);
+allow_tables_to_appear_in_same_query!(episodes, shows, source);

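As a quick illustration of what the renamed schema means for callers, here is a hedged sketch using the same diesel dsl pattern the Insert/Update impls above already use, pointed at the new `shows` table. The helper name, the plain SqliteConnection argument, and the placement inside hammond-data are assumptions for illustration only, not code from this merge request.

// Sketch only: assumes it lives inside hammond-data next to the schema shown above.
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use schema::shows::dsl::*;

/// Fetch the title of a show by its `shows.id` primary key.
fn show_title(con: &SqliteConnection, show: i32) -> Result<String, diesel::result::Error> {
    // Filter on the renamed `shows` table and pull back a single Text column.
    shows.filter(id.eq(show)).select(title).first(con)
}
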
@ -7,7 +7,7 @@ use url::{Position, Url};

use dbqueries;
use errors::DataError;
-use models::{EpisodeCleanerQuery, Podcast, Save};
+use models::{EpisodeCleanerModel, Save, Show};
use xdg_dirs::DL_DIR;

use std::fs;
@ -59,7 +59,7 @@ fn played_cleaner(cleanup_date: DateTime<Utc>) -> Result<(), DataError> {
}

/// Check `ep.local_uri` field and delete the file it points to.
-fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<(), DataError> {
+fn delete_local_content(ep: &mut EpisodeCleanerModel) -> Result<(), DataError> {
if ep.local_uri().is_some() {
let uri = ep.local_uri().unwrap().to_owned();
if Path::new(&uri).exists() {
@ -108,9 +108,9 @@ pub fn url_cleaner(s: &str) -> String {
}
}

-/// Returns the URI of a Podcast Downloads given it's title.
+/// Returns the URI of a Show Downloads given it's title.
pub fn get_download_folder(pd_title: &str) -> Result<String, DataError> {
-// It might be better to make it a hash of the title or the podcast rowid
+// It might be better to make it a hash of the title or the Show rowid
let download_fold = format!("{}/{}", DL_DIR.to_str().unwrap(), pd_title);

// Create the folder
@ -123,7 +123,7 @@ pub fn get_download_folder(pd_title: &str) -> Result<String, DataError> {
/// Removes all the entries associated with the given show from the database,
/// and deletes all of the downloaded content.
// TODO: Write Tests
-pub fn delete_show(pd: &Podcast) -> Result<(), DataError> {
+pub fn delete_show(pd: &Show) -> Result<(), DataError> {
dbqueries::remove_feed(pd)?;
info!("{} was removed succesfully.", pd.title());

@ -183,7 +183,7 @@ mod tests {
// Setup episodes
let n1 = NewEpisodeBuilder::default()
.title("foo_bar".to_string())
-.podcast_id(0)
+.show_id(0)
.build()
.unwrap()
.to_episode()
@ -191,14 +191,14 @@ mod tests {

let n2 = NewEpisodeBuilder::default()
.title("bar_baz".to_string())
-.podcast_id(1)
+.show_id(1)
.build()
.unwrap()
.to_episode()
.unwrap();

-let mut ep1 = dbqueries::get_episode_from_pk(n1.title(), n1.podcast_id()).unwrap();
-let mut ep2 = dbqueries::get_episode_from_pk(n2.title(), n2.podcast_id()).unwrap();
+let mut ep1 = dbqueries::get_episode_from_pk(n1.title(), n1.show_id()).unwrap();
+let mut ep2 = dbqueries::get_episode_from_pk(n2.title(), n2.show_id()).unwrap();
ep1.set_local_uri(Some(valid_path.to_str().unwrap()));
ep2.set_local_uri(Some(bad_path.to_str().unwrap()));

@ -230,7 +230,7 @@ mod tests {
#[test]
fn test_download_cleaner() {
let _tmp_dir = helper_db();
-let mut episode: EpisodeCleanerQuery =
+let mut episode: EpisodeCleanerModel =
dbqueries::get_episode_from_pk("foo_bar", 0).unwrap().into();

let valid_path = episode.local_uri().unwrap().to_owned();

@ -12,7 +12,7 @@ use std::path::Path;
use std::sync::{Arc, Mutex};

use hammond_data::xdg_dirs::HAMMOND_CACHE;
-use hammond_data::{EpisodeWidgetQuery, PodcastCoverQuery, Save};
+use hammond_data::{EpisodeWidgetModel, Save, ShowCoverModel};

// use failure::Error;
use errors::DownloadError;
@ -161,7 +161,7 @@ fn save_io(

// TODO: Refactor
pub fn get_episode(
-ep: &mut EpisodeWidgetQuery,
+ep: &mut EpisodeWidgetModel,
download_folder: &str,
progress: Option<Arc<Mutex<DownloadProgress>>>,
) -> Result<(), DownloadError> {
@ -196,7 +196,7 @@ pub fn get_episode(
Ok(())
}

-pub fn cache_image(pd: &PodcastCoverQuery) -> Result<String, DownloadError> {
+pub fn cache_image(pd: &ShowCoverModel) -> Result<String, DownloadError> {
let url = pd
.image_uri()
.ok_or_else(|| DownloadError::NoImageLocation)?

@ -10,7 +10,7 @@ use gtk::prelude::*;
use gtk::SettingsExt as GtkSettingsExt;

use crossbeam_channel::{unbounded, Receiver, Sender};
-use hammond_data::Podcast;
+use hammond_data::Show;

use headerbar::Header;
use settings::{self, WindowGeometry};
@ -43,7 +43,7 @@ pub enum Action {
RefreshEpisodesView,
RefreshEpisodesViewBGR,
RefreshShowsView,
-ReplaceWidget(Arc<Podcast>),
+ReplaceWidget(Arc<Show>),
RefreshWidgetIfSame(i32),
ShowWidgetAnimated,
ShowShowsAnimated,
@ -51,8 +51,8 @@ pub enum Action {
HeaderBarNormal,
HeaderBarShowUpdateIndicator,
HeaderBarHideUpdateIndicator,
-MarkAllPlayerNotification(Arc<Podcast>),
-RemoveShow(Arc<Podcast>),
+MarkAllPlayerNotification(Arc<Show>),
+RemoveShow(Arc<Show>),
ErrorNotification(String),
InitEpisode(i32),
}

@ -6,7 +6,7 @@ use crossbeam_channel::Sender;
use failure::Error;

use hammond_data::dbqueries;
-use hammond_data::Podcast;
+use hammond_data::Show;

use app::Action;
use widgets::{ShowWidget, ShowsView};
@ -89,13 +89,11 @@ impl PopulatedStack {
Ok(())
}

-pub fn replace_widget(&mut self, pd: Arc<Podcast>) -> Result<(), Error> {
+pub fn replace_widget(&mut self, pd: Arc<Show>) -> Result<(), Error> {
let old = self.show.container.clone();

// save the ShowWidget vertical scrollabar alignment
-self.show
-.podcast_id()
-.map(|id| self.show.save_vadjustment(id));
+self.show.show_id().map(|id| self.show.save_vadjustment(id));

let new = ShowWidget::new(pd, self.sender.clone());
self.show = new;
@ -113,7 +111,7 @@ impl PopulatedStack {

pub fn update_widget(&mut self) -> Result<(), Error> {
let old = self.show.container.clone();
-let id = self.show.podcast_id();
+let id = self.show.show_id();
if id.is_none() {
return Ok(());
}
@ -131,9 +129,9 @@ impl PopulatedStack {
Ok(())
}

-// Only update widget if its podcast_id is equal to pid.
+// Only update widget if its show_id is equal to pid.
pub fn update_widget_if_same(&mut self, pid: i32) -> Result<(), Error> {
-if self.show.podcast_id() != Some(pid) {
+if self.show.show_id() != Some(pid) {
debug!("Different widget. Early return");
return Ok(());
}

@ -226,15 +226,15 @@ lazy_static! {
// GObjects do not implement Send trait, so SendCell is a way around that.
// Also lazy_static requires Sync trait, so that's what the mutexes are.
// TODO: maybe use something that would just scale to requested size?
-pub fn set_image_from_path(image: &gtk::Image, podcast_id: i32, size: u32) -> Result<(), Error> {
+pub fn set_image_from_path(image: &gtk::Image, show_id: i32, size: u32) -> Result<(), Error> {
// Check if there's an active download about this show cover.
// If there is, a callback will be set so this function will be called again.
// If the download succedes, there should be a quick return from the pixbuf cache_image
// If it fails another download will be scheduled.
if let Ok(guard) = COVER_DL_REGISTRY.read() {
-if guard.contains(&podcast_id) {
+if guard.contains(&show_id) {
let callback = clone!(image => move || {
-let _ = set_image_from_path(&image, podcast_id, size);
+let _ = set_image_from_path(&image, show_id, size);
glib::Continue(false)
});
gtk::timeout_add(250, callback);
@ -245,7 +245,7 @@ pub fn set_image_from_path(image: &gtk::Image, podcast_id: i32, size: u32) -> Re
if let Ok(hashmap) = CACHED_PIXBUFS.read() {
// Check if the requested (cover + size) is already in the chache
// and if so do an early return after that.
-if let Some(guard) = hashmap.get(&(podcast_id, size)) {
+if let Some(guard) = hashmap.get(&(show_id, size)) {
guard
.lock()
.map_err(|err| format_err!("SendCell Mutex: {}", err))
@ -263,11 +263,11 @@ pub fn set_image_from_path(image: &gtk::Image, podcast_id: i32, size: u32) -> Re
let (sender, receiver) = unbounded();
THREADPOOL.spawn(move || {
if let Ok(mut guard) = COVER_DL_REGISTRY.write() {
-guard.insert(podcast_id);
-if let Ok(pd) = dbqueries::get_podcast_cover_from_id(podcast_id) {
+guard.insert(show_id);
+if let Ok(pd) = dbqueries::get_podcast_cover_from_id(show_id) {
sender.send(downloader::cache_image(&pd));
}
-guard.remove(&podcast_id);
+guard.remove(&show_id);
}
});

@ -278,7 +278,7 @@ pub fn set_image_from_path(image: &gtk::Image, podcast_id: i32, size: u32) -> Re
if let Ok(path) = path {
if let Ok(px) = Pixbuf::new_from_file_at_scale(&path, s, s, true) {
if let Ok(mut hashmap) = CACHED_PIXBUFS.write() {
-hashmap.insert((podcast_id, size), Mutex::new(SendCell::new(px.clone())));
+hashmap.insert((show_id, size), Mutex::new(SendCell::new(px.clone())));
image.set_from_pixbuf(&px);
}
}

@ -12,7 +12,7 @@ use open;

use hammond_data::dbqueries;
use hammond_data::utils::get_download_folder;
-use hammond_data::EpisodeWidgetQuery;
+use hammond_data::EpisodeWidgetModel;

use app::Action;
use manager;
@ -68,7 +68,7 @@ struct Buttons {
}

impl InfoLabels {
-fn init(&self, episode: &EpisodeWidgetQuery) {
+fn init(&self, episode: &EpisodeWidgetModel) {
// Set the title label state.
self.set_title(episode);

@ -82,7 +82,7 @@ impl InfoLabels {
self.set_size(episode.length())
}

-fn set_title(&self, episode: &EpisodeWidgetQuery) {
+fn set_title(&self, episode: &EpisodeWidgetModel) {
self.title.set_text(episode.title());

if episode.played().is_some() {
@ -206,7 +206,7 @@ impl Default for EpisodeWidget {
}

impl EpisodeWidget {
-pub fn new(episode: &EpisodeWidgetQuery, sender: &Sender<Action>) -> Rc<Self> {
+pub fn new(episode: &EpisodeWidgetModel, sender: &Sender<Action>) -> Rc<Self> {
let widget = Rc::new(Self::default());
widget.info.init(episode);
Self::determine_buttons_state(&widget, episode, sender)
@ -297,7 +297,7 @@ impl EpisodeWidget {
/// -------------------
fn determine_buttons_state(
widget: &Rc<Self>,
-episode: &EpisodeWidgetQuery,
+episode: &EpisodeWidgetModel,
sender: &Sender<Action>,
) -> Result<(), Error> {
// Reset the buttons state no matter the glade file.
@ -428,8 +428,8 @@ impl EpisodeWidget {
}
}

-fn on_download_clicked(ep: &EpisodeWidgetQuery, sender: &Sender<Action>) -> Result<(), Error> {
-let pd = dbqueries::get_podcast_from_id(ep.podcast_id())?;
+fn on_download_clicked(ep: &EpisodeWidgetModel, sender: &Sender<Action>) -> Result<(), Error> {
+let pd = dbqueries::get_podcast_from_id(ep.show_id())?;
let download_fold = get_download_folder(&pd.title())?;

// Start a new download.
@ -442,7 +442,7 @@ fn on_download_clicked(ep: &EpisodeWidgetQuery, sender: &Sender<Action>) -> Resu

fn on_play_bttn_clicked(
widget: &Rc<EpisodeWidget>,
-episode: &mut EpisodeWidgetQuery,
+episode: &mut EpisodeWidgetModel,
sender: &Sender<Action>,
) -> Result<(), Error> {
// Mark played

@ -6,7 +6,7 @@ use gtk::prelude::*;

use crossbeam_channel::Sender;
use hammond_data::dbqueries;
-use hammond_data::EpisodeWidgetQuery;
+use hammond_data::EpisodeWidgetModel;
use send_cell::SendCell;

use app::Action;
@ -93,7 +93,7 @@ impl HomeView {
let now_utc = Utc::now();

let view_ = view.clone();
-let func = move |ep: EpisodeWidgetQuery| {
+let func = move |ep: EpisodeWidgetModel| {
let epoch = ep.epoch();
let widget = HomeEpisode::new(&ep, &sender);

@ -197,12 +197,12 @@ impl Default for HomeEpisode {
}

impl HomeEpisode {
-fn new(episode: &EpisodeWidgetQuery, sender: &Sender<Action>) -> HomeEpisode {
+fn new(episode: &EpisodeWidgetModel, sender: &Sender<Action>) -> HomeEpisode {
let builder =
gtk::Builder::new_from_resource("/org/gnome/Hammond/gtk/episodes_view_widget.ui");
let container: gtk::Box = builder.get_object("container").unwrap();
let image: gtk::Image = builder.get_object("cover").unwrap();
-let pid = episode.podcast_id();
+let pid = episode.show_id();
let ep = EpisodeWidget::new(episode, sender);

let view = HomeEpisode {
@ -215,15 +215,15 @@ impl HomeEpisode {
view
}

-fn init(&self, podcast_id: i32) {
-self.set_cover(podcast_id)
+fn init(&self, show_id: i32) {
+self.set_cover(show_id)
.map_err(|err| error!("Failed to set a cover: {}", err))
.ok();

self.container.pack_start(&self.episode, true, true, 6);
}

-fn set_cover(&self, podcast_id: i32) -> Result<(), Error> {
-utils::set_image_from_path(&self.image, podcast_id, 64)
+fn set_cover(&self, show_id: i32) -> Result<(), Error> {
+utils::set_image_from_path(&self.image, show_id, 64)
}
}

@ -14,7 +14,7 @@ use failure::Error;
use send_cell::SendCell;

use hammond_data::{dbqueries, USER_AGENT};
-use hammond_data::{EpisodeWidgetQuery, PodcastCoverQuery};
+use hammond_data::{EpisodeWidgetModel, ShowCoverModel};

use app::Action;
use utils::set_image_from_path;
@ -49,23 +49,23 @@ struct PlayerInfo {

impl PlayerInfo {
// FIXME: create a Diesel Model of the joined episode and podcast query instead
-fn init(&self, episode: &EpisodeWidgetQuery, podcast: &PodcastCoverQuery) {
+fn init(&self, episode: &EpisodeWidgetModel, podcast: &ShowCoverModel) {
self.set_cover_image(podcast);
self.set_show_title(podcast);
self.set_episode_title(episode);
}

-fn set_episode_title(&self, episode: &EpisodeWidgetQuery) {
+fn set_episode_title(&self, episode: &EpisodeWidgetModel) {
self.episode.set_text(episode.title());
self.episode.set_tooltip_text(episode.title());
}

-fn set_show_title(&self, show: &PodcastCoverQuery) {
+fn set_show_title(&self, show: &ShowCoverModel) {
self.show.set_text(show.title());
self.show.set_tooltip_text(show.title());
}

-fn set_cover_image(&self, show: &PodcastCoverQuery) {
+fn set_cover_image(&self, show: &ShowCoverModel) {
set_image_from_path(&self.cover, show.id(), 34)
.map_err(|err| error!("Player Cover: {}", err))
.ok();
@ -357,7 +357,7 @@ impl PlayerWidget {

pub fn initialize_episode(&self, rowid: i32) -> Result<(), Error> {
let ep = dbqueries::get_episode_widget_from_rowid(rowid)?;
-let pd = dbqueries::get_podcast_cover_from_id(ep.podcast_id())?;
+let pd = dbqueries::get_podcast_cover_from_id(ep.show_id())?;

self.info.init(&ep, &pd);
// Currently that will always be the case since the play button is

@ -11,7 +11,7 @@ use send_cell::SendCell;
|
|||||||
|
|
||||||
use hammond_data::dbqueries;
|
use hammond_data::dbqueries;
|
||||||
use hammond_data::utils::delete_show;
|
use hammond_data::utils::delete_show;
|
||||||
use hammond_data::Podcast;
|
use hammond_data::Show;
|
||||||
|
|
||||||
use app::Action;
|
use app::Action;
|
||||||
use utils::{self, lazy_load};
|
use utils::{self, lazy_load};
|
||||||
@ -36,7 +36,7 @@ pub struct ShowWidget {
|
|||||||
settings: gtk::MenuButton,
|
settings: gtk::MenuButton,
|
||||||
unsub: gtk::Button,
|
unsub: gtk::Button,
|
||||||
episodes: gtk::ListBox,
|
episodes: gtk::ListBox,
|
||||||
podcast_id: Option<i32>,
|
show_id: Option<i32>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Default for ShowWidget {
|
impl Default for ShowWidget {
|
||||||
@ -61,13 +61,13 @@ impl Default for ShowWidget {
|
|||||||
link,
|
link,
|
||||||
settings,
|
settings,
|
||||||
episodes,
|
episodes,
|
||||||
podcast_id: None,
|
show_id: None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ShowWidget {
|
impl ShowWidget {
|
||||||
pub fn new(pd: Arc<Podcast>, sender: Sender<Action>) -> Rc<ShowWidget> {
|
pub fn new(pd: Arc<Show>, sender: Sender<Action>) -> Rc<ShowWidget> {
|
||||||
let mut pdw = ShowWidget::default();
|
let mut pdw = ShowWidget::default();
|
||||||
pdw.init(&pd, &sender);
|
pdw.init(&pd, &sender);
|
||||||
let pdw = Rc::new(pdw);
|
let pdw = Rc::new(pdw);
|
||||||
@@ -78,7 +78,7 @@ impl ShowWidget {
         pdw
     }

-    pub fn init(&mut self, pd: &Arc<Podcast>, sender: &Sender<Action>) {
+    pub fn init(&mut self, pd: &Arc<Show>, sender: &Sender<Action>) {
         let builder = gtk::Builder::new_from_resource("/org/gnome/Hammond/gtk/show_widget.ui");

         self.unsub
@@ -87,7 +87,7 @@ impl ShowWidget {
         }));

         self.set_description(pd.description());
-        self.podcast_id = Some(pd.id());
+        self.show_id = Some(pd.id());

         self.set_cover(&pd)
             .map_err(|err| error!("Failed to set a cover: {}", err))
@@ -118,7 +118,7 @@ impl ShowWidget {
     }

     /// Set the show cover.
-    fn set_cover(&self, pd: &Arc<Podcast>) -> Result<(), Error> {
+    fn set_cover(&self, pd: &Arc<Show>) -> Result<(), Error> {
         utils::set_image_from_path(&self.cover, pd.id(), 256)
     }

@@ -143,7 +143,7 @@ impl ShowWidget {
     }

     /// Set scrolled window vertical adjustment.
-    fn set_vadjustment(&self, pd: &Arc<Podcast>) -> Result<(), Error> {
+    fn set_vadjustment(&self, pd: &Arc<Show>) -> Result<(), Error> {
         let guard = SHOW_WIDGET_VALIGNMENT
             .lock()
             .map_err(|err| format_err!("Failed to lock widget align mutex: {}", err))?;
@@ -166,15 +166,15 @@ impl ShowWidget {
         Ok(())
     }

-    pub fn podcast_id(&self) -> Option<i32> {
-        self.podcast_id
+    pub fn show_id(&self) -> Option<i32> {
+        self.show_id
     }
 }

 /// Populate the listbox with the shows episodes.
 fn populate_listbox(
     show: &Rc<ShowWidget>,
-    pd: Arc<Podcast>,
+    pd: Arc<Show>,
     sender: Sender<Action>,
 ) -> Result<(), Error> {
     use crossbeam_channel::bounded;
@@ -223,7 +223,7 @@ fn populate_listbox(
     Ok(())
 }

-fn on_unsub_button_clicked(pd: Arc<Podcast>, unsub_button: &gtk::Button, sender: &Sender<Action>) {
+fn on_unsub_button_clicked(pd: Arc<Show>, unsub_button: &gtk::Button, sender: &Sender<Action>) {
     // hack to get away without properly checking for none.
     // if pressed twice would panic.
     unsub_button.set_sensitive(false);
@@ -239,7 +239,7 @@ fn on_unsub_button_clicked(pd: Arc<Podcast>, unsub_button: &gtk::Button, sender:
     unsub_button.set_sensitive(true);
 }

-fn on_played_button_clicked(pd: Arc<Podcast>, episodes: &gtk::ListBox, sender: &Sender<Action>) {
+fn on_played_button_clicked(pd: Arc<Show>, episodes: &gtk::ListBox, sender: &Sender<Action>) {
     if dim_titles(episodes).is_none() {
         error!("Something went horribly wrong when dimming the titles.");
         warn!("RUN WHILE YOU STILL CAN!");
@@ -248,7 +248,7 @@ fn on_played_button_clicked(pd: Arc<Podcast>, episodes: &gtk::ListBox, sender: &
     sender.send(Action::MarkAllPlayerNotification(pd))
 }

-fn mark_all_watched(pd: &Podcast, sender: &Sender<Action>) -> Result<(), Error> {
+fn mark_all_watched(pd: &Show, sender: &Sender<Action>) -> Result<(), Error> {
     dbqueries::update_none_to_played_now(pd)?;
     // Not all widgets migth have been loaded when the mark_all is hit
     // So we will need to refresh again after it's done.
@@ -257,7 +257,7 @@ fn mark_all_watched(pd: &Podcast, sender: &Sender<Action>) -> Result<(), Error>
     Ok(())
 }

-pub fn mark_all_notif(pd: Arc<Podcast>, sender: &Sender<Action>) -> InAppNotification {
+pub fn mark_all_notif(pd: Arc<Show>, sender: &Sender<Action>) -> InAppNotification {
     let id = pd.id();
     let callback = clone!(sender => move || {
         mark_all_watched(&pd, &sender)
@@ -271,7 +271,7 @@ pub fn mark_all_notif(pd: Arc<Podcast>, sender: &Sender<Action>) -> InAppNotific
     InAppNotification::new(text, callback, undo_callback, UndoState::Shown)
 }

-pub fn remove_show_notif(pd: Arc<Podcast>, sender: Sender<Action>) -> InAppNotification {
+pub fn remove_show_notif(pd: Arc<Show>, sender: Sender<Action>) -> InAppNotification {
     let text = format!("Unsubscribed from {}", pd.title());

     utils::ignore_show(pd.id())
@@ -6,7 +6,7 @@ use failure::Error;
 use send_cell::SendCell;

 use hammond_data::dbqueries;
-use hammond_data::Podcast;
+use hammond_data::Show;

 use app::Action;
 use utils::{self, get_ignored_shows, lazy_load, set_image_from_path};
@@ -45,7 +45,7 @@ impl ShowsView {
     pub fn new(sender: Sender<Action>) -> Result<Rc<Self>, Error> {
         let pop = Rc::new(ShowsView::default());
         pop.init(sender);
-        // Populate the flowbox with the Podcasts.
+        // Populate the flowbox with the Shows.
         populate_flowbox(&pop)?;
         Ok(pop)
     }
@@ -147,13 +147,13 @@ impl Default for ShowsChild {
 }

 impl ShowsChild {
-    pub fn new(pd: &Podcast) -> ShowsChild {
+    pub fn new(pd: &Show) -> ShowsChild {
         let child = ShowsChild::default();
         child.init(pd);
         child
     }

-    fn init(&self, pd: &Podcast) {
+    fn init(&self, pd: &Show) {
         self.container.set_tooltip_text(pd.title());
         WidgetExt::set_name(&self.child, &pd.id().to_string());

@@ -162,7 +162,7 @@ impl ShowsChild {
             .ok();
     }

-    fn set_cover(&self, podcast_id: i32) -> Result<(), Error> {
-        set_image_from_path(&self.cover, podcast_id, 256)
+    fn set_cover(&self, show_id: i32) -> Result<(), Error> {
+        set_image_from_path(&self.cover, show_id, 256)
     }
 }