Database: Rename the tables to better match the user-facing strings
commit f3fb27005a (parent 79bb9bdde8)
@@ -0,0 +1,40 @@
ALTER TABLE episodes RENAME TO old_table;
ALTER TABLE shows RENAME TO podcast;

CREATE TABLE episode (
title TEXT NOT NULL,
uri TEXT,
local_uri TEXT,
description TEXT,
epoch INTEGER NOT NULL DEFAULT 0,
length INTEGER,
duration INTEGER,
guid TEXT,
played INTEGER,
podcast_id INTEGER NOT NULL,
PRIMARY KEY (title, podcast_id)
);

INSERT INTO episode (
title,
uri,
local_uri,
description,
epoch,
length,
duration,
guid,
played,
podcast_id
) SELECT title,
uri,
local_uri,
description,
epoch, length,
duration,
guid,
played,
show_id
FROM old_table;

DROP TABLE old_table;
@@ -0,0 +1,40 @@
ALTER TABLE episode RENAME TO old_table;
ALTER TABLE podcast RENAME TO shows;

CREATE TABLE episodes (
title TEXT NOT NULL,
uri TEXT,
local_uri TEXT,
description TEXT,
epoch INTEGER NOT NULL DEFAULT 0,
length INTEGER,
duration INTEGER,
guid TEXT,
played INTEGER,
show_id INTEGER NOT NULL,
PRIMARY KEY (title, show_id)
);

INSERT INTO episodes (
title,
uri,
local_uri,
description,
epoch,
length,
duration,
guid,
played,
show_id
) SELECT title,
uri,
local_uri,
description,
epoch, length,
duration,
guid,
played,
podcast_id
FROM old_table;

DROP TABLE old_table;
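Note: older SQLite releases cannot rename a column in place, so both migrations use the rename-and-copy pattern: rename the old table aside, create the new table with the new column name, copy the rows over, then drop the old copy. The following standalone Rust sketch is not part of this commit; it assumes a diesel SqliteConnection like the one passed to run_migration_on below, and mirrors the up.sql steps for the episode table only.

use diesel::connection::Connection;
use diesel::result::QueryResult;
use diesel::sqlite::SqliteConnection;

// Hypothetical helper, mirroring the up.sql migration above.
fn rename_episode_table(con: &SqliteConnection) -> QueryResult<()> {
    // Move the old table out of the way.
    con.execute("ALTER TABLE episode RENAME TO old_table")?;
    // Recreate it under the new name, with podcast_id renamed to show_id.
    con.execute(
        "CREATE TABLE episodes (
             title TEXT NOT NULL, uri TEXT, local_uri TEXT, description TEXT,
             epoch INTEGER NOT NULL DEFAULT 0, length INTEGER, duration INTEGER,
             guid TEXT, played INTEGER, show_id INTEGER NOT NULL,
             PRIMARY KEY (title, show_id))",
    )?;
    // Copy the rows, mapping the old podcast_id column onto show_id.
    con.execute(
        "INSERT INTO episodes (title, uri, local_uri, description, epoch,
                               length, duration, guid, played, show_id)
         SELECT title, uri, local_uri, description, epoch,
                length, duration, guid, played, podcast_id
         FROM old_table",
    )?;
    // Drop the leftover copy.
    con.execute("DROP TABLE old_table")?;
    Ok(())
}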
@@ -68,8 +68,8 @@ fn run_migration_on(connection: &SqliteConnection) -> Result<(), DataError> {
pub fn truncate_db() -> Result<(), DataError> {
let db = connection();
let con = db.get()?;
con.execute("DELETE FROM episode")?;
con.execute("DELETE FROM podcast")?;
con.execute("DELETE FROM episodes")?;
con.execute("DELETE FROM shows")?;
con.execute("DELETE FROM source")?;
Ok(())
}
@@ -24,22 +24,22 @@ pub fn get_sources() -> Result<Vec<Source>, DataError> {
}

pub fn get_podcasts() -> Result<Vec<Podcast>, DataError> {
use schema::podcast::dsl::*;
use schema::shows::dsl::*;
let db = connection();
let con = db.get()?;

podcast
shows
.order(title.asc())
.load::<Podcast>(&con)
.map_err(From::from)
}

pub fn get_podcasts_filter(filter_ids: &[i32]) -> Result<Vec<Podcast>, DataError> {
use schema::podcast::dsl::*;
use schema::shows::dsl::*;
let db = connection();
let con = db.get()?;

podcast
shows
.order(title.asc())
.filter(id.ne_all(filter_ids))
.load::<Podcast>(&con)
@@ -47,22 +47,22 @@ pub fn get_podcasts_filter(filter_ids: &[i32]) -> Result<Vec<Podcast>, DataError
}

pub fn get_episodes() -> Result<Vec<Episode>, DataError> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;
let db = connection();
let con = db.get()?;

episode
episodes
.order(epoch.desc())
.load::<Episode>(&con)
.map_err(From::from)
}

pub(crate) fn get_downloaded_episodes() -> Result<Vec<EpisodeCleanerQuery>, DataError> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;
let db = connection();
let con = db.get()?;

episode
episodes
.select((rowid, local_uri, played))
.filter(local_uri.is_not_null())
.load::<EpisodeCleanerQuery>(&con)
@@ -70,22 +70,22 @@ pub(crate) fn get_downloaded_episodes() -> Result<Vec<EpisodeCleanerQuery>, Data
}

// pub(crate) fn get_played_episodes() -> Result<Vec<Episode>, DataError> {
// use schema::episode::dsl::*;
// use schema::episodes::dsl::*;

// let db = connection();
// let con = db.get()?;
// episode
// episodes
// .filter(played.is_not_null())
// .load::<Episode>(&con)
// .map_err(From::from)
// }

pub(crate) fn get_played_cleaner_episodes() -> Result<Vec<EpisodeCleanerQuery>, DataError> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;
let db = connection();
let con = db.get()?;

episode
episodes
.select((rowid, local_uri, played))
.filter(played.is_not_null())
.load::<EpisodeCleanerQuery>(&con)
@@ -93,24 +93,24 @@ pub(crate) fn get_played_cleaner_episodes() -> Result<Vec<EpisodeCleanerQuery>,
}

pub fn get_episode_from_rowid(ep_id: i32) -> Result<Episode, DataError> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;
let db = connection();
let con = db.get()?;

episode
episodes
.filter(rowid.eq(ep_id))
.get_result::<Episode>(&con)
.map_err(From::from)
}

pub fn get_episode_widget_from_rowid(ep_id: i32) -> Result<EpisodeWidgetQuery, DataError> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;
let db = connection();
let con = db.get()?;

episode
episodes
.select((
rowid, title, uri, local_uri, epoch, length, duration, played, podcast_id,
rowid, title, uri, local_uri, epoch, length, duration, played, show_id,
))
.filter(rowid.eq(ep_id))
.get_result::<EpisodeWidgetQuery>(&con)
@@ -118,11 +118,11 @@ pub fn get_episode_widget_from_rowid(ep_id: i32) -> Result<EpisodeWidgetQuery, D
}

pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result<Option<String>, DataError> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;
let db = connection();
let con = db.get()?;

episode
episodes
.filter(rowid.eq(ep_id))
.select(local_uri)
.get_result::<Option<String>>(&con)
@@ -133,39 +133,39 @@ pub fn get_episodes_widgets_filter_limit(
filter_ids: &[i32],
limit: u32,
) -> Result<Vec<EpisodeWidgetQuery>, DataError> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;
let db = connection();
let con = db.get()?;
let columns = (
rowid, title, uri, local_uri, epoch, length, duration, played, podcast_id,
rowid, title, uri, local_uri, epoch, length, duration, played, show_id,
);

episode
episodes
.select(columns)
.order(epoch.desc())
.filter(podcast_id.ne_all(filter_ids))
.filter(show_id.ne_all(filter_ids))
.limit(i64::from(limit))
.load::<EpisodeWidgetQuery>(&con)
.map_err(From::from)
}

pub fn get_podcast_from_id(pid: i32) -> Result<Podcast, DataError> {
use schema::podcast::dsl::*;
use schema::shows::dsl::*;
let db = connection();
let con = db.get()?;

podcast
shows
.filter(id.eq(pid))
.get_result::<Podcast>(&con)
.map_err(From::from)
}

pub fn get_podcast_cover_from_id(pid: i32) -> Result<PodcastCoverQuery, DataError> {
use schema::podcast::dsl::*;
use schema::shows::dsl::*;
let db = connection();
let con = db.get()?;

podcast
shows
.select((id, title, image_uri))
.filter(id.eq(pid))
.get_result::<PodcastCoverQuery>(&con)
@@ -173,7 +173,7 @@ pub fn get_podcast_cover_from_id(pid: i32) -> Result<PodcastCoverQuery, DataErro
}

pub fn get_pd_episodes(parent: &Podcast) -> Result<Vec<Episode>, DataError> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;
let db = connection();
let con = db.get()?;

@@ -194,23 +194,23 @@ pub fn get_pd_episodes_count(parent: &Podcast) -> Result<i64, DataError> {
}

pub fn get_pd_episodeswidgets(parent: &Podcast) -> Result<Vec<EpisodeWidgetQuery>, DataError> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;
let db = connection();
let con = db.get()?;
let columns = (
rowid, title, uri, local_uri, epoch, length, duration, played, podcast_id,
rowid, title, uri, local_uri, epoch, length, duration, played, show_id,
);

episode
episodes
.select(columns)
.filter(podcast_id.eq(parent.id()))
.filter(show_id.eq(parent.id()))
.order(epoch.desc())
.load::<EpisodeWidgetQuery>(&con)
.map_err(From::from)
}

pub fn get_pd_unplayed_episodes(parent: &Podcast) -> Result<Vec<Episode>, DataError> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;
let db = connection();
let con = db.get()?;

@@ -222,7 +222,7 @@ pub fn get_pd_unplayed_episodes(parent: &Podcast) -> Result<Vec<Episode>, DataEr
}

// pub(crate) fn get_pd_episodes_limit(parent: &Podcast, limit: u32) ->
// Result<Vec<Episode>, DataError> { use schema::episode::dsl::*;
// Result<Vec<Episode>, DataError> { use schema::episodes::dsl::*;

// let db = connection();
// let con = db.get()?;
@@ -257,24 +257,24 @@ pub fn get_source_from_id(id_: i32) -> Result<Source, DataError> {
}

pub fn get_podcast_from_source_id(sid: i32) -> Result<Podcast, DataError> {
use schema::podcast::dsl::*;
use schema::shows::dsl::*;
let db = connection();
let con = db.get()?;

podcast
shows
.filter(source_id.eq(sid))
.get_result::<Podcast>(&con)
.map_err(From::from)
}

pub fn get_episode_from_pk(title_: &str, pid: i32) -> Result<Episode, DataError> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;
let db = connection();
let con = db.get()?;

episode
episodes
.filter(title.eq(title_))
.filter(podcast_id.eq(pid))
.filter(show_id.eq(pid))
.get_result::<Episode>(&con)
.map_err(From::from)
}
@@ -283,14 +283,14 @@ pub(crate) fn get_episode_minimal_from_pk(
title_: &str,
pid: i32,
) -> Result<EpisodeMinimal, DataError> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;
let db = connection();
let con = db.get()?;

episode
.select((rowid, title, uri, epoch, duration, guid, podcast_id))
episodes
.select((rowid, title, uri, epoch, duration, guid, show_id))
.filter(title.eq(title_))
.filter(podcast_id.eq(pid))
.filter(show_id.eq(pid))
.get_result::<EpisodeMinimal>(&con)
.map_err(From::from)
}
@@ -314,16 +314,16 @@ fn delete_source(con: &SqliteConnection, source_id: i32) -> QueryResult<usize> {
diesel::delete(source.filter(id.eq(source_id))).execute(con)
}

fn delete_podcast(con: &SqliteConnection, podcast_id: i32) -> QueryResult<usize> {
use schema::podcast::dsl::*;
fn delete_podcast(con: &SqliteConnection, show_id: i32) -> QueryResult<usize> {
use schema::shows::dsl::*;

diesel::delete(podcast.filter(id.eq(podcast_id))).execute(con)
diesel::delete(shows.filter(id.eq(show_id))).execute(con)
}

fn delete_podcast_episodes(con: &SqliteConnection, parent_id: i32) -> QueryResult<usize> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;

diesel::delete(episode.filter(podcast_id.eq(parent_id))).execute(con)
diesel::delete(episodes.filter(show_id.eq(parent_id))).execute(con)
}

pub fn source_exists(url: &str) -> Result<bool, DataError> {
@@ -338,62 +338,62 @@ pub fn source_exists(url: &str) -> Result<bool, DataError> {
}

pub(crate) fn podcast_exists(source_id_: i32) -> Result<bool, DataError> {
use schema::podcast::dsl::*;
use schema::shows::dsl::*;

let db = connection();
let con = db.get()?;

select(exists(podcast.filter(source_id.eq(source_id_))))
select(exists(shows.filter(source_id.eq(source_id_))))
.get_result(&con)
.map_err(From::from)
}

#[cfg_attr(rustfmt, rustfmt_skip)]
pub(crate) fn episode_exists(title_: &str, podcast_id_: i32) -> Result<bool, DataError> {
use schema::episode::dsl::*;
pub(crate) fn episode_exists(title_: &str, show_id_: i32) -> Result<bool, DataError> {
use schema::episodes::dsl::*;

let db = connection();
let con = db.get()?;

select(exists(episode.filter(podcast_id.eq(podcast_id_)).filter(title.eq(title_))))
select(exists(episodes.filter(show_id.eq(show_id_)).filter(title.eq(title_))))
.get_result(&con)
.map_err(From::from)
}

/// Check if the `episode` table contains any rows
/// Check if the `episodes` table contains any rows
///
/// Return true if `episode` table is populated.
/// Return true if `episodes` table is populated.
pub fn is_episodes_populated() -> Result<bool, DataError> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;

let db = connection();
let con = db.get()?;

select(exists(episode.as_query()))
select(exists(episodes.as_query()))
.get_result(&con)
.map_err(From::from)
}

/// Check if the `podcast` table contains any rows
/// Check if the `shows` table contains any rows
///
/// Return true if `podcast` table is populated.
/// Return true if `shows` table is populated.
pub fn is_podcasts_populated(filter_ids: &[i32]) -> Result<bool, DataError> {
use schema::podcast::dsl::*;
use schema::shows::dsl::*;

let db = connection();
let con = db.get()?;

select(exists(podcast.filter(id.ne_all(filter_ids))))
select(exists(shows.filter(id.ne_all(filter_ids))))
.get_result(&con)
.map_err(From::from)
}

pub(crate) fn index_new_episodes(eps: &[NewEpisode]) -> Result<(), DataError> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;
let db = connection();
let con = db.get()?;

diesel::insert_into(episode)
diesel::insert_into(episodes)
.values(eps)
.execute(&*con)
.map_err(From::from)
@@ -401,7 +401,7 @@ pub(crate) fn index_new_episodes(eps: &[NewEpisode]) -> Result<(), DataError> {
}

pub fn update_none_to_played_now(parent: &Podcast) -> Result<usize, DataError> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;
let db = connection();
let con = db.get()?;
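The public helpers in dbqueries keep their names; only the table DSL paths and the podcast_id/show_id identifiers change underneath, so most callers are untouched. A hypothetical caller (not in this commit, assuming it lives inside the hammond-data crate) would still read:

use dbqueries;
use errors::DataError;

// Hypothetical sketch: listing subscribed shows through the unchanged API.
fn print_show_titles() -> Result<(), DataError> {
    for pd in dbqueries::get_podcasts()? {
        println!("{}", pd.title());
    }
    Ok(())
}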
@@ -65,12 +65,12 @@ fn determine_ep_state(
item: &rss::Item,
) -> Result<IndexState<NewEpisode>, DataError> {
// Check if feed exists
let exists = dbqueries::episode_exists(ep.title(), ep.podcast_id())?;
let exists = dbqueries::episode_exists(ep.title(), ep.show_id())?;

if !exists {
Ok(IndexState::Index(ep.into_new_episode(item)))
} else {
let old = dbqueries::get_episode_minimal_from_pk(ep.title(), ep.podcast_id())?;
let old = dbqueries::get_episode_minimal_from_pk(ep.title(), ep.show_id())?;
let rowid = old.rowid();

if ep != old {
@@ -6,13 +6,13 @@ use diesel::SaveChangesDsl;
use database::connection;
use errors::DataError;
use models::{Podcast, Save};
use schema::episode;
use schema::episodes;

#[derive(Queryable, Identifiable, AsChangeset, Associations, PartialEq)]
#[table_name = "episode"]
#[table_name = "episodes"]
#[changeset_options(treat_none_as_null = "true")]
#[primary_key(title, podcast_id)]
#[belongs_to(Podcast, foreign_key = "podcast_id")]
#[primary_key(title, show_id)]
#[belongs_to(Podcast, foreign_key = "show_id")]
#[derive(Debug, Clone)]
/// Diesel Model of the episode table.
pub struct Episode {
@@ -26,7 +26,7 @@ pub struct Episode {
duration: Option<i32>,
guid: Option<String>,
played: Option<i32>,
podcast_id: i32,
show_id: i32,
}

impl Save<Episode> for Episode {
@@ -153,8 +153,8 @@ impl Episode {
}

/// `Podcast` table foreign key.
pub fn podcast_id(&self) -> i32 {
self.podcast_id
pub fn show_id(&self) -> i32 {
self.show_id
}

/// Sets the `played` value with the current `epoch` timestamp and save it.
@@ -166,9 +166,9 @@ impl Episode {
}

#[derive(Queryable, AsChangeset, PartialEq)]
#[table_name = "episode"]
#[table_name = "episodes"]
#[changeset_options(treat_none_as_null = "true")]
#[primary_key(title, podcast_id)]
#[primary_key(title, show_id)]
#[derive(Debug, Clone)]
/// Diesel Model to be used for constructing `EpisodeWidgets`.
pub struct EpisodeWidgetQuery {
@@ -180,7 +180,7 @@ pub struct EpisodeWidgetQuery {
length: Option<i32>,
duration: Option<i32>,
played: Option<i32>,
podcast_id: i32,
show_id: i32,
}

impl From<Episode> for EpisodeWidgetQuery {
@@ -194,7 +194,7 @@ impl From<Episode> for EpisodeWidgetQuery {
length: e.length,
duration: e.duration,
played: e.played,
podcast_id: e.podcast_id,
show_id: e.show_id,
}
}
}
@@ -205,12 +205,12 @@ impl Save<usize> for EpisodeWidgetQuery {
/// Helper method to easily save/"sync" current state of self to the
/// Database.
fn save(&self) -> Result<usize, Self::Error> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;

let db = connection();
let tempdb = db.get()?;

diesel::update(episode.filter(rowid.eq(self.rowid)))
diesel::update(episodes.filter(rowid.eq(self.rowid)))
.set(self)
.execute(&*tempdb)
.map_err(From::from)
@@ -293,8 +293,8 @@ impl EpisodeWidgetQuery {
}

/// `Podcast` table foreign key.
pub fn podcast_id(&self) -> i32 {
self.podcast_id
pub fn show_id(&self) -> i32 {
self.show_id
}

/// Sets the `played` value with the current `epoch` timestamp and save it.
@@ -306,9 +306,9 @@ impl EpisodeWidgetQuery {
}

#[derive(Queryable, AsChangeset, PartialEq)]
#[table_name = "episode"]
#[table_name = "episodes"]
#[changeset_options(treat_none_as_null = "true")]
#[primary_key(title, podcast_id)]
#[primary_key(title, show_id)]
#[derive(Debug, Clone)]
/// Diesel Model to be used internally with the `utils::checkup` function.
pub struct EpisodeCleanerQuery {
@@ -323,12 +323,12 @@ impl Save<usize> for EpisodeCleanerQuery {
/// Helper method to easily save/"sync" current state of self to the
/// Database.
fn save(&self) -> Result<usize, Self::Error> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;

let db = connection();
let tempdb = db.get()?;

diesel::update(episode.filter(rowid.eq(self.rowid)))
diesel::update(episodes.filter(rowid.eq(self.rowid)))
.set(self)
.execute(&*tempdb)
.map_err(From::from)
@@ -378,9 +378,9 @@ impl EpisodeCleanerQuery {
}

#[derive(Queryable, AsChangeset, PartialEq)]
#[table_name = "episode"]
#[table_name = "episodes"]
#[changeset_options(treat_none_as_null = "true")]
#[primary_key(title, podcast_id)]
#[primary_key(title, show_id)]
#[derive(Debug, Clone)]
/// Diesel Model to be used for FIXME.
pub struct EpisodeMinimal {
@@ -390,7 +390,7 @@ pub struct EpisodeMinimal {
epoch: i32,
duration: Option<i32>,
guid: Option<String>,
podcast_id: i32,
show_id: i32,
}

impl From<Episode> for EpisodeMinimal {
@@ -402,7 +402,7 @@ impl From<Episode> for EpisodeMinimal {
guid: e.guid,
epoch: e.epoch,
duration: e.duration,
podcast_id: e.podcast_id,
show_id: e.show_id,
}
}
}
@@ -446,7 +446,7 @@ impl EpisodeMinimal {
}

/// `Podcast` table foreign key.
pub fn podcast_id(&self) -> i32 {
self.podcast_id
pub fn show_id(&self) -> i32 {
self.show_id
}
}
@@ -9,11 +9,11 @@ use dbqueries;
use errors::DataError;
use models::{Episode, EpisodeMinimal, Index, Insert, Update};
use parser;
use schema::episode;
use schema::episodes;
use utils::url_cleaner;

#[derive(Insertable, AsChangeset)]
#[table_name = "episode"]
#[table_name = "episodes"]
#[derive(Debug, Clone, Default, Builder, PartialEq)]
#[builder(default)]
#[builder(derive(Debug))]
@@ -26,7 +26,7 @@ pub(crate) struct NewEpisode {
duration: Option<i32>,
guid: Option<String>,
epoch: i32,
podcast_id: i32,
show_id: i32,
}

impl From<NewEpisodeMinimal> for NewEpisode {
@@ -36,7 +36,7 @@ impl From<NewEpisodeMinimal> for NewEpisode {
.uri(e.uri)
.duration(e.duration)
.epoch(e.epoch)
.podcast_id(e.podcast_id)
.show_id(e.show_id)
.guid(e.guid)
.build()
.unwrap()
@@ -47,12 +47,12 @@ impl Insert<()> for NewEpisode {
type Error = DataError;

fn insert(&self) -> Result<(), DataError> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;
let db = connection();
let con = db.get()?;

info!("Inserting {:?}", self.title);
diesel::insert_into(episode)
diesel::insert_into(episodes)
.values(self)
.execute(&con)
.map_err(From::from)
@@ -64,12 +64,12 @@ impl Update<()> for NewEpisode {
type Error = DataError;

fn update(&self, episode_id: i32) -> Result<(), DataError> {
use schema::episode::dsl::*;
use schema::episodes::dsl::*;
let db = connection();
let con = db.get()?;

info!("Updating {:?}", self.title);
diesel::update(episode.filter(rowid.eq(episode_id)))
diesel::update(episodes.filter(rowid.eq(episode_id)))
.set(self)
.execute(&con)
.map_err(From::from)
@@ -83,10 +83,10 @@ impl Index<()> for NewEpisode {
// Does not update the episode description if it's the only thing that has
// changed.
fn index(&self) -> Result<(), DataError> {
let exists = dbqueries::episode_exists(self.title(), self.podcast_id())?;
let exists = dbqueries::episode_exists(self.title(), self.show_id())?;

if exists {
let other = dbqueries::get_episode_minimal_from_pk(self.title(), self.podcast_id())?;
let other = dbqueries::get_episode_minimal_from_pk(self.title(), self.show_id())?;

if self != &other {
self.update(other.rowid())
@@ -106,7 +106,7 @@ impl PartialEq<EpisodeMinimal> for NewEpisode {
&& (self.duration() == other.duration())
&& (self.epoch() == other.epoch())
&& (self.guid() == other.guid())
&& (self.podcast_id() == other.podcast_id())
&& (self.show_id() == other.show_id())
}
}
@@ -117,7 +117,7 @@ impl PartialEq<Episode> for NewEpisode {
&& (self.duration() == other.duration())
&& (self.epoch() == other.epoch())
&& (self.guid() == other.guid())
&& (self.podcast_id() == other.podcast_id())
&& (self.show_id() == other.show_id())
&& (self.description() == other.description())
&& (self.length() == other.length())
}
@@ -126,14 +126,14 @@ impl PartialEq<Episode> for NewEpisode {
impl NewEpisode {
/// Parses an `rss::Item` into a `NewEpisode` Struct.
#[allow(dead_code)]
pub(crate) fn new(item: &rss::Item, podcast_id: i32) -> Result<Self, DataError> {
NewEpisodeMinimal::new(item, podcast_id).map(|ep| ep.into_new_episode(item))
pub(crate) fn new(item: &rss::Item, show_id: i32) -> Result<Self, DataError> {
NewEpisodeMinimal::new(item, show_id).map(|ep| ep.into_new_episode(item))
}

#[allow(dead_code)]
pub(crate) fn to_episode(&self) -> Result<Episode, DataError> {
self.index()?;
dbqueries::get_episode_from_pk(&self.title, self.podcast_id).map_err(From::from)
dbqueries::get_episode_from_pk(&self.title, self.show_id).map_err(From::from)
}
}

@@ -167,13 +167,13 @@ impl NewEpisode {
self.length
}

pub(crate) fn podcast_id(&self) -> i32 {
self.podcast_id
pub(crate) fn show_id(&self) -> i32 {
self.show_id
}
}

#[derive(Insertable, AsChangeset)]
#[table_name = "episode"]
#[table_name = "episodes"]
#[derive(Debug, Clone, Builder, PartialEq)]
#[builder(derive(Debug))]
#[builder(setter(into))]
@@ -183,7 +183,7 @@ pub(crate) struct NewEpisodeMinimal {
duration: Option<i32>,
epoch: i32,
guid: Option<String>,
podcast_id: i32,
show_id: i32,
}

impl PartialEq<EpisodeMinimal> for NewEpisodeMinimal {
@@ -193,7 +193,7 @@ impl PartialEq<EpisodeMinimal> for NewEpisodeMinimal {
&& (self.duration() == other.duration())
&& (self.epoch() == other.epoch())
&& (self.guid() == other.guid())
&& (self.podcast_id() == other.podcast_id())
&& (self.show_id() == other.show_id())
}
}

@@ -241,7 +241,7 @@ impl NewEpisodeMinimal {
.duration(duration)
.epoch(epoch)
.guid(guid)
.podcast_id(parent_id)
.show_id(parent_id)
.build()
.map_err(From::from)
}
@@ -263,7 +263,7 @@ impl NewEpisodeMinimal {
.uri(self.uri)
.duration(self.duration)
.epoch(self.epoch)
.podcast_id(self.podcast_id)
.show_id(self.show_id)
.guid(self.guid)
.length(length)
.description(description)
@@ -294,8 +294,8 @@ impl NewEpisodeMinimal {
self.epoch
}

pub(crate) fn podcast_id(&self) -> i32 {
self.podcast_id
pub(crate) fn show_id(&self) -> i32 {
self.show_id
}
}
#[cfg(test)]
@@ -324,7 +324,7 @@ mod tests {
.guid(Some(String::from("7df4070a-9832-11e7-adac-cb37b05d5e24")))
.epoch(1505296800)
.duration(Some(4171))
.podcast_id(42)
.show_id(42)
.build()
.unwrap()
};
@@ -337,7 +337,7 @@ mod tests {
.guid(Some(String::from("7c207a24-e33f-11e6-9438-eb45dcf36a1d")))
.epoch(1502272800)
.duration(Some(4415))
.podcast_id(42)
.show_id(42)
.build()
.unwrap()
};
@@ -358,7 +358,7 @@ mod tests {
.length(Some(66738886))
.epoch(1505296800)
.duration(Some(4171))
.podcast_id(42)
.show_id(42)
.build()
.unwrap()
};
@@ -382,7 +382,7 @@ mod tests {
.length(Some(67527575))
.epoch(1502272800)
.duration(Some(4415))
.podcast_id(42)
.show_id(42)
.build()
.unwrap()
};
@@ -397,7 +397,7 @@ mod tests {
.length(Some(66738886))
.epoch(1505296800)
.duration(Some(424242))
.podcast_id(42)
.show_id(42)
.build()
.unwrap()
};
@@ -410,7 +410,7 @@ mod tests {
.guid(Some(String::from("78A682B4-73E8-47B8-88C0-1BE62DD4EF9D")))
.epoch(1505280282)
.duration(Some(5733))
.podcast_id(42)
.show_id(42)
.build()
.unwrap()
};
@@ -423,7 +423,7 @@ mod tests {
.guid(Some(String::from("1CE57548-B36C-4F14-832A-5D5E0A24E35B")))
.epoch(1504670247)
.duration(Some(4491))
.podcast_id(42)
.show_id(42)
.build()
.unwrap()
};
@@ -443,7 +443,7 @@ mod tests {
.length(Some(46479789))
.epoch(1505280282)
.duration(Some(5733))
.podcast_id(42)
.show_id(42)
.build()
.unwrap()
};
@@ -465,7 +465,7 @@ mod tests {
.length(Some(36544272))
.epoch(1504670247)
.duration(Some(4491))
.podcast_id(42)
.show_id(42)
.build()
.unwrap()
};
@@ -558,7 +558,7 @@ mod tests {
let episode = channel.items().iter().nth(14).unwrap();
let new_ep = NewEpisode::new(&episode, 42).unwrap();
new_ep.insert().unwrap();
let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.podcast_id()).unwrap();
let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.show_id()).unwrap();

assert_eq!(new_ep, ep);
assert_eq!(&new_ep, &*EXPECTED_INTERCEPTED_1);
@@ -567,7 +567,7 @@ mod tests {
let episode = channel.items().iter().nth(15).unwrap();
let new_ep = NewEpisode::new(&episode, 42).unwrap();
new_ep.insert().unwrap();
let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.podcast_id()).unwrap();
let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.show_id()).unwrap();

assert_eq!(new_ep, ep);
assert_eq!(&new_ep, &*EXPECTED_INTERCEPTED_2);
@@ -581,21 +581,15 @@ mod tests {

let updated = &*UPDATED_DURATION_INTERCEPTED_1;
updated.update(old.rowid()).unwrap();
let mut new = dbqueries::get_episode_from_pk(old.title(), old.podcast_id()).unwrap();
let new = dbqueries::get_episode_from_pk(old.title(), old.show_id()).unwrap();

// Assert that updating does not change the rowid and podcast_id
// Assert that updating does not change the rowid and show_id
assert_ne!(old, new);
assert_eq!(old.rowid(), new.rowid());
assert_eq!(old.podcast_id(), new.podcast_id());
assert_eq!(old.show_id(), new.show_id());

assert_eq!(updated, &new);
assert_ne!(updated, &old);

new.set_archive(true);
new.save().unwrap();

let new2 = dbqueries::get_episode_from_pk(old.title(), old.podcast_id()).unwrap();
assert_eq!(true, new2.archive());
}

#[test]
@@ -608,7 +602,7 @@ mod tests {
// Second identical, This should take the early return path
assert!(expected.index().is_ok());
// Get the episode
let old = dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap();
let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap();
// Assert that NewPodcast is equal to the Indexed one
assert_eq!(*expected, old);

@@ -617,23 +611,22 @@ mod tests {
// Update the podcast
assert!(updated.index().is_ok());
// Get the new Podcast
let new = dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap();
let new = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap();
// Assert it's diff from the old one.
assert_ne!(new, old);
assert_eq!(*updated, new);
assert_eq!(new.rowid(), old.rowid());
assert_eq!(new.podcast_id(), old.podcast_id());
assert_eq!(new.show_id(), old.show_id());
}

#[test]
fn test_new_episode_to_episode() {
let expected = &*EXPECTED_INTERCEPTED_1;
let updated = &*UPDATED_DURATION_INTERCEPTED_1;

// Assert insert() produces the same result that you would get with to_podcast()
truncate_db().unwrap();
expected.insert().unwrap();
let old = dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap();
let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap();
let ep = expected.to_episode().unwrap();
assert_eq!(old, ep);

@@ -642,17 +635,7 @@ mod tests {
let ep = expected.to_episode().unwrap();
// This should error as a unique constraint violation
assert!(expected.insert().is_err());
let mut old =
dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap();
let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap();
assert_eq!(old, ep);

old.set_archive(true);
old.save().unwrap();

// Assert that it does not mess with user preferences
let ep = updated.to_episode().unwrap();
let old = dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap();
assert_eq!(old, ep);
assert_eq!(old.archive(), true);
}
}
@@ -6,14 +6,14 @@ use rss;
use errors::DataError;
use models::Podcast;
use models::{Index, Insert, Update};
use schema::podcast;
use schema::shows;

use database::connection;
use dbqueries;
use utils::url_cleaner;

#[derive(Insertable, AsChangeset)]
#[table_name = "podcast"]
#[table_name = "shows"]
#[derive(Debug, Clone, Default, Builder, PartialEq)]
#[builder(default)]
#[builder(derive(Debug))]
@@ -30,11 +30,11 @@ impl Insert<()> for NewPodcast {
type Error = DataError;

fn insert(&self) -> Result<(), Self::Error> {
use schema::podcast::dsl::*;
use schema::shows::dsl::*;
let db = connection();
let con = db.get()?;

diesel::insert_into(podcast)
diesel::insert_into(shows)
.values(self)
.execute(&con)
.map(|_| ())
@@ -45,13 +45,13 @@ impl Insert<()> for NewPodcast {
impl Update<()> for NewPodcast {
type Error = DataError;

fn update(&self, podcast_id: i32) -> Result<(), Self::Error> {
use schema::podcast::dsl::*;
fn update(&self, show_id: i32) -> Result<(), Self::Error> {
use schema::shows::dsl::*;
let db = connection();
let con = db.get()?;

info!("Updating {}", self.title);
diesel::update(podcast.filter(id.eq(podcast_id)))
diesel::update(shows.filter(id.eq(show_id)))
.set(self)
.execute(&con)
.map(|_| ())
@@ -160,7 +160,7 @@ mod tests {
use rss::Channel;

use database::truncate_db;
use models::{NewPodcastBuilder, Save};
use models::NewPodcastBuilder;

use std::fs::File;
use std::io::BufReader;
@@ -369,20 +369,13 @@ mod tests {

let updated = &*UPDATED_DESC_INTERCEPTED;
updated.update(old.id()).unwrap();
let mut new = dbqueries::get_podcast_from_source_id(42).unwrap();
let new = dbqueries::get_podcast_from_source_id(42).unwrap();

assert_ne!(old, new);
assert_eq!(old.id(), new.id());
assert_eq!(old.source_id(), new.source_id());
assert_eq!(updated, &new);
assert_ne!(updated, &old);

// Check that the update does not override user preferences.
new.set_archive(true);
new.save().unwrap();

let new2 = dbqueries::get_podcast_from_source_id(42).unwrap();
assert_eq!(true, new2.archive());
}

#[test]
@@ -424,16 +417,7 @@ mod tests {
let pd = EXPECTED_INTERCEPTED.to_podcast().unwrap();
// This should error as a unique constraint violation
assert!(EXPECTED_INTERCEPTED.insert().is_err());
let mut old = dbqueries::get_podcast_from_source_id(42).unwrap();
assert_eq!(old, pd);

old.set_archive(true);
old.save().unwrap();

// Assert that it does not mess with user preferences
let pd = UPDATED_DESC_INTERCEPTED.to_podcast().unwrap();
let old = dbqueries::get_podcast_from_source_id(42).unwrap();
assert_eq!(old, pd);
assert_eq!(old.archive(), true);
}
}
@@ -3,16 +3,16 @@ use diesel::SaveChangesDsl;
use database::connection;
use errors::DataError;
use models::{Save, Source};
use schema::podcast;
use schema::shows;

use std::sync::Arc;

#[derive(Queryable, Identifiable, AsChangeset, Associations, PartialEq)]
#[belongs_to(Source, foreign_key = "source_id")]
#[changeset_options(treat_none_as_null = "true")]
#[table_name = "podcast"]
#[table_name = "shows"]
#[derive(Debug, Clone)]
/// Diesel Model of the podcast table.
/// Diesel Model of the shows table.
pub struct Podcast {
id: i32,
title: String,
@@ -1,5 +1,5 @@
table! {
episode (title, podcast_id) {
episodes (title, show_id) {
rowid -> Integer,
title -> Text,
uri -> Nullable<Text>,
@@ -10,12 +10,12 @@ table! {
duration -> Nullable<Integer>,
guid -> Nullable<Text>,
played -> Nullable<Integer>,
podcast_id -> Integer,
show_id -> Integer,
}
}

table! {
podcast (id) {
shows (id) {
id -> Integer,
title -> Text,
link -> Text,
@@ -34,4 +34,4 @@ table! {
}
}

allow_tables_to_appear_in_same_query!(episode, podcast, source);
allow_tables_to_appear_in_same_query!(episodes, shows, source);
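With the regenerated schema above, query code simply switches from schema::episode/schema::podcast to schema::episodes/schema::shows. The following is a hypothetical sketch, not part of this commit; it assumes it lives inside hammond-data where models, schema, and errors are crate modules, and follows the same pattern as the dbqueries helpers shown earlier.

use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;

use errors::DataError;
use models::Episode;

// Hypothetical sketch: unplayed episodes of one show, newest first.
fn unplayed_for_show(con: &SqliteConnection, pid: i32) -> Result<Vec<Episode>, DataError> {
    use schema::episodes::dsl::*;

    episodes
        .filter(show_id.eq(pid))
        .filter(played.is_null())
        .order(epoch.desc())
        .load::<Episode>(con)
        .map_err(From::from)
}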
@@ -183,7 +183,7 @@ mod tests {
// Setup episodes
let n1 = NewEpisodeBuilder::default()
.title("foo_bar".to_string())
.podcast_id(0)
.show_id(0)
.build()
.unwrap()
.to_episode()
@@ -191,14 +191,14 @@ mod tests {

let n2 = NewEpisodeBuilder::default()
.title("bar_baz".to_string())
.podcast_id(1)
.show_id(1)
.build()
.unwrap()
.to_episode()
.unwrap();

let mut ep1 = dbqueries::get_episode_from_pk(n1.title(), n1.podcast_id()).unwrap();
let mut ep2 = dbqueries::get_episode_from_pk(n2.title(), n2.podcast_id()).unwrap();
let mut ep1 = dbqueries::get_episode_from_pk(n1.title(), n1.show_id()).unwrap();
let mut ep2 = dbqueries::get_episode_from_pk(n2.title(), n2.show_id()).unwrap();
ep1.set_local_uri(Some(valid_path.to_str().unwrap()));
ep2.set_local_uri(Some(bad_path.to_str().unwrap()));
@@ -93,9 +93,7 @@ impl PopulatedStack {
let old = self.show.container.clone();

// save the ShowWidget vertical scrollbar alignment
self.show
.podcast_id()
.map(|id| self.show.save_vadjustment(id));
self.show.show_id().map(|id| self.show.save_vadjustment(id));

let new = ShowWidget::new(pd, self.sender.clone());
self.show = new;
@@ -113,7 +111,7 @@ impl PopulatedStack {

pub fn update_widget(&mut self) -> Result<(), Error> {
let old = self.show.container.clone();
let id = self.show.podcast_id();
let id = self.show.show_id();
if id.is_none() {
return Ok(());
}
@@ -131,9 +129,9 @@ impl PopulatedStack {
Ok(())
}

// Only update widget if its podcast_id is equal to pid.
// Only update widget if its show_id is equal to pid.
pub fn update_widget_if_same(&mut self, pid: i32) -> Result<(), Error> {
if self.show.podcast_id() != Some(pid) {
if self.show.show_id() != Some(pid) {
debug!("Different widget. Early return");
return Ok(());
}
@@ -226,15 +226,15 @@ lazy_static! {
// GObjects do not implement Send trait, so SendCell is a way around that.
// Also lazy_static requires Sync trait, so that's what the mutexes are.
// TODO: maybe use something that would just scale to requested size?
pub fn set_image_from_path(image: &gtk::Image, podcast_id: i32, size: u32) -> Result<(), Error> {
pub fn set_image_from_path(image: &gtk::Image, show_id: i32, size: u32) -> Result<(), Error> {
// Check if there's an active download about this show cover.
// If there is, a callback will be set so this function will be called again.
// If the download succeeds, there should be a quick return from the pixbuf cache_image
// If it fails another download will be scheduled.
if let Ok(guard) = COVER_DL_REGISTRY.read() {
if guard.contains(&podcast_id) {
if guard.contains(&show_id) {
let callback = clone!(image => move || {
let _ = set_image_from_path(&image, podcast_id, size);
let _ = set_image_from_path(&image, show_id, size);
glib::Continue(false)
});
gtk::timeout_add(250, callback);
@@ -245,7 +245,7 @@ pub fn set_image_from_path(image: &gtk::Image, podcast_id: i32, size: u32) -> Re
if let Ok(hashmap) = CACHED_PIXBUFS.read() {
// Check if the requested (cover + size) is already in the cache
// and if so do an early return after that.
if let Some(guard) = hashmap.get(&(podcast_id, size)) {
if let Some(guard) = hashmap.get(&(show_id, size)) {
guard
.lock()
.map_err(|err| format_err!("SendCell Mutex: {}", err))
@@ -263,11 +263,11 @@ pub fn set_image_from_path(image: &gtk::Image, podcast_id: i32, size: u32) -> Re
let (sender, receiver) = unbounded();
THREADPOOL.spawn(move || {
if let Ok(mut guard) = COVER_DL_REGISTRY.write() {
guard.insert(podcast_id);
if let Ok(pd) = dbqueries::get_podcast_cover_from_id(podcast_id) {
guard.insert(show_id);
if let Ok(pd) = dbqueries::get_podcast_cover_from_id(show_id) {
sender.send(downloader::cache_image(&pd));
}
guard.remove(&podcast_id);
guard.remove(&show_id);
}
});

@@ -278,7 +278,7 @@ pub fn set_image_from_path(image: &gtk::Image, podcast_id: i32, size: u32) -> Re
if let Ok(path) = path {
if let Ok(px) = Pixbuf::new_from_file_at_scale(&path, s, s, true) {
if let Ok(mut hashmap) = CACHED_PIXBUFS.write() {
hashmap.insert((podcast_id, size), Mutex::new(SendCell::new(px.clone())));
hashmap.insert((show_id, size), Mutex::new(SendCell::new(px.clone())));
image.set_from_pixbuf(&px);
}
}
@@ -429,7 +429,7 @@ impl EpisodeWidget {
}

fn on_download_clicked(ep: &EpisodeWidgetQuery, sender: &Sender<Action>) -> Result<(), Error> {
let pd = dbqueries::get_podcast_from_id(ep.podcast_id())?;
let pd = dbqueries::get_podcast_from_id(ep.show_id())?;
let download_fold = get_download_folder(&pd.title())?;

// Start a new download.
@@ -202,7 +202,7 @@ impl HomeEpisode {
gtk::Builder::new_from_resource("/org/gnome/Hammond/gtk/episodes_view_widget.ui");
let container: gtk::Box = builder.get_object("container").unwrap();
let image: gtk::Image = builder.get_object("cover").unwrap();
let pid = episode.podcast_id();
let pid = episode.show_id();
let ep = EpisodeWidget::new(episode, sender);

let view = HomeEpisode {
@@ -215,15 +215,15 @@ impl HomeEpisode {
view
}

fn init(&self, podcast_id: i32) {
self.set_cover(podcast_id)
fn init(&self, show_id: i32) {
self.set_cover(show_id)
.map_err(|err| error!("Failed to set a cover: {}", err))
.ok();

self.container.pack_start(&self.episode, true, true, 6);
}

fn set_cover(&self, podcast_id: i32) -> Result<(), Error> {
utils::set_image_from_path(&self.image, podcast_id, 64)
fn set_cover(&self, show_id: i32) -> Result<(), Error> {
utils::set_image_from_path(&self.image, show_id, 64)
}
}
@@ -357,7 +357,7 @@ impl PlayerWidget {

pub fn initialize_episode(&self, rowid: i32) -> Result<(), Error> {
let ep = dbqueries::get_episode_widget_from_rowid(rowid)?;
let pd = dbqueries::get_podcast_cover_from_id(ep.podcast_id())?;
let pd = dbqueries::get_podcast_cover_from_id(ep.show_id())?;

self.info.init(&ep, &pd);
// Currently that will always be the case since the play button is
@@ -36,7 +36,7 @@ pub struct ShowWidget {
settings: gtk::MenuButton,
unsub: gtk::Button,
episodes: gtk::ListBox,
podcast_id: Option<i32>,
show_id: Option<i32>,
}

impl Default for ShowWidget {
@@ -61,7 +61,7 @@ impl Default for ShowWidget {
link,
settings,
episodes,
podcast_id: None,
show_id: None,
}
}
}
@@ -87,7 +87,7 @@ impl ShowWidget {
}));

self.set_description(pd.description());
self.podcast_id = Some(pd.id());
self.show_id = Some(pd.id());

self.set_cover(&pd)
.map_err(|err| error!("Failed to set a cover: {}", err))
@@ -166,8 +166,8 @@ impl ShowWidget {
Ok(())
}

pub fn podcast_id(&self) -> Option<i32> {
self.podcast_id
pub fn show_id(&self) -> Option<i32> {
self.show_id
}
}
@@ -162,7 +162,7 @@ impl ShowsChild {
.ok();
}

fn set_cover(&self, podcast_id: i32) -> Result<(), Error> {
set_image_from_path(&self.cover, podcast_id, 256)
fn set_cover(&self, show_id: i32) -> Result<(), Error> {
set_image_from_path(&self.cover, show_id, 256)
}
}