diff --git a/hammond-data/diesel.toml b/hammond-data/diesel.toml
new file mode 100644
index 0000000..70ba106
--- /dev/null
+++ b/hammond-data/diesel.toml
@@ -0,0 +1,6 @@
+# For documentation on how to configure this file,
+# see diesel.rs/guides/configuring-diesel-cli
+
+[print_schema]
+file = "src/schema.rs"
+patch_file = "src/schema.patch"
diff --git a/hammond-data/migrations/2018-06-30-141659_remove_dead_fields/down.sql b/hammond-data/migrations/2018-06-30-141659_remove_dead_fields/down.sql
new file mode 100644
index 0000000..c33f425
--- /dev/null
+++ b/hammond-data/migrations/2018-06-30-141659_remove_dead_fields/down.sql
@@ -0,0 +1,53 @@
+ALTER TABLE episode RENAME TO old_table;
+
+CREATE TABLE episode (
+    title TEXT NOT NULL,
+    uri TEXT,
+    local_uri TEXT,
+    description TEXT,
+    epoch INTEGER NOT NULL DEFAULT 0,
+    length INTEGER,
+    duration INTEGER,
+    guid TEXT,
+    played INTEGER,
+    podcast_id INTEGER NOT NULL,
+    favorite INTEGER DEFAULT 0,
+    archive INTEGER DEFAULT 0,
+    PRIMARY KEY (title, podcast_id)
+);
+
+INSERT INTO episode (title, uri, local_uri, description, epoch, length, duration, guid, played, podcast_id, favorite, archive)
+SELECT title, uri, local_uri, description, epoch, length, duration, guid, played, podcast_id, 0, 0
+FROM old_table;
+
+Drop table old_table;
+
+ALTER TABLE podcast RENAME TO old_table;
+CREATE TABLE `podcast` (
+    `id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
+    `title` TEXT NOT NULL,
+    `link` TEXT NOT NULL,
+    `description` TEXT NOT NULL,
+    `image_uri` TEXT,
+    `source_id` INTEGER NOT NULL UNIQUE,
+    `favorite` INTEGER NOT NULL DEFAULT 0,
+    `archive` INTEGER NOT NULL DEFAULT 0,
+    `always_dl` INTEGER NOT NULL DEFAULT 0
+);
+
+INSERT INTO podcast (
+    id,
+    title,
+    link,
+    description,
+    image_uri,
+    source_id
+) SELECT id,
+    title,
+    link,
+    description,
+    image_uri,
+    source_id
+FROM old_table;
+
+Drop table old_table;
\ No newline at end of file
diff --git a/hammond-data/migrations/2018-06-30-141659_remove_dead_fields/up.sql b/hammond-data/migrations/2018-06-30-141659_remove_dead_fields/up.sql
new file mode 100644
index 0000000..f1afbf2
--- /dev/null
+++ b/hammond-data/migrations/2018-06-30-141659_remove_dead_fields/up.sql
@@ -0,0 +1,66 @@
+ALTER TABLE episode RENAME TO old_table;
+
+CREATE TABLE episode (
+    title TEXT NOT NULL,
+    uri TEXT,
+    local_uri TEXT,
+    description TEXT,
+    epoch INTEGER NOT NULL DEFAULT 0,
+    length INTEGER,
+    duration INTEGER,
+    guid TEXT,
+    played INTEGER,
+    podcast_id INTEGER NOT NULL,
+    PRIMARY KEY (title, podcast_id)
+);
+
+INSERT INTO episode (
+    title,
+    uri,
+    local_uri,
+    description,
+    epoch,
+    length,
+    duration,
+    guid,
+    played,
+    podcast_id
+) SELECT title,
+    uri,
+    local_uri,
+    description,
+    epoch, length,
+    duration,
+    guid,
+    played,
+    podcast_id
+FROM old_table;
+
+Drop table old_table;
+
+ALTER TABLE podcast RENAME TO old_table;
+CREATE TABLE `podcast` (
+    `id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
+    `title` TEXT NOT NULL,
+    `link` TEXT NOT NULL,
+    `description` TEXT NOT NULL,
+    `image_uri` TEXT,
+    `source_id` INTEGER NOT NULL UNIQUE
+);
+
+INSERT INTO podcast (
+    id,
+    title,
+    link,
+    description,
+    image_uri,
+    source_id
+) SELECT id,
+    title,
+    link,
+    description,
+    image_uri,
+    source_id
+FROM old_table;
+
+Drop table old_table;
diff --git a/hammond-data/migrations/2018-06-30-150717_rename_tables/down.sql b/hammond-data/migrations/2018-06-30-150717_rename_tables/down.sql
new file mode 100644
index 0000000..3c602fb
--- /dev/null
+++
b/hammond-data/migrations/2018-06-30-150717_rename_tables/down.sql @@ -0,0 +1,40 @@ +ALTER TABLE episodes RENAME TO old_table; +ALTER TABLE shows RENAME TO podcast; + +CREATE TABLE episode ( + title TEXT NOT NULL, + uri TEXT, + local_uri TEXT, + description TEXT, + epoch INTEGER NOT NULL DEFAULT 0, + length INTEGER, + duration INTEGER, + guid TEXT, + played INTEGER, + podcast_id INTEGER NOT NULL, + PRIMARY KEY (title, podcast_id) +); + +INSERT INTO episode ( + title, + uri, + local_uri, + description, + epoch, + length, + duration, + guid, + played, + podcast_id +) SELECT title, + uri, + local_uri, + description, + epoch, length, + duration, + guid, + played, + show_id +FROM old_table; + +Drop table old_table; diff --git a/hammond-data/migrations/2018-06-30-150717_rename_tables/up.sql b/hammond-data/migrations/2018-06-30-150717_rename_tables/up.sql new file mode 100644 index 0000000..95a3ce8 --- /dev/null +++ b/hammond-data/migrations/2018-06-30-150717_rename_tables/up.sql @@ -0,0 +1,40 @@ +ALTER TABLE episode RENAME TO old_table; +ALTER TABLE podcast RENAME TO shows; + +CREATE TABLE episodes ( + title TEXT NOT NULL, + uri TEXT, + local_uri TEXT, + description TEXT, + epoch INTEGER NOT NULL DEFAULT 0, + length INTEGER, + duration INTEGER, + guid TEXT, + played INTEGER, + show_id INTEGER NOT NULL, + PRIMARY KEY (title, show_id) +); + +INSERT INTO episodes ( + title, + uri, + local_uri, + description, + epoch, + length, + duration, + guid, + played, + show_id +) SELECT title, + uri, + local_uri, + description, + epoch, length, + duration, + guid, + played, + podcast_id +FROM old_table; + +Drop table old_table; diff --git a/hammond-data/src/database.rs b/hammond-data/src/database.rs index 60680be..d467aee 100644 --- a/hammond-data/src/database.rs +++ b/hammond-data/src/database.rs @@ -68,8 +68,8 @@ fn run_migration_on(connection: &SqliteConnection) -> Result<(), DataError> { pub fn truncate_db() -> Result<(), DataError> { let db = connection(); let con = db.get()?; - con.execute("DELETE FROM episode")?; - con.execute("DELETE FROM podcast")?; + con.execute("DELETE FROM episodes")?; + con.execute("DELETE FROM shows")?; con.execute("DELETE FROM source")?; Ok(()) } diff --git a/hammond-data/src/dbqueries.rs b/hammond-data/src/dbqueries.rs index 0e7170d..214b141 100644 --- a/hammond-data/src/dbqueries.rs +++ b/hammond-data/src/dbqueries.rs @@ -23,106 +23,106 @@ pub fn get_sources() -> Result, DataError> { .map_err(From::from) } -pub fn get_podcasts() -> Result, DataError> { - use schema::podcast::dsl::*; +pub fn get_podcasts() -> Result, DataError> { + use schema::shows::dsl::*; let db = connection(); let con = db.get()?; - podcast + shows .order(title.asc()) - .load::(&con) + .load::(&con) .map_err(From::from) } -pub fn get_podcasts_filter(filter_ids: &[i32]) -> Result, DataError> { - use schema::podcast::dsl::*; +pub fn get_podcasts_filter(filter_ids: &[i32]) -> Result, DataError> { + use schema::shows::dsl::*; let db = connection(); let con = db.get()?; - podcast + shows .order(title.asc()) .filter(id.ne_all(filter_ids)) - .load::(&con) + .load::(&con) .map_err(From::from) } pub fn get_episodes() -> Result, DataError> { - use schema::episode::dsl::*; + use schema::episodes::dsl::*; let db = connection(); let con = db.get()?; - episode + episodes .order(epoch.desc()) .load::(&con) .map_err(From::from) } -pub(crate) fn get_downloaded_episodes() -> Result, DataError> { - use schema::episode::dsl::*; +pub(crate) fn get_downloaded_episodes() -> Result, DataError> { + use schema::episodes::dsl::*; 
let db = connection(); let con = db.get()?; - episode + episodes .select((rowid, local_uri, played)) .filter(local_uri.is_not_null()) - .load::(&con) + .load::(&con) .map_err(From::from) } // pub(crate) fn get_played_episodes() -> Result, DataError> { -// use schema::episode::dsl::*; +// use schema::episodes::dsl::*; // let db = connection(); // let con = db.get()?; -// episode +// episodes // .filter(played.is_not_null()) // .load::(&con) // .map_err(From::from) // } -pub(crate) fn get_played_cleaner_episodes() -> Result, DataError> { - use schema::episode::dsl::*; +pub(crate) fn get_played_cleaner_episodes() -> Result, DataError> { + use schema::episodes::dsl::*; let db = connection(); let con = db.get()?; - episode + episodes .select((rowid, local_uri, played)) .filter(played.is_not_null()) - .load::(&con) + .load::(&con) .map_err(From::from) } pub fn get_episode_from_rowid(ep_id: i32) -> Result { - use schema::episode::dsl::*; + use schema::episodes::dsl::*; let db = connection(); let con = db.get()?; - episode + episodes .filter(rowid.eq(ep_id)) .get_result::(&con) .map_err(From::from) } -pub fn get_episode_widget_from_rowid(ep_id: i32) -> Result { - use schema::episode::dsl::*; +pub fn get_episode_widget_from_rowid(ep_id: i32) -> Result { + use schema::episodes::dsl::*; let db = connection(); let con = db.get()?; - episode + episodes .select(( - rowid, title, uri, local_uri, epoch, length, duration, played, podcast_id, + rowid, title, uri, local_uri, epoch, length, duration, played, show_id, )) .filter(rowid.eq(ep_id)) - .get_result::(&con) + .get_result::(&con) .map_err(From::from) } pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result, DataError> { - use schema::episode::dsl::*; + use schema::episodes::dsl::*; let db = connection(); let con = db.get()?; - episode + episodes .filter(rowid.eq(ep_id)) .select(local_uri) .get_result::>(&con) @@ -132,48 +132,48 @@ pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result, DataE pub fn get_episodes_widgets_filter_limit( filter_ids: &[i32], limit: u32, -) -> Result, DataError> { - use schema::episode::dsl::*; +) -> Result, DataError> { + use schema::episodes::dsl::*; let db = connection(); let con = db.get()?; let columns = ( - rowid, title, uri, local_uri, epoch, length, duration, played, podcast_id, + rowid, title, uri, local_uri, epoch, length, duration, played, show_id, ); - episode + episodes .select(columns) .order(epoch.desc()) - .filter(podcast_id.ne_all(filter_ids)) + .filter(show_id.ne_all(filter_ids)) .limit(i64::from(limit)) - .load::(&con) + .load::(&con) .map_err(From::from) } -pub fn get_podcast_from_id(pid: i32) -> Result { - use schema::podcast::dsl::*; +pub fn get_podcast_from_id(pid: i32) -> Result { + use schema::shows::dsl::*; let db = connection(); let con = db.get()?; - podcast + shows .filter(id.eq(pid)) - .get_result::(&con) + .get_result::(&con) .map_err(From::from) } -pub fn get_podcast_cover_from_id(pid: i32) -> Result { - use schema::podcast::dsl::*; +pub fn get_podcast_cover_from_id(pid: i32) -> Result { + use schema::shows::dsl::*; let db = connection(); let con = db.get()?; - podcast + shows .select((id, title, image_uri)) .filter(id.eq(pid)) - .get_result::(&con) + .get_result::(&con) .map_err(From::from) } -pub fn get_pd_episodes(parent: &Podcast) -> Result, DataError> { - use schema::episode::dsl::*; +pub fn get_pd_episodes(parent: &Show) -> Result, DataError> { + use schema::episodes::dsl::*; let db = connection(); let con = db.get()?; @@ -183,7 +183,7 @@ pub fn get_pd_episodes(parent: &Podcast) 
-> Result, DataError> { .map_err(From::from) } -pub fn get_pd_episodes_count(parent: &Podcast) -> Result { +pub fn get_pd_episodes_count(parent: &Show) -> Result { let db = connection(); let con = db.get()?; @@ -193,24 +193,24 @@ pub fn get_pd_episodes_count(parent: &Podcast) -> Result { .map_err(From::from) } -pub fn get_pd_episodeswidgets(parent: &Podcast) -> Result, DataError> { - use schema::episode::dsl::*; +pub fn get_pd_episodeswidgets(parent: &Show) -> Result, DataError> { + use schema::episodes::dsl::*; let db = connection(); let con = db.get()?; let columns = ( - rowid, title, uri, local_uri, epoch, length, duration, played, podcast_id, + rowid, title, uri, local_uri, epoch, length, duration, played, show_id, ); - episode + episodes .select(columns) - .filter(podcast_id.eq(parent.id())) + .filter(show_id.eq(parent.id())) .order(epoch.desc()) - .load::(&con) + .load::(&con) .map_err(From::from) } -pub fn get_pd_unplayed_episodes(parent: &Podcast) -> Result, DataError> { - use schema::episode::dsl::*; +pub fn get_pd_unplayed_episodes(parent: &Show) -> Result, DataError> { + use schema::episodes::dsl::*; let db = connection(); let con = db.get()?; @@ -221,8 +221,8 @@ pub fn get_pd_unplayed_episodes(parent: &Podcast) -> Result, DataEr .map_err(From::from) } -// pub(crate) fn get_pd_episodes_limit(parent: &Podcast, limit: u32) -> -// Result, DataError> { use schema::episode::dsl::*; +// pub(crate) fn get_pd_episodes_limit(parent: &Show, limit: u32) -> +// Result, DataError> { use schema::episodes::dsl::*; // let db = connection(); // let con = db.get()?; @@ -256,25 +256,25 @@ pub fn get_source_from_id(id_: i32) -> Result { .map_err(From::from) } -pub fn get_podcast_from_source_id(sid: i32) -> Result { - use schema::podcast::dsl::*; +pub fn get_podcast_from_source_id(sid: i32) -> Result { + use schema::shows::dsl::*; let db = connection(); let con = db.get()?; - podcast + shows .filter(source_id.eq(sid)) - .get_result::(&con) + .get_result::(&con) .map_err(From::from) } pub fn get_episode_from_pk(title_: &str, pid: i32) -> Result { - use schema::episode::dsl::*; + use schema::episodes::dsl::*; let db = connection(); let con = db.get()?; - episode + episodes .filter(title.eq(title_)) - .filter(podcast_id.eq(pid)) + .filter(show_id.eq(pid)) .get_result::(&con) .map_err(From::from) } @@ -283,19 +283,19 @@ pub(crate) fn get_episode_minimal_from_pk( title_: &str, pid: i32, ) -> Result { - use schema::episode::dsl::*; + use schema::episodes::dsl::*; let db = connection(); let con = db.get()?; - episode - .select((rowid, title, uri, epoch, duration, guid, podcast_id)) + episodes + .select((rowid, title, uri, epoch, duration, guid, show_id)) .filter(title.eq(title_)) - .filter(podcast_id.eq(pid)) + .filter(show_id.eq(pid)) .get_result::(&con) .map_err(From::from) } -pub(crate) fn remove_feed(pd: &Podcast) -> Result<(), DataError> { +pub(crate) fn remove_feed(pd: &Show) -> Result<(), DataError> { let db = connection(); let con = db.get()?; @@ -314,16 +314,16 @@ fn delete_source(con: &SqliteConnection, source_id: i32) -> QueryResult { diesel::delete(source.filter(id.eq(source_id))).execute(con) } -fn delete_podcast(con: &SqliteConnection, podcast_id: i32) -> QueryResult { - use schema::podcast::dsl::*; +fn delete_podcast(con: &SqliteConnection, show_id: i32) -> QueryResult { + use schema::shows::dsl::*; - diesel::delete(podcast.filter(id.eq(podcast_id))).execute(con) + diesel::delete(shows.filter(id.eq(show_id))).execute(con) } fn delete_podcast_episodes(con: &SqliteConnection, parent_id: i32) 
-> QueryResult { - use schema::episode::dsl::*; + use schema::episodes::dsl::*; - diesel::delete(episode.filter(podcast_id.eq(parent_id))).execute(con) + diesel::delete(episodes.filter(show_id.eq(parent_id))).execute(con) } pub fn source_exists(url: &str) -> Result { @@ -338,70 +338,70 @@ pub fn source_exists(url: &str) -> Result { } pub(crate) fn podcast_exists(source_id_: i32) -> Result { - use schema::podcast::dsl::*; + use schema::shows::dsl::*; let db = connection(); let con = db.get()?; - select(exists(podcast.filter(source_id.eq(source_id_)))) + select(exists(shows.filter(source_id.eq(source_id_)))) .get_result(&con) .map_err(From::from) } #[cfg_attr(rustfmt, rustfmt_skip)] -pub(crate) fn episode_exists(title_: &str, podcast_id_: i32) -> Result { - use schema::episode::dsl::*; +pub(crate) fn episode_exists(title_: &str, show_id_: i32) -> Result { + use schema::episodes::dsl::*; let db = connection(); let con = db.get()?; - select(exists(episode.filter(podcast_id.eq(podcast_id_)).filter(title.eq(title_)))) + select(exists(episodes.filter(show_id.eq(show_id_)).filter(title.eq(title_)))) .get_result(&con) .map_err(From::from) } -/// Check if the `episode table contains any rows +/// Check if the `episodes table contains any rows /// -/// Return true if `episode` table is populated. +/// Return true if `episodes` table is populated. pub fn is_episodes_populated() -> Result { - use schema::episode::dsl::*; + use schema::episodes::dsl::*; let db = connection(); let con = db.get()?; - select(exists(episode.as_query())) + select(exists(episodes.as_query())) .get_result(&con) .map_err(From::from) } -/// Check if the `podcast` table contains any rows +/// Check if the `shows` table contains any rows /// -/// Return true if `podcast table is populated. +/// Return true if `shows table is populated. pub fn is_podcasts_populated(filter_ids: &[i32]) -> Result { - use schema::podcast::dsl::*; + use schema::shows::dsl::*; let db = connection(); let con = db.get()?; - select(exists(podcast.filter(id.ne_all(filter_ids)))) + select(exists(shows.filter(id.ne_all(filter_ids)))) .get_result(&con) .map_err(From::from) } pub(crate) fn index_new_episodes(eps: &[NewEpisode]) -> Result<(), DataError> { - use schema::episode::dsl::*; + use schema::episodes::dsl::*; let db = connection(); let con = db.get()?; - diesel::insert_into(episode) + diesel::insert_into(episodes) .values(eps) .execute(&*con) .map_err(From::from) .map(|_| ()) } -pub fn update_none_to_played_now(parent: &Podcast) -> Result { - use schema::episode::dsl::*; +pub fn update_none_to_played_now(parent: &Show) -> Result { + use schema::episodes::dsl::*; let db = connection(); let con = db.get()?; diff --git a/hammond-data/src/feed.rs b/hammond-data/src/feed.rs index 745e551..2abcc29 100644 --- a/hammond-data/src/feed.rs +++ b/hammond-data/src/feed.rs @@ -9,7 +9,7 @@ use rss; use dbqueries; use errors::DataError; use models::{Index, IndexState, Update}; -use models::{NewEpisode, NewEpisodeMinimal, NewPodcast, Podcast}; +use models::{NewEpisode, NewEpisodeMinimal, NewShow, Show}; /// Wrapper struct that hold a `Source` id and the `rss::Channel` /// that corresponds to the `Source.uri` field. 
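
For reference, a minimal caller-side sketch of how the renamed query API in dbqueries.rs above reads after the episode → episodes and podcast → shows rename. This is hypothetical illustration code, not part of the patch; it assumes only the public items visible in this diff (dbqueries::get_podcasts, dbqueries::get_pd_episodeswidgets, Show, EpisodeWidgetModel).

// Hypothetical caller of the renamed hammond-data API (not part of this patch).
extern crate hammond_data;

use hammond_data::dbqueries;
use hammond_data::{EpisodeWidgetModel, Show};

fn print_library() {
    // get_podcasts() keeps its old name but now returns the renamed Show model.
    let shows: Vec<Show> = dbqueries::get_podcasts().expect("failed to load shows");
    for show in &shows {
        // Episode widgets are now keyed by show_id instead of podcast_id.
        let episodes: Vec<EpisodeWidgetModel> =
            dbqueries::get_pd_episodeswidgets(show).expect("failed to load episodes");
        println!("{}: {} episode(s)", show.title(), episodes.len());
    }
}
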
@@ -31,15 +31,15 @@ impl Feed { .and_then(move |pd| self.index_channel_items(pd)) } - fn parse_podcast(&self) -> NewPodcast { - NewPodcast::new(&self.channel, self.source_id) + fn parse_podcast(&self) -> NewShow { + NewShow::new(&self.channel, self.source_id) } - fn parse_podcast_async(&self) -> impl Future + Send { + fn parse_podcast_async(&self) -> impl Future + Send { ok(self.parse_podcast()) } - fn index_channel_items(self, pd: Podcast) -> impl Future + Send { + fn index_channel_items(self, pd: Show) -> impl Future + Send { let stream = stream::iter_ok::<_, DataError>(self.channel.into_items()); // Parse the episodes @@ -65,12 +65,12 @@ fn determine_ep_state( item: &rss::Item, ) -> Result, DataError> { // Check if feed exists - let exists = dbqueries::episode_exists(ep.title(), ep.podcast_id())?; + let exists = dbqueries::episode_exists(ep.title(), ep.show_id())?; if !exists { Ok(IndexState::Index(ep.into_new_episode(item))) } else { - let old = dbqueries::get_episode_minimal_from_pk(ep.title(), ep.podcast_id())?; + let old = dbqueries::get_episode_minimal_from_pk(ep.title(), ep.show_id())?; let rowid = old.rowid(); if ep != old { @@ -204,7 +204,7 @@ mod tests { let file = fs::File::open(path).unwrap(); let channel = Channel::read_from(BufReader::new(file)).unwrap(); - let pd = NewPodcast::new(&channel, 42); + let pd = NewShow::new(&channel, 42); assert_eq!(feed.parse_podcast(), pd); } diff --git a/hammond-data/src/lib.rs b/hammond-data/src/lib.rs index d1a617c..0cadaae 100644 --- a/hammond-data/src/lib.rs +++ b/hammond-data/src/lib.rs @@ -81,7 +81,7 @@ pub mod utils; pub use feed::{Feed, FeedBuilder}; pub use models::Save; -pub use models::{Episode, EpisodeWidgetQuery, Podcast, PodcastCoverQuery, Source}; +pub use models::{Episode, EpisodeWidgetModel, Show, ShowCoverModel, Source}; // Set the user agent, See #53 for more // Keep this in sync with Tor-browser releases diff --git a/hammond-data/src/models/episode.rs b/hammond-data/src/models/episode.rs index b9232fe..2b812fd 100644 --- a/hammond-data/src/models/episode.rs +++ b/hammond-data/src/models/episode.rs @@ -5,14 +5,14 @@ use diesel::SaveChangesDsl; use database::connection; use errors::DataError; -use models::{Podcast, Save}; -use schema::episode; +use models::{Save, Show}; +use schema::episodes; #[derive(Queryable, Identifiable, AsChangeset, Associations, PartialEq)] -#[table_name = "episode"] +#[table_name = "episodes"] #[changeset_options(treat_none_as_null = "true")] -#[primary_key(title, podcast_id)] -#[belongs_to(Podcast, foreign_key = "podcast_id")] +#[primary_key(title, show_id)] +#[belongs_to(Show, foreign_key = "show_id")] #[derive(Debug, Clone)] /// Diesel Model of the episode table. pub struct Episode { @@ -26,9 +26,7 @@ pub struct Episode { duration: Option, guid: Option, played: Option, - favorite: bool, - archive: bool, - podcast_id: i32, + show_id: i32, } impl Save for Episode { @@ -154,32 +152,9 @@ impl Episode { self.played = value; } - /// Represents the archiving policy for the episode. - pub fn archive(&self) -> bool { - self.archive - } - - /// Set the `archive` policy. - /// - /// If true, the download cleanr will ignore the episode - /// and the corresponding media value will never be automaticly deleted. - pub fn set_archive(&mut self, b: bool) { - self.archive = b - } - - /// Get the `favorite` status of the `Episode`. - pub fn favorite(&self) -> bool { - self.favorite - } - - /// Set `favorite` status. 
- pub fn set_favorite(&mut self, b: bool) { - self.favorite = b - } - - /// `Podcast` table foreign key. - pub fn podcast_id(&self) -> i32 { - self.podcast_id + /// `Show` table foreign key. + pub fn show_id(&self) -> i32 { + self.show_id } /// Sets the `played` value with the current `epoch` timestap and save it. @@ -191,12 +166,12 @@ impl Episode { } #[derive(Queryable, AsChangeset, PartialEq)] -#[table_name = "episode"] +#[table_name = "episodes"] #[changeset_options(treat_none_as_null = "true")] -#[primary_key(title, podcast_id)] +#[primary_key(title, show_id)] #[derive(Debug, Clone)] /// Diesel Model to be used for constructing `EpisodeWidgets`. -pub struct EpisodeWidgetQuery { +pub struct EpisodeWidgetModel { rowid: i32, title: String, uri: Option, @@ -205,14 +180,12 @@ pub struct EpisodeWidgetQuery { length: Option, duration: Option, played: Option, - // favorite: bool, - // archive: bool, - podcast_id: i32, + show_id: i32, } -impl From for EpisodeWidgetQuery { - fn from(e: Episode) -> EpisodeWidgetQuery { - EpisodeWidgetQuery { +impl From for EpisodeWidgetModel { + fn from(e: Episode) -> EpisodeWidgetModel { + EpisodeWidgetModel { rowid: e.rowid, title: e.title, uri: e.uri, @@ -221,30 +194,30 @@ impl From for EpisodeWidgetQuery { length: e.length, duration: e.duration, played: e.played, - podcast_id: e.podcast_id, + show_id: e.show_id, } } } -impl Save for EpisodeWidgetQuery { +impl Save for EpisodeWidgetModel { type Error = DataError; /// Helper method to easily save/"sync" current state of self to the /// Database. fn save(&self) -> Result { - use schema::episode::dsl::*; + use schema::episodes::dsl::*; let db = connection(); let tempdb = db.get()?; - diesel::update(episode.filter(rowid.eq(self.rowid))) + diesel::update(episodes.filter(rowid.eq(self.rowid))) .set(self) .execute(&*tempdb) .map_err(From::from) } } -impl EpisodeWidgetQuery { +impl EpisodeWidgetModel { /// Get the value of the sqlite's `ROW_ID` pub fn rowid(&self) -> i32 { self.rowid @@ -319,32 +292,9 @@ impl EpisodeWidgetQuery { self.played = value; } - // /// Represents the archiving policy for the episode. - // pub fn archive(&self) -> bool { - // self.archive - // } - - // /// Set the `archive` policy. - // /// - // /// If true, the download cleanr will ignore the episode - // /// and the corresponding media value will never be automaticly deleted. - // pub fn set_archive(&mut self, b: bool) { - // self.archive = b - // } - - // /// Get the `favorite` status of the `Episode`. - // pub fn favorite(&self) -> bool { - // self.favorite - // } - - // /// Set `favorite` status. - // pub fn set_favorite(&mut self, b: bool) { - // self.favorite = b - // } - - /// `Podcast` table foreign key. - pub fn podcast_id(&self) -> i32 { - self.podcast_id + /// `Show` table foreign key. + pub fn show_id(&self) -> i32 { + self.show_id } /// Sets the `played` value with the current `epoch` timestap and save it. @@ -356,38 +306,38 @@ impl EpisodeWidgetQuery { } #[derive(Queryable, AsChangeset, PartialEq)] -#[table_name = "episode"] +#[table_name = "episodes"] #[changeset_options(treat_none_as_null = "true")] -#[primary_key(title, podcast_id)] +#[primary_key(title, show_id)] #[derive(Debug, Clone)] /// Diesel Model to be used internal with the `utils::checkup` function. 
-pub struct EpisodeCleanerQuery { +pub struct EpisodeCleanerModel { rowid: i32, local_uri: Option, played: Option, } -impl Save for EpisodeCleanerQuery { +impl Save for EpisodeCleanerModel { type Error = DataError; /// Helper method to easily save/"sync" current state of self to the /// Database. fn save(&self) -> Result { - use schema::episode::dsl::*; + use schema::episodes::dsl::*; let db = connection(); let tempdb = db.get()?; - diesel::update(episode.filter(rowid.eq(self.rowid))) + diesel::update(episodes.filter(rowid.eq(self.rowid))) .set(self) .execute(&*tempdb) .map_err(From::from) } } -impl From for EpisodeCleanerQuery { - fn from(e: Episode) -> EpisodeCleanerQuery { - EpisodeCleanerQuery { +impl From for EpisodeCleanerModel { + fn from(e: Episode) -> EpisodeCleanerModel { + EpisodeCleanerModel { rowid: e.rowid(), local_uri: e.local_uri, played: e.played, @@ -395,7 +345,7 @@ impl From for EpisodeCleanerQuery { } } -impl EpisodeCleanerQuery { +impl EpisodeCleanerModel { /// Get the value of the sqlite's `ROW_ID` pub fn rowid(&self) -> i32 { self.rowid @@ -428,9 +378,9 @@ impl EpisodeCleanerQuery { } #[derive(Queryable, AsChangeset, PartialEq)] -#[table_name = "episode"] +#[table_name = "episodes"] #[changeset_options(treat_none_as_null = "true")] -#[primary_key(title, podcast_id)] +#[primary_key(title, show_id)] #[derive(Debug, Clone)] /// Diesel Model to be used for FIXME. pub struct EpisodeMinimal { @@ -440,7 +390,7 @@ pub struct EpisodeMinimal { epoch: i32, duration: Option, guid: Option, - podcast_id: i32, + show_id: i32, } impl From for EpisodeMinimal { @@ -452,7 +402,7 @@ impl From for EpisodeMinimal { guid: e.guid, epoch: e.epoch, duration: e.duration, - podcast_id: e.podcast_id, + show_id: e.show_id, } } } @@ -495,8 +445,8 @@ impl EpisodeMinimal { self.duration } - /// `Podcast` table foreign key. - pub fn podcast_id(&self) -> i32 { - self.podcast_id + /// `Show` table foreign key. 
+ pub fn show_id(&self) -> i32 { + self.show_id } } diff --git a/hammond-data/src/models/mod.rs b/hammond-data/src/models/mod.rs index 59fffb5..260922b 100644 --- a/hammond-data/src/models/mod.rs +++ b/hammond-data/src/models/mod.rs @@ -1,26 +1,26 @@ mod new_episode; -mod new_podcast; +mod new_show; mod new_source; mod episode; -mod podcast; +mod show; mod source; // use futures::prelude::*; // use futures::future::*; -pub(crate) use self::episode::EpisodeCleanerQuery; +pub(crate) use self::episode::EpisodeCleanerModel; pub(crate) use self::new_episode::{NewEpisode, NewEpisodeMinimal}; -pub(crate) use self::new_podcast::NewPodcast; +pub(crate) use self::new_show::NewShow; pub(crate) use self::new_source::NewSource; #[cfg(test)] pub(crate) use self::new_episode::NewEpisodeBuilder; #[cfg(test)] -pub(crate) use self::new_podcast::NewPodcastBuilder; +pub(crate) use self::new_show::NewShowBuilder; -pub use self::episode::{Episode, EpisodeMinimal, EpisodeWidgetQuery}; -pub use self::podcast::{Podcast, PodcastCoverQuery}; +pub use self::episode::{Episode, EpisodeMinimal, EpisodeWidgetModel}; +pub use self::show::{Show, ShowCoverModel}; pub use self::source::Source; #[derive(Debug, Clone, PartialEq)] diff --git a/hammond-data/src/models/new_episode.rs b/hammond-data/src/models/new_episode.rs index 0018042..79ced73 100644 --- a/hammond-data/src/models/new_episode.rs +++ b/hammond-data/src/models/new_episode.rs @@ -9,11 +9,11 @@ use dbqueries; use errors::DataError; use models::{Episode, EpisodeMinimal, Index, Insert, Update}; use parser; -use schema::episode; +use schema::episodes; use utils::url_cleaner; #[derive(Insertable, AsChangeset)] -#[table_name = "episode"] +#[table_name = "episodes"] #[derive(Debug, Clone, Default, Builder, PartialEq)] #[builder(default)] #[builder(derive(Debug))] @@ -26,7 +26,7 @@ pub(crate) struct NewEpisode { duration: Option, guid: Option, epoch: i32, - podcast_id: i32, + show_id: i32, } impl From for NewEpisode { @@ -36,7 +36,7 @@ impl From for NewEpisode { .uri(e.uri) .duration(e.duration) .epoch(e.epoch) - .podcast_id(e.podcast_id) + .show_id(e.show_id) .guid(e.guid) .build() .unwrap() @@ -47,12 +47,12 @@ impl Insert<()> for NewEpisode { type Error = DataError; fn insert(&self) -> Result<(), DataError> { - use schema::episode::dsl::*; + use schema::episodes::dsl::*; let db = connection(); let con = db.get()?; info!("Inserting {:?}", self.title); - diesel::insert_into(episode) + diesel::insert_into(episodes) .values(self) .execute(&con) .map_err(From::from) @@ -64,12 +64,12 @@ impl Update<()> for NewEpisode { type Error = DataError; fn update(&self, episode_id: i32) -> Result<(), DataError> { - use schema::episode::dsl::*; + use schema::episodes::dsl::*; let db = connection(); let con = db.get()?; info!("Updating {:?}", self.title); - diesel::update(episode.filter(rowid.eq(episode_id))) + diesel::update(episodes.filter(rowid.eq(episode_id))) .set(self) .execute(&con) .map_err(From::from) @@ -83,10 +83,10 @@ impl Index<()> for NewEpisode { // Does not update the episode description if it's the only thing that has // changed. 
fn index(&self) -> Result<(), DataError> { - let exists = dbqueries::episode_exists(self.title(), self.podcast_id())?; + let exists = dbqueries::episode_exists(self.title(), self.show_id())?; if exists { - let other = dbqueries::get_episode_minimal_from_pk(self.title(), self.podcast_id())?; + let other = dbqueries::get_episode_minimal_from_pk(self.title(), self.show_id())?; if self != &other { self.update(other.rowid()) @@ -106,7 +106,7 @@ impl PartialEq for NewEpisode { && (self.duration() == other.duration()) && (self.epoch() == other.epoch()) && (self.guid() == other.guid()) - && (self.podcast_id() == other.podcast_id()) + && (self.show_id() == other.show_id()) } } @@ -117,7 +117,7 @@ impl PartialEq for NewEpisode { && (self.duration() == other.duration()) && (self.epoch() == other.epoch()) && (self.guid() == other.guid()) - && (self.podcast_id() == other.podcast_id()) + && (self.show_id() == other.show_id()) && (self.description() == other.description()) && (self.length() == other.length()) } @@ -126,14 +126,14 @@ impl PartialEq for NewEpisode { impl NewEpisode { /// Parses an `rss::Item` into a `NewEpisode` Struct. #[allow(dead_code)] - pub(crate) fn new(item: &rss::Item, podcast_id: i32) -> Result { - NewEpisodeMinimal::new(item, podcast_id).map(|ep| ep.into_new_episode(item)) + pub(crate) fn new(item: &rss::Item, show_id: i32) -> Result { + NewEpisodeMinimal::new(item, show_id).map(|ep| ep.into_new_episode(item)) } #[allow(dead_code)] pub(crate) fn to_episode(&self) -> Result { self.index()?; - dbqueries::get_episode_from_pk(&self.title, self.podcast_id).map_err(From::from) + dbqueries::get_episode_from_pk(&self.title, self.show_id).map_err(From::from) } } @@ -167,13 +167,13 @@ impl NewEpisode { self.length } - pub(crate) fn podcast_id(&self) -> i32 { - self.podcast_id + pub(crate) fn show_id(&self) -> i32 { + self.show_id } } #[derive(Insertable, AsChangeset)] -#[table_name = "episode"] +#[table_name = "episodes"] #[derive(Debug, Clone, Builder, PartialEq)] #[builder(derive(Debug))] #[builder(setter(into))] @@ -183,7 +183,7 @@ pub(crate) struct NewEpisodeMinimal { duration: Option, epoch: i32, guid: Option, - podcast_id: i32, + show_id: i32, } impl PartialEq for NewEpisodeMinimal { @@ -193,7 +193,7 @@ impl PartialEq for NewEpisodeMinimal { && (self.duration() == other.duration()) && (self.epoch() == other.epoch()) && (self.guid() == other.guid()) - && (self.podcast_id() == other.podcast_id()) + && (self.show_id() == other.show_id()) } } @@ -241,7 +241,7 @@ impl NewEpisodeMinimal { .duration(duration) .epoch(epoch) .guid(guid) - .podcast_id(parent_id) + .show_id(parent_id) .build() .map_err(From::from) } @@ -263,7 +263,7 @@ impl NewEpisodeMinimal { .uri(self.uri) .duration(self.duration) .epoch(self.epoch) - .podcast_id(self.podcast_id) + .show_id(self.show_id) .guid(self.guid) .length(length) .description(description) @@ -294,8 +294,8 @@ impl NewEpisodeMinimal { self.epoch } - pub(crate) fn podcast_id(&self) -> i32 { - self.podcast_id + pub(crate) fn show_id(&self) -> i32 { + self.show_id } } #[cfg(test)] @@ -324,7 +324,7 @@ mod tests { .guid(Some(String::from("7df4070a-9832-11e7-adac-cb37b05d5e24"))) .epoch(1505296800) .duration(Some(4171)) - .podcast_id(42) + .show_id(42) .build() .unwrap() }; @@ -337,7 +337,7 @@ mod tests { .guid(Some(String::from("7c207a24-e33f-11e6-9438-eb45dcf36a1d"))) .epoch(1502272800) .duration(Some(4415)) - .podcast_id(42) + .show_id(42) .build() .unwrap() }; @@ -358,7 +358,7 @@ mod tests { .length(Some(66738886)) .epoch(1505296800) .duration(Some(4171)) 
- .podcast_id(42) + .show_id(42) .build() .unwrap() }; @@ -382,7 +382,7 @@ mod tests { .length(Some(67527575)) .epoch(1502272800) .duration(Some(4415)) - .podcast_id(42) + .show_id(42) .build() .unwrap() }; @@ -397,7 +397,7 @@ mod tests { .length(Some(66738886)) .epoch(1505296800) .duration(Some(424242)) - .podcast_id(42) + .show_id(42) .build() .unwrap() }; @@ -410,7 +410,7 @@ mod tests { .guid(Some(String::from("78A682B4-73E8-47B8-88C0-1BE62DD4EF9D"))) .epoch(1505280282) .duration(Some(5733)) - .podcast_id(42) + .show_id(42) .build() .unwrap() }; @@ -423,7 +423,7 @@ mod tests { .guid(Some(String::from("1CE57548-B36C-4F14-832A-5D5E0A24E35B"))) .epoch(1504670247) .duration(Some(4491)) - .podcast_id(42) + .show_id(42) .build() .unwrap() }; @@ -443,7 +443,7 @@ mod tests { .length(Some(46479789)) .epoch(1505280282) .duration(Some(5733)) - .podcast_id(42) + .show_id(42) .build() .unwrap() }; @@ -465,7 +465,7 @@ mod tests { .length(Some(36544272)) .epoch(1504670247) .duration(Some(4491)) - .podcast_id(42) + .show_id(42) .build() .unwrap() }; @@ -558,7 +558,7 @@ mod tests { let episode = channel.items().iter().nth(14).unwrap(); let new_ep = NewEpisode::new(&episode, 42).unwrap(); new_ep.insert().unwrap(); - let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.podcast_id()).unwrap(); + let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.show_id()).unwrap(); assert_eq!(new_ep, ep); assert_eq!(&new_ep, &*EXPECTED_INTERCEPTED_1); @@ -567,7 +567,7 @@ mod tests { let episode = channel.items().iter().nth(15).unwrap(); let new_ep = NewEpisode::new(&episode, 42).unwrap(); new_ep.insert().unwrap(); - let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.podcast_id()).unwrap(); + let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.show_id()).unwrap(); assert_eq!(new_ep, ep); assert_eq!(&new_ep, &*EXPECTED_INTERCEPTED_2); @@ -581,21 +581,15 @@ mod tests { let updated = &*UPDATED_DURATION_INTERCEPTED_1; updated.update(old.rowid()).unwrap(); - let mut new = dbqueries::get_episode_from_pk(old.title(), old.podcast_id()).unwrap(); + let new = dbqueries::get_episode_from_pk(old.title(), old.show_id()).unwrap(); - // Assert that updating does not change the rowid and podcast_id + // Assert that updating does not change the rowid and show_id assert_ne!(old, new); assert_eq!(old.rowid(), new.rowid()); - assert_eq!(old.podcast_id(), new.podcast_id()); + assert_eq!(old.show_id(), new.show_id()); assert_eq!(updated, &new); assert_ne!(updated, &old); - - new.set_archive(true); - new.save().unwrap(); - - let new2 = dbqueries::get_episode_from_pk(old.title(), old.podcast_id()).unwrap(); - assert_eq!(true, new2.archive()); } #[test] @@ -608,7 +602,7 @@ mod tests { // Second identical, This should take the early return path assert!(expected.index().is_ok()); // Get the episode - let old = dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap(); + let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap(); // Assert that NewPodcast is equal to the Indexed one assert_eq!(*expected, old); @@ -617,23 +611,22 @@ mod tests { // Update the podcast assert!(updated.index().is_ok()); // Get the new Podcast - let new = dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap(); + let new = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap(); // Assert it's diff from the old one. 
assert_ne!(new, old); assert_eq!(*updated, new); assert_eq!(new.rowid(), old.rowid()); - assert_eq!(new.podcast_id(), old.podcast_id()); + assert_eq!(new.show_id(), old.show_id()); } #[test] fn test_new_episode_to_episode() { let expected = &*EXPECTED_INTERCEPTED_1; - let updated = &*UPDATED_DURATION_INTERCEPTED_1; // Assert insert() produces the same result that you would get with to_podcast() truncate_db().unwrap(); expected.insert().unwrap(); - let old = dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap(); + let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap(); let ep = expected.to_episode().unwrap(); assert_eq!(old, ep); @@ -642,17 +635,7 @@ mod tests { let ep = expected.to_episode().unwrap(); // This should error as a unique constrain violation assert!(expected.insert().is_err()); - let mut old = - dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap(); + let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id()).unwrap(); assert_eq!(old, ep); - - old.set_archive(true); - old.save().unwrap(); - - // Assert that it does not mess with user preferences - let ep = updated.to_episode().unwrap(); - let old = dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap(); - assert_eq!(old, ep); - assert_eq!(old.archive(), true); } } diff --git a/hammond-data/src/models/new_podcast.rs b/hammond-data/src/models/new_show.rs similarity index 84% rename from hammond-data/src/models/new_podcast.rs rename to hammond-data/src/models/new_show.rs index 75152c5..fd916a0 100644 --- a/hammond-data/src/models/new_podcast.rs +++ b/hammond-data/src/models/new_show.rs @@ -4,21 +4,21 @@ use diesel::prelude::*; use rss; use errors::DataError; -use models::Podcast; +use models::Show; use models::{Index, Insert, Update}; -use schema::podcast; +use schema::shows; use database::connection; use dbqueries; use utils::url_cleaner; #[derive(Insertable, AsChangeset)] -#[table_name = "podcast"] +#[table_name = "shows"] #[derive(Debug, Clone, Default, Builder, PartialEq)] #[builder(default)] #[builder(derive(Debug))] #[builder(setter(into))] -pub(crate) struct NewPodcast { +pub(crate) struct NewShow { title: String, link: String, description: String, @@ -26,15 +26,15 @@ pub(crate) struct NewPodcast { source_id: i32, } -impl Insert<()> for NewPodcast { +impl Insert<()> for NewShow { type Error = DataError; fn insert(&self) -> Result<(), Self::Error> { - use schema::podcast::dsl::*; + use schema::shows::dsl::*; let db = connection(); let con = db.get()?; - diesel::insert_into(podcast) + diesel::insert_into(shows) .values(self) .execute(&con) .map(|_| ()) @@ -42,16 +42,16 @@ impl Insert<()> for NewPodcast { } } -impl Update<()> for NewPodcast { +impl Update<()> for NewShow { type Error = DataError; - fn update(&self, podcast_id: i32) -> Result<(), Self::Error> { - use schema::podcast::dsl::*; + fn update(&self, show_id: i32) -> Result<(), Self::Error> { + use schema::shows::dsl::*; let db = connection(); let con = db.get()?; info!("Updating {}", self.title); - diesel::update(podcast.filter(id.eq(podcast_id))) + diesel::update(shows.filter(id.eq(show_id))) .set(self) .execute(&con) .map(|_| ()) @@ -61,7 +61,7 @@ impl Update<()> for NewPodcast { // TODO: Maybe return an Enum Instead. // It would make unti testing better too. 
-impl Index<()> for NewPodcast { +impl Index<()> for NewShow { type Error = DataError; fn index(&self) -> Result<(), DataError> { @@ -81,8 +81,8 @@ impl Index<()> for NewPodcast { } } -impl PartialEq for NewPodcast { - fn eq(&self, other: &Podcast) -> bool { +impl PartialEq for NewShow { + fn eq(&self, other: &Show) -> bool { (self.link() == other.link()) && (self.title() == other.title()) && (self.image_uri() == other.image_uri()) @@ -91,9 +91,9 @@ impl PartialEq for NewPodcast { } } -impl NewPodcast { - /// Parses a `rss::Channel` into a `NewPodcast` Struct. - pub(crate) fn new(chan: &rss::Channel, source_id: i32) -> NewPodcast { +impl NewShow { + /// Parses a `rss::Channel` into a `NewShow` Struct. + pub(crate) fn new(chan: &rss::Channel, source_id: i32) -> NewShow { let title = chan.title().trim(); let link = url_cleaner(chan.link().trim()); @@ -111,7 +111,7 @@ impl NewPodcast { // If itunes is None, try to get the channel.image from the rss spec let image_uri = itunes_img.or_else(|| chan.image().map(|s| s.url().trim().to_owned())); - NewPodcastBuilder::default() + NewShowBuilder::default() .title(title) .description(description) .link(link) @@ -122,14 +122,14 @@ impl NewPodcast { } // Look out for when tryinto lands into stable. - pub(crate) fn to_podcast(&self) -> Result { + pub(crate) fn to_podcast(&self) -> Result { self.index()?; dbqueries::get_podcast_from_source_id(self.source_id).map_err(From::from) } } // Ignore the following geters. They are used in unit tests mainly. -impl NewPodcast { +impl NewShow { #[allow(dead_code)] pub(crate) fn source_id(&self) -> i32 { self.source_id @@ -160,14 +160,14 @@ mod tests { use rss::Channel; use database::truncate_db; - use models::{NewPodcastBuilder, Save}; + use models::NewShowBuilder; use std::fs::File; use std::io::BufReader; - // Pre-built expected NewPodcast structs. + // Pre-built expected NewShow structs. lazy_static! { - static ref EXPECTED_INTERCEPTED: NewPodcast = { + static ref EXPECTED_INTERCEPTED: NewShow = { let descr = "The people behind The Intercept’s fearless reporting and incisive \ commentary—Jeremy Scahill, Glenn Greenwald, Betsy Reed and \ others—discuss the crucial issues of our time: national security, civil \ @@ -175,7 +175,7 @@ mod tests { artists, thinkers, and newsmakers who challenge our preconceptions about \ the world we live in."; - NewPodcastBuilder::default() + NewShowBuilder::default() .title("Intercepted with Jeremy Scahill") .link("https://theintercept.com/podcasts") .description(descr) @@ -188,12 +188,12 @@ mod tests { .build() .unwrap() }; - static ref EXPECTED_LUP: NewPodcast = { + static ref EXPECTED_LUP: NewShow = { let descr = "An open show powered by community LINUX Unplugged takes the best \ attributes of open collaboration and focuses them into a weekly \ lifestyle show about Linux."; - NewPodcastBuilder::default() + NewShowBuilder::default() .title("LINUX Unplugged Podcast") .link("http://www.jupiterbroadcasting.com/") .description(descr) @@ -204,7 +204,7 @@ mod tests { .build() .unwrap() }; - static ref EXPECTED_TIPOFF: NewPodcast = { + static ref EXPECTED_TIPOFF: NewShow = { let desc = "

Welcome to The Tip Off- the podcast where we take you behind the \ scenes of some of the best investigative journalism from recent years. \ Each episode we’ll be digging into an investigative scoop- hearing from \ @@ -215,7 +215,7 @@ mod tests { complicated detective work that goes into doing great investigative \ journalism- then this is the podcast for you.

"; - NewPodcastBuilder::default() + NewShowBuilder::default() .title("The Tip Off") .link("http://www.acast.com/thetipoff") .description(desc) @@ -227,7 +227,7 @@ mod tests { .build() .unwrap() }; - static ref EXPECTED_STARS: NewPodcast = { + static ref EXPECTED_STARS: NewShow = { let descr = "

The first audio drama from Tor Labs and Gideon Media, Steal the Stars \ is a gripping noir science fiction thriller in 14 episodes: Forbidden \ love, a crashed UFO, an alien body, and an impossible heist unlike any \ @@ -237,7 +237,7 @@ mod tests { b183-7311d2e436c3/b3a4aa57a576bb662191f2a6bc2a436c8c4ae256ecffaff5c4c54fd42e\ 923914941c264d01efb1833234b52c9530e67d28a8cebbe3d11a4bc0fbbdf13ecdf1c3.jpeg"; - NewPodcastBuilder::default() + NewShowBuilder::default() .title("Steal the Stars") .link("http://tor-labs.com/") .description(descr) @@ -246,12 +246,12 @@ mod tests { .build() .unwrap() }; - static ref EXPECTED_CODE: NewPodcast = { + static ref EXPECTED_CODE: NewShow = { let descr = "A podcast about humans and technology. Panelists: Coraline Ada Ehmke, \ David Brady, Jessica Kerr, Jay Bobo, Astrid Countee and Sam \ Livingston-Gray. Brought to you by @therubyrep."; - NewPodcastBuilder::default() + NewShowBuilder::default() .title("Greater Than Code") .link("https://www.greaterthancode.com/") .description(descr) @@ -262,8 +262,8 @@ mod tests { .build() .unwrap() }; - static ref EXPECTED_ELLINOFRENEIA: NewPodcast = { - NewPodcastBuilder::default() + static ref EXPECTED_ELLINOFRENEIA: NewShow = { + NewShowBuilder::default() .title("Ελληνοφρένεια") .link("https://ellinofreneia.sealabs.net/feed.rss") .description("Ανεπίσημο feed της Ελληνοφρένειας") @@ -272,8 +272,8 @@ mod tests { .build() .unwrap() }; - static ref UPDATED_DESC_INTERCEPTED: NewPodcast = { - NewPodcastBuilder::default() + static ref UPDATED_DESC_INTERCEPTED: NewShow = { + NewShowBuilder::default() .title("Intercepted with Jeremy Scahill") .link("https://theintercept.com/podcasts") .description("New Description") @@ -293,7 +293,7 @@ mod tests { let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap(); let channel = Channel::read_from(BufReader::new(file)).unwrap(); - let pd = NewPodcast::new(&channel, 42); + let pd = NewShow::new(&channel, 42); assert_eq!(*EXPECTED_INTERCEPTED, pd); } @@ -302,7 +302,7 @@ mod tests { let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml").unwrap(); let channel = Channel::read_from(BufReader::new(file)).unwrap(); - let pd = NewPodcast::new(&channel, 42); + let pd = NewShow::new(&channel, 42); assert_eq!(*EXPECTED_LUP, pd); } @@ -311,7 +311,7 @@ mod tests { let file = File::open("tests/feeds/2018-01-20-TheTipOff.xml").unwrap(); let channel = Channel::read_from(BufReader::new(file)).unwrap(); - let pd = NewPodcast::new(&channel, 42); + let pd = NewShow::new(&channel, 42); assert_eq!(*EXPECTED_TIPOFF, pd); } @@ -320,7 +320,7 @@ mod tests { let file = File::open("tests/feeds/2018-01-20-StealTheStars.xml").unwrap(); let channel = Channel::read_from(BufReader::new(file)).unwrap(); - let pd = NewPodcast::new(&channel, 42); + let pd = NewShow::new(&channel, 42); assert_eq!(*EXPECTED_STARS, pd); } @@ -329,7 +329,7 @@ mod tests { let file = File::open("tests/feeds/2018-01-20-GreaterThanCode.xml").unwrap(); let channel = Channel::read_from(BufReader::new(file)).unwrap(); - let pd = NewPodcast::new(&channel, 42); + let pd = NewShow::new(&channel, 42); assert_eq!(*EXPECTED_CODE, pd); } @@ -338,7 +338,7 @@ mod tests { let file = File::open("tests/feeds/2018-03-28-Ellinofreneia.xml").unwrap(); let channel = Channel::read_from(BufReader::new(file)).unwrap(); - let pd = NewPodcast::new(&channel, 42); + let pd = NewShow::new(&channel, 42); assert_eq!(*EXPECTED_ELLINOFRENEIA, pd); } @@ -349,7 +349,7 @@ mod tests { let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap(); let 
channel = Channel::read_from(BufReader::new(file)).unwrap(); - let npd = NewPodcast::new(&channel, 42); + let npd = NewShow::new(&channel, 42); npd.insert().unwrap(); let pd = dbqueries::get_podcast_from_source_id(42).unwrap(); @@ -369,20 +369,13 @@ mod tests { let updated = &*UPDATED_DESC_INTERCEPTED; updated.update(old.id()).unwrap(); - let mut new = dbqueries::get_podcast_from_source_id(42).unwrap(); + let new = dbqueries::get_podcast_from_source_id(42).unwrap(); assert_ne!(old, new); assert_eq!(old.id(), new.id()); assert_eq!(old.source_id(), new.source_id()); assert_eq!(updated, &new); assert_ne!(updated, &old); - - // Chech that the update does not override user preferences. - new.set_archive(true); - new.save().unwrap(); - - let new2 = dbqueries::get_podcast_from_source_id(42).unwrap(); - assert_eq!(true, new2.archive()); } #[test] @@ -395,14 +388,14 @@ mod tests { assert!(EXPECTED_INTERCEPTED.index().is_ok()); // Get the podcast let old = dbqueries::get_podcast_from_source_id(42).unwrap(); - // Assert that NewPodcast is equal to the Indexed one + // Assert that NewShow is equal to the Indexed one assert_eq!(&*EXPECTED_INTERCEPTED, &old); let updated = &*UPDATED_DESC_INTERCEPTED; // Update the podcast assert!(updated.index().is_ok()); - // Get the new Podcast + // Get the new Show let new = dbqueries::get_podcast_from_source_id(42).unwrap(); // Assert it's diff from the old one. assert_ne!(new, old); @@ -424,16 +417,7 @@ mod tests { let pd = EXPECTED_INTERCEPTED.to_podcast().unwrap(); // This should error as a unique constrain violation assert!(EXPECTED_INTERCEPTED.insert().is_err()); - let mut old = dbqueries::get_podcast_from_source_id(42).unwrap(); - assert_eq!(old, pd); - - old.set_archive(true); - old.save().unwrap(); - - // Assert that it does not mess with user preferences - let pd = UPDATED_DESC_INTERCEPTED.to_podcast().unwrap(); let old = dbqueries::get_podcast_from_source_id(42).unwrap(); assert_eq!(old, pd); - assert_eq!(old.archive(), true); } } diff --git a/hammond-data/src/models/podcast.rs b/hammond-data/src/models/show.rs similarity index 56% rename from hammond-data/src/models/podcast.rs rename to hammond-data/src/models/show.rs index a0090aa..85a35d5 100644 --- a/hammond-data/src/models/podcast.rs +++ b/hammond-data/src/models/show.rs @@ -3,42 +3,37 @@ use diesel::SaveChangesDsl; use database::connection; use errors::DataError; use models::{Save, Source}; -use schema::podcast; - -use std::sync::Arc; +use schema::shows; #[derive(Queryable, Identifiable, AsChangeset, Associations, PartialEq)] #[belongs_to(Source, foreign_key = "source_id")] #[changeset_options(treat_none_as_null = "true")] -#[table_name = "podcast"] +#[table_name = "shows"] #[derive(Debug, Clone)] -/// Diesel Model of the podcast table. -pub struct Podcast { +/// Diesel Model of the shows table. +pub struct Show { id: i32, title: String, link: String, description: String, image_uri: Option, - favorite: bool, - archive: bool, - always_dl: bool, source_id: i32, } -impl Save for Podcast { +impl Save for Show { type Error = DataError; /// Helper method to easily save/"sync" current state of self to the /// Database. - fn save(&self) -> Result { + fn save(&self) -> Result { let db = connection(); let tempdb = db.get()?; - self.save_changes::(&*tempdb).map_err(From::from) + self.save_changes::(&*tempdb).map_err(From::from) } } -impl Podcast { +impl Show { /// Get the Feed `id`. pub fn id(&self) -> i32 { self.id @@ -56,7 +51,7 @@ impl Podcast { &self.link } - /// Set the Podcast/Feed `link`. 
+ /// Set the Show/Feed `link`. pub fn set_link(&mut self, value: &str) { self.link = value.to_string(); } @@ -83,41 +78,6 @@ impl Podcast { self.image_uri = value.map(|x| x.to_string()); } - /// Represents the archiving policy for the episode. - pub fn archive(&self) -> bool { - self.archive - } - - /// Set the `archive` policy. - pub fn set_archive(&mut self, b: bool) { - self.archive = b - } - - /// Get the `favorite` status of the `Podcast` Feed. - pub fn favorite(&self) -> bool { - self.favorite - } - - /// Set `favorite` status. - pub fn set_favorite(&mut self, b: bool) { - self.favorite = b - } - - /// Represents the download policy for the `Podcast` Feed. - /// - /// Reserved for the use with a Download manager, yet to be implemented. - /// - /// If true Podcast Episode should be downloaded automaticly/skipping - /// the selection queue. - pub fn always_download(&self) -> bool { - self.always_dl - } - - /// Set the download policy. - pub fn set_always_download(&mut self, b: bool) { - self.always_dl = b - } - /// `Source` table foreign key. pub fn source_id(&self) -> i32 { self.source_id @@ -125,17 +85,17 @@ impl Podcast { } #[derive(Queryable, Debug, Clone)] -/// Diesel Model of the podcast cover query. -/// Used for fetching information about a Podcast's cover. -pub struct PodcastCoverQuery { +/// Diesel Model of the Show cover query. +/// Used for fetching information about a Show's cover. +pub struct ShowCoverModel { id: i32, title: String, image_uri: Option, } -impl From for PodcastCoverQuery { - fn from(p: Podcast) -> PodcastCoverQuery { - PodcastCoverQuery { +impl From for ShowCoverModel { + fn from(p: Show) -> ShowCoverModel { + ShowCoverModel { id: p.id(), title: p.title, image_uri: p.image_uri, @@ -143,17 +103,7 @@ impl From for PodcastCoverQuery { } } -impl From> for PodcastCoverQuery { - fn from(p: Arc) -> PodcastCoverQuery { - PodcastCoverQuery { - id: p.id(), - title: p.title.clone(), - image_uri: p.image_uri.clone(), - } - } -} - -impl PodcastCoverQuery { +impl ShowCoverModel { /// Get the Feed `id`. pub fn id(&self) -> i32 { self.id diff --git a/hammond-data/src/schema.patch b/hammond-data/src/schema.patch new file mode 100644 index 0000000..d961594 --- /dev/null +++ b/hammond-data/src/schema.patch @@ -0,0 +1,29 @@ +diff --git a/hammond-data/src/schema.rs b/hammond-data/src/schema.rs +index 03cbed0..88f1622 100644 +--- a/hammond-data/src/schema.rs ++++ b/hammond-data/src/schema.rs +@@ -1,8 +1,11 @@ ++#![allow(warnings)] ++ + table! { + episodes (title, show_id) { ++ rowid -> Integer, + title -> Text, + uri -> Nullable, + local_uri -> Nullable, + description -> Nullable, + epoch -> Integer, + length -> Nullable, +@@ -30,11 +33,7 @@ table! { + uri -> Text, + last_modified -> Nullable, + http_etag -> Nullable, + } + } + +-allow_tables_to_appear_in_same_query!( +- episodes, +- shows, +- source, +-); ++allow_tables_to_appear_in_same_query!(episodes, shows, source); diff --git a/hammond-data/src/schema.rs b/hammond-data/src/schema.rs index cf22457..88f1622 100644 --- a/hammond-data/src/schema.rs +++ b/hammond-data/src/schema.rs @@ -1,5 +1,7 @@ +#![allow(warnings)] + table! { - episode (title, podcast_id) { + episodes (title, show_id) { rowid -> Integer, title -> Text, uri -> Nullable, @@ -10,22 +12,17 @@ table! { duration -> Nullable, guid -> Nullable, played -> Nullable, - favorite -> Bool, - archive -> Bool, - podcast_id -> Integer, + show_id -> Integer, } } table! 
{ - podcast (id) { + shows (id) { id -> Integer, title -> Text, link -> Text, description -> Text, image_uri -> Nullable, - favorite -> Bool, - archive -> Bool, - always_dl -> Bool, source_id -> Integer, } } @@ -39,4 +36,4 @@ table! { } } -allow_tables_to_appear_in_same_query!(episode, podcast, source,); +allow_tables_to_appear_in_same_query!(episodes, shows, source); diff --git a/hammond-data/src/utils.rs b/hammond-data/src/utils.rs index aa5289c..aeccd4c 100644 --- a/hammond-data/src/utils.rs +++ b/hammond-data/src/utils.rs @@ -7,7 +7,7 @@ use url::{Position, Url}; use dbqueries; use errors::DataError; -use models::{EpisodeCleanerQuery, Podcast, Save}; +use models::{EpisodeCleanerModel, Save, Show}; use xdg_dirs::DL_DIR; use std::fs; @@ -59,7 +59,7 @@ fn played_cleaner(cleanup_date: DateTime) -> Result<(), DataError> { } /// Check `ep.local_uri` field and delete the file it points to. -fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<(), DataError> { +fn delete_local_content(ep: &mut EpisodeCleanerModel) -> Result<(), DataError> { if ep.local_uri().is_some() { let uri = ep.local_uri().unwrap().to_owned(); if Path::new(&uri).exists() { @@ -108,9 +108,9 @@ pub fn url_cleaner(s: &str) -> String { } } -/// Returns the URI of a Podcast Downloads given it's title. +/// Returns the URI of a Show Downloads given it's title. pub fn get_download_folder(pd_title: &str) -> Result { - // It might be better to make it a hash of the title or the podcast rowid + // It might be better to make it a hash of the title or the Show rowid let download_fold = format!("{}/{}", DL_DIR.to_str().unwrap(), pd_title); // Create the folder @@ -123,7 +123,7 @@ pub fn get_download_folder(pd_title: &str) -> Result { /// Removes all the entries associated with the given show from the database, /// and deletes all of the downloaded content. 
// TODO: Write Tests -pub fn delete_show(pd: &Podcast) -> Result<(), DataError> { +pub fn delete_show(pd: &Show) -> Result<(), DataError> { dbqueries::remove_feed(pd)?; info!("{} was removed succesfully.", pd.title()); @@ -183,7 +183,7 @@ mod tests { // Setup episodes let n1 = NewEpisodeBuilder::default() .title("foo_bar".to_string()) - .podcast_id(0) + .show_id(0) .build() .unwrap() .to_episode() @@ -191,14 +191,14 @@ mod tests { let n2 = NewEpisodeBuilder::default() .title("bar_baz".to_string()) - .podcast_id(1) + .show_id(1) .build() .unwrap() .to_episode() .unwrap(); - let mut ep1 = dbqueries::get_episode_from_pk(n1.title(), n1.podcast_id()).unwrap(); - let mut ep2 = dbqueries::get_episode_from_pk(n2.title(), n2.podcast_id()).unwrap(); + let mut ep1 = dbqueries::get_episode_from_pk(n1.title(), n1.show_id()).unwrap(); + let mut ep2 = dbqueries::get_episode_from_pk(n2.title(), n2.show_id()).unwrap(); ep1.set_local_uri(Some(valid_path.to_str().unwrap())); ep2.set_local_uri(Some(bad_path.to_str().unwrap())); @@ -230,7 +230,7 @@ mod tests { #[test] fn test_download_cleaner() { let _tmp_dir = helper_db(); - let mut episode: EpisodeCleanerQuery = + let mut episode: EpisodeCleanerModel = dbqueries::get_episode_from_pk("foo_bar", 0).unwrap().into(); let valid_path = episode.local_uri().unwrap().to_owned(); diff --git a/hammond-downloader/src/downloader.rs b/hammond-downloader/src/downloader.rs index 28fe2ca..be0bd48 100644 --- a/hammond-downloader/src/downloader.rs +++ b/hammond-downloader/src/downloader.rs @@ -12,7 +12,7 @@ use std::path::Path; use std::sync::{Arc, Mutex}; use hammond_data::xdg_dirs::HAMMOND_CACHE; -use hammond_data::{EpisodeWidgetQuery, PodcastCoverQuery, Save}; +use hammond_data::{EpisodeWidgetModel, Save, ShowCoverModel}; // use failure::Error; use errors::DownloadError; @@ -161,7 +161,7 @@ fn save_io( // TODO: Refactor pub fn get_episode( - ep: &mut EpisodeWidgetQuery, + ep: &mut EpisodeWidgetModel, download_folder: &str, progress: Option>>, ) -> Result<(), DownloadError> { @@ -196,7 +196,7 @@ pub fn get_episode( Ok(()) } -pub fn cache_image(pd: &PodcastCoverQuery) -> Result { +pub fn cache_image(pd: &ShowCoverModel) -> Result { let url = pd .image_uri() .ok_or_else(|| DownloadError::NoImageLocation)? 
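
Before the GTK changes below, a hypothetical end-to-end sketch of the renamed downloader entry points (not part of this patch). The concrete return types of get_download_folder and cache_image are assumed to be String paths, as the surrounding code suggests.

// Hypothetical use of hammond-downloader with the renamed models (illustration only).
extern crate hammond_data;
extern crate hammond_downloader;

use hammond_data::dbqueries;
use hammond_data::utils::get_download_folder;
use hammond_downloader::downloader;

fn download_and_cache(episode_rowid: i32) {
    // EpisodeWidgetQuery is now EpisodeWidgetModel; the query functions kept their names.
    let mut ep = dbqueries::get_episode_widget_from_rowid(episode_rowid)
        .expect("episode not found");

    let show = dbqueries::get_podcast_from_id(ep.show_id()).expect("show not found");
    let folder = get_download_folder(show.title()).expect("no download folder");

    // None: skip progress reporting in this sketch.
    downloader::get_episode(&mut ep, &folder, None).expect("download failed");

    // Covers now go through ShowCoverModel instead of PodcastCoverQuery.
    let cover = dbqueries::get_podcast_cover_from_id(ep.show_id()).expect("cover query failed");
    let cover_path = downloader::cache_image(&cover).expect("cover download failed");
    println!("downloaded to {}, cover cached at {}", folder, cover_path);
}
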
diff --git a/hammond-gtk/src/app.rs b/hammond-gtk/src/app.rs index 18bba1b..6cc56d6 100644 --- a/hammond-gtk/src/app.rs +++ b/hammond-gtk/src/app.rs @@ -10,7 +10,7 @@ use gtk::prelude::*; use gtk::SettingsExt as GtkSettingsExt; use crossbeam_channel::{unbounded, Receiver, Sender}; -use hammond_data::Podcast; +use hammond_data::Show; use headerbar::Header; use settings::{self, WindowGeometry}; @@ -43,7 +43,7 @@ pub enum Action { RefreshEpisodesView, RefreshEpisodesViewBGR, RefreshShowsView, - ReplaceWidget(Arc), + ReplaceWidget(Arc), RefreshWidgetIfSame(i32), ShowWidgetAnimated, ShowShowsAnimated, @@ -51,8 +51,8 @@ pub enum Action { HeaderBarNormal, HeaderBarShowUpdateIndicator, HeaderBarHideUpdateIndicator, - MarkAllPlayerNotification(Arc), - RemoveShow(Arc), + MarkAllPlayerNotification(Arc), + RemoveShow(Arc), ErrorNotification(String), InitEpisode(i32), } diff --git a/hammond-gtk/src/stacks/populated.rs b/hammond-gtk/src/stacks/populated.rs index a4048a9..d192b4f 100644 --- a/hammond-gtk/src/stacks/populated.rs +++ b/hammond-gtk/src/stacks/populated.rs @@ -6,7 +6,7 @@ use crossbeam_channel::Sender; use failure::Error; use hammond_data::dbqueries; -use hammond_data::Podcast; +use hammond_data::Show; use app::Action; use widgets::{ShowWidget, ShowsView}; @@ -89,13 +89,11 @@ impl PopulatedStack { Ok(()) } - pub fn replace_widget(&mut self, pd: Arc) -> Result<(), Error> { + pub fn replace_widget(&mut self, pd: Arc) -> Result<(), Error> { let old = self.show.container.clone(); // save the ShowWidget vertical scrollabar alignment - self.show - .podcast_id() - .map(|id| self.show.save_vadjustment(id)); + self.show.show_id().map(|id| self.show.save_vadjustment(id)); let new = ShowWidget::new(pd, self.sender.clone()); self.show = new; @@ -113,7 +111,7 @@ impl PopulatedStack { pub fn update_widget(&mut self) -> Result<(), Error> { let old = self.show.container.clone(); - let id = self.show.podcast_id(); + let id = self.show.show_id(); if id.is_none() { return Ok(()); } @@ -131,9 +129,9 @@ impl PopulatedStack { Ok(()) } - // Only update widget if its podcast_id is equal to pid. + // Only update widget if its show_id is equal to pid. pub fn update_widget_if_same(&mut self, pid: i32) -> Result<(), Error> { - if self.show.podcast_id() != Some(pid) { + if self.show.show_id() != Some(pid) { debug!("Different widget. Early return"); return Ok(()); } diff --git a/hammond-gtk/src/utils.rs b/hammond-gtk/src/utils.rs index 6fdbb3a..1985418 100644 --- a/hammond-gtk/src/utils.rs +++ b/hammond-gtk/src/utils.rs @@ -226,15 +226,15 @@ lazy_static! { // GObjects do not implement Send trait, so SendCell is a way around that. // Also lazy_static requires Sync trait, so that's what the mutexes are. // TODO: maybe use something that would just scale to requested size? -pub fn set_image_from_path(image: >k::Image, podcast_id: i32, size: u32) -> Result<(), Error> { +pub fn set_image_from_path(image: >k::Image, show_id: i32, size: u32) -> Result<(), Error> { // Check if there's an active download about this show cover. // If there is, a callback will be set so this function will be called again. // If the download succedes, there should be a quick return from the pixbuf cache_image // If it fails another download will be scheduled. 
if let Ok(guard) = COVER_DL_REGISTRY.read() { - if guard.contains(&podcast_id) { + if guard.contains(&show_id) { let callback = clone!(image => move || { - let _ = set_image_from_path(&image, podcast_id, size); + let _ = set_image_from_path(&image, show_id, size); glib::Continue(false) }); gtk::timeout_add(250, callback); @@ -245,7 +245,7 @@ pub fn set_image_from_path(image: >k::Image, podcast_id: i32, size: u32) -> Re if let Ok(hashmap) = CACHED_PIXBUFS.read() { // Check if the requested (cover + size) is already in the chache // and if so do an early return after that. - if let Some(guard) = hashmap.get(&(podcast_id, size)) { + if let Some(guard) = hashmap.get(&(show_id, size)) { guard .lock() .map_err(|err| format_err!("SendCell Mutex: {}", err)) @@ -263,11 +263,11 @@ pub fn set_image_from_path(image: >k::Image, podcast_id: i32, size: u32) -> Re let (sender, receiver) = unbounded(); THREADPOOL.spawn(move || { if let Ok(mut guard) = COVER_DL_REGISTRY.write() { - guard.insert(podcast_id); - if let Ok(pd) = dbqueries::get_podcast_cover_from_id(podcast_id) { + guard.insert(show_id); + if let Ok(pd) = dbqueries::get_podcast_cover_from_id(show_id) { sender.send(downloader::cache_image(&pd)); } - guard.remove(&podcast_id); + guard.remove(&show_id); } }); @@ -278,7 +278,7 @@ pub fn set_image_from_path(image: >k::Image, podcast_id: i32, size: u32) -> Re if let Ok(path) = path { if let Ok(px) = Pixbuf::new_from_file_at_scale(&path, s, s, true) { if let Ok(mut hashmap) = CACHED_PIXBUFS.write() { - hashmap.insert((podcast_id, size), Mutex::new(SendCell::new(px.clone()))); + hashmap.insert((show_id, size), Mutex::new(SendCell::new(px.clone()))); image.set_from_pixbuf(&px); } } diff --git a/hammond-gtk/src/widgets/episode.rs b/hammond-gtk/src/widgets/episode.rs index e66229b..720c244 100644 --- a/hammond-gtk/src/widgets/episode.rs +++ b/hammond-gtk/src/widgets/episode.rs @@ -12,7 +12,7 @@ use open; use hammond_data::dbqueries; use hammond_data::utils::get_download_folder; -use hammond_data::EpisodeWidgetQuery; +use hammond_data::EpisodeWidgetModel; use app::Action; use manager; @@ -68,7 +68,7 @@ struct Buttons { } impl InfoLabels { - fn init(&self, episode: &EpisodeWidgetQuery) { + fn init(&self, episode: &EpisodeWidgetModel) { // Set the title label state. self.set_title(episode); @@ -82,7 +82,7 @@ impl InfoLabels { self.set_size(episode.length()) } - fn set_title(&self, episode: &EpisodeWidgetQuery) { + fn set_title(&self, episode: &EpisodeWidgetModel) { self.title.set_text(episode.title()); if episode.played().is_some() { @@ -206,7 +206,7 @@ impl Default for EpisodeWidget { } impl EpisodeWidget { - pub fn new(episode: &EpisodeWidgetQuery, sender: &Sender) -> Rc { + pub fn new(episode: &EpisodeWidgetModel, sender: &Sender) -> Rc { let widget = Rc::new(Self::default()); widget.info.init(episode); Self::determine_buttons_state(&widget, episode, sender) @@ -297,7 +297,7 @@ impl EpisodeWidget { /// ------------------- fn determine_buttons_state( widget: &Rc, - episode: &EpisodeWidgetQuery, + episode: &EpisodeWidgetModel, sender: &Sender, ) -> Result<(), Error> { // Reset the buttons state no matter the glade file. 
@@ -428,8 +428,8 @@ impl EpisodeWidget { } } -fn on_download_clicked(ep: &EpisodeWidgetQuery, sender: &Sender) -> Result<(), Error> { - let pd = dbqueries::get_podcast_from_id(ep.podcast_id())?; +fn on_download_clicked(ep: &EpisodeWidgetModel, sender: &Sender) -> Result<(), Error> { + let pd = dbqueries::get_podcast_from_id(ep.show_id())?; let download_fold = get_download_folder(&pd.title())?; // Start a new download. @@ -442,7 +442,7 @@ fn on_download_clicked(ep: &EpisodeWidgetQuery, sender: &Sender) -> Resu fn on_play_bttn_clicked( widget: &Rc, - episode: &mut EpisodeWidgetQuery, + episode: &mut EpisodeWidgetModel, sender: &Sender, ) -> Result<(), Error> { // Mark played diff --git a/hammond-gtk/src/widgets/home_view.rs b/hammond-gtk/src/widgets/home_view.rs index e0b6d94..584491a 100644 --- a/hammond-gtk/src/widgets/home_view.rs +++ b/hammond-gtk/src/widgets/home_view.rs @@ -6,7 +6,7 @@ use gtk::prelude::*; use crossbeam_channel::Sender; use hammond_data::dbqueries; -use hammond_data::EpisodeWidgetQuery; +use hammond_data::EpisodeWidgetModel; use send_cell::SendCell; use app::Action; @@ -93,7 +93,7 @@ impl HomeView { let now_utc = Utc::now(); let view_ = view.clone(); - let func = move |ep: EpisodeWidgetQuery| { + let func = move |ep: EpisodeWidgetModel| { let epoch = ep.epoch(); let widget = HomeEpisode::new(&ep, &sender); @@ -197,12 +197,12 @@ impl Default for HomeEpisode { } impl HomeEpisode { - fn new(episode: &EpisodeWidgetQuery, sender: &Sender) -> HomeEpisode { + fn new(episode: &EpisodeWidgetModel, sender: &Sender) -> HomeEpisode { let builder = gtk::Builder::new_from_resource("/org/gnome/Hammond/gtk/episodes_view_widget.ui"); let container: gtk::Box = builder.get_object("container").unwrap(); let image: gtk::Image = builder.get_object("cover").unwrap(); - let pid = episode.podcast_id(); + let pid = episode.show_id(); let ep = EpisodeWidget::new(episode, sender); let view = HomeEpisode { @@ -215,15 +215,15 @@ impl HomeEpisode { view } - fn init(&self, podcast_id: i32) { - self.set_cover(podcast_id) + fn init(&self, show_id: i32) { + self.set_cover(show_id) .map_err(|err| error!("Failed to set a cover: {}", err)) .ok(); self.container.pack_start(&self.episode, true, true, 6); } - fn set_cover(&self, podcast_id: i32) -> Result<(), Error> { - utils::set_image_from_path(&self.image, podcast_id, 64) + fn set_cover(&self, show_id: i32) -> Result<(), Error> { + utils::set_image_from_path(&self.image, show_id, 64) } } diff --git a/hammond-gtk/src/widgets/player.rs b/hammond-gtk/src/widgets/player.rs index c25b4ca..e0228aa 100644 --- a/hammond-gtk/src/widgets/player.rs +++ b/hammond-gtk/src/widgets/player.rs @@ -14,7 +14,7 @@ use failure::Error; use send_cell::SendCell; use hammond_data::{dbqueries, USER_AGENT}; -use hammond_data::{EpisodeWidgetQuery, PodcastCoverQuery}; +use hammond_data::{EpisodeWidgetModel, ShowCoverModel}; use app::Action; use utils::set_image_from_path; @@ -49,23 +49,23 @@ struct PlayerInfo { impl PlayerInfo { // FIXME: create a Diesel Model of the joined episode and podcast query instead - fn init(&self, episode: &EpisodeWidgetQuery, podcast: &PodcastCoverQuery) { + fn init(&self, episode: &EpisodeWidgetModel, podcast: &ShowCoverModel) { self.set_cover_image(podcast); self.set_show_title(podcast); self.set_episode_title(episode); } - fn set_episode_title(&self, episode: &EpisodeWidgetQuery) { + fn set_episode_title(&self, episode: &EpisodeWidgetModel) { self.episode.set_text(episode.title()); self.episode.set_tooltip_text(episode.title()); } - fn 
set_show_title(&self, show: &PodcastCoverQuery) { + fn set_show_title(&self, show: &ShowCoverModel) { self.show.set_text(show.title()); self.show.set_tooltip_text(show.title()); } - fn set_cover_image(&self, show: &PodcastCoverQuery) { + fn set_cover_image(&self, show: &ShowCoverModel) { set_image_from_path(&self.cover, show.id(), 34) .map_err(|err| error!("Player Cover: {}", err)) .ok(); @@ -357,7 +357,7 @@ impl PlayerWidget { pub fn initialize_episode(&self, rowid: i32) -> Result<(), Error> { let ep = dbqueries::get_episode_widget_from_rowid(rowid)?; - let pd = dbqueries::get_podcast_cover_from_id(ep.podcast_id())?; + let pd = dbqueries::get_podcast_cover_from_id(ep.show_id())?; self.info.init(&ep, &pd); // Currently that will always be the case since the play button is diff --git a/hammond-gtk/src/widgets/show.rs b/hammond-gtk/src/widgets/show.rs index 89652f5..e62249e 100644 --- a/hammond-gtk/src/widgets/show.rs +++ b/hammond-gtk/src/widgets/show.rs @@ -11,7 +11,7 @@ use send_cell::SendCell; use hammond_data::dbqueries; use hammond_data::utils::delete_show; -use hammond_data::Podcast; +use hammond_data::Show; use app::Action; use utils::{self, lazy_load}; @@ -36,7 +36,7 @@ pub struct ShowWidget { settings: gtk::MenuButton, unsub: gtk::Button, episodes: gtk::ListBox, - podcast_id: Option, + show_id: Option, } impl Default for ShowWidget { @@ -61,13 +61,13 @@ impl Default for ShowWidget { link, settings, episodes, - podcast_id: None, + show_id: None, } } } impl ShowWidget { - pub fn new(pd: Arc, sender: Sender) -> Rc { + pub fn new(pd: Arc, sender: Sender) -> Rc { let mut pdw = ShowWidget::default(); pdw.init(&pd, &sender); let pdw = Rc::new(pdw); @@ -78,7 +78,7 @@ impl ShowWidget { pdw } - pub fn init(&mut self, pd: &Arc, sender: &Sender) { + pub fn init(&mut self, pd: &Arc, sender: &Sender) { let builder = gtk::Builder::new_from_resource("/org/gnome/Hammond/gtk/show_widget.ui"); self.unsub @@ -87,7 +87,7 @@ impl ShowWidget { })); self.set_description(pd.description()); - self.podcast_id = Some(pd.id()); + self.show_id = Some(pd.id()); self.set_cover(&pd) .map_err(|err| error!("Failed to set a cover: {}", err)) @@ -118,7 +118,7 @@ impl ShowWidget { } /// Set the show cover. - fn set_cover(&self, pd: &Arc) -> Result<(), Error> { + fn set_cover(&self, pd: &Arc) -> Result<(), Error> { utils::set_image_from_path(&self.cover, pd.id(), 256) } @@ -143,7 +143,7 @@ impl ShowWidget { } /// Set scrolled window vertical adjustment. - fn set_vadjustment(&self, pd: &Arc) -> Result<(), Error> { + fn set_vadjustment(&self, pd: &Arc) -> Result<(), Error> { let guard = SHOW_WIDGET_VALIGNMENT .lock() .map_err(|err| format_err!("Failed to lock widget align mutex: {}", err))?; @@ -166,15 +166,15 @@ impl ShowWidget { Ok(()) } - pub fn podcast_id(&self) -> Option { - self.podcast_id + pub fn show_id(&self) -> Option { + self.show_id } } /// Populate the listbox with the shows episodes. fn populate_listbox( show: &Rc, - pd: Arc, + pd: Arc, sender: Sender, ) -> Result<(), Error> { use crossbeam_channel::bounded; @@ -223,7 +223,7 @@ fn populate_listbox( Ok(()) } -fn on_unsub_button_clicked(pd: Arc, unsub_button: >k::Button, sender: &Sender) { +fn on_unsub_button_clicked(pd: Arc, unsub_button: >k::Button, sender: &Sender) { // hack to get away without properly checking for none. // if pressed twice would panic. 
unsub_button.set_sensitive(false); @@ -239,7 +239,7 @@ fn on_unsub_button_clicked(pd: Arc, unsub_button: >k::Button, sender: unsub_button.set_sensitive(true); } -fn on_played_button_clicked(pd: Arc, episodes: >k::ListBox, sender: &Sender) { +fn on_played_button_clicked(pd: Arc, episodes: >k::ListBox, sender: &Sender) { if dim_titles(episodes).is_none() { error!("Something went horribly wrong when dimming the titles."); warn!("RUN WHILE YOU STILL CAN!"); @@ -248,7 +248,7 @@ fn on_played_button_clicked(pd: Arc, episodes: >k::ListBox, sender: & sender.send(Action::MarkAllPlayerNotification(pd)) } -fn mark_all_watched(pd: &Podcast, sender: &Sender) -> Result<(), Error> { +fn mark_all_watched(pd: &Show, sender: &Sender) -> Result<(), Error> { dbqueries::update_none_to_played_now(pd)?; // Not all widgets migth have been loaded when the mark_all is hit // So we will need to refresh again after it's done. @@ -257,7 +257,7 @@ fn mark_all_watched(pd: &Podcast, sender: &Sender) -> Result<(), Error> Ok(()) } -pub fn mark_all_notif(pd: Arc, sender: &Sender) -> InAppNotification { +pub fn mark_all_notif(pd: Arc, sender: &Sender) -> InAppNotification { let id = pd.id(); let callback = clone!(sender => move || { mark_all_watched(&pd, &sender) @@ -271,7 +271,7 @@ pub fn mark_all_notif(pd: Arc, sender: &Sender) -> InAppNotific InAppNotification::new(text, callback, undo_callback, UndoState::Shown) } -pub fn remove_show_notif(pd: Arc, sender: Sender) -> InAppNotification { +pub fn remove_show_notif(pd: Arc, sender: Sender) -> InAppNotification { let text = format!("Unsubscribed from {}", pd.title()); utils::ignore_show(pd.id()) diff --git a/hammond-gtk/src/widgets/shows_view.rs b/hammond-gtk/src/widgets/shows_view.rs index ec134fd..5504a73 100644 --- a/hammond-gtk/src/widgets/shows_view.rs +++ b/hammond-gtk/src/widgets/shows_view.rs @@ -6,7 +6,7 @@ use failure::Error; use send_cell::SendCell; use hammond_data::dbqueries; -use hammond_data::Podcast; +use hammond_data::Show; use app::Action; use utils::{self, get_ignored_shows, lazy_load, set_image_from_path}; @@ -45,7 +45,7 @@ impl ShowsView { pub fn new(sender: Sender) -> Result, Error> { let pop = Rc::new(ShowsView::default()); pop.init(sender); - // Populate the flowbox with the Podcasts. + // Populate the flowbox with the Shows. populate_flowbox(&pop)?; Ok(pop) } @@ -147,13 +147,13 @@ impl Default for ShowsChild { } impl ShowsChild { - pub fn new(pd: &Podcast) -> ShowsChild { + pub fn new(pd: &Show) -> ShowsChild { let child = ShowsChild::default(); child.init(pd); child } - fn init(&self, pd: &Podcast) { + fn init(&self, pd: &Show) { self.container.set_tooltip_text(pd.title()); WidgetExt::set_name(&self.child, &pd.id().to_string()); @@ -162,7 +162,7 @@ impl ShowsChild { .ok(); } - fn set_cover(&self, podcast_id: i32) -> Result<(), Error> { - set_image_from_path(&self.cover, podcast_id, 256) + fn set_cover(&self, show_id: i32) -> Result<(), Error> { + set_image_from_path(&self.cover, show_id, 256) } }
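
The schema.rs hunk near the top of this patch maps the renamed shows table and drops the unused favorite, archive, and always_dl columns. Below is a minimal sketch of how such a Diesel table! definition pairs with a Queryable model struct. It assumes Diesel 1.x conventions and Nullable&lt;Text&gt; for image_uri, and the Show struct is illustrative only, since the project's actual models module is not part of this patch.

// Minimal sketch (Diesel 1.x style, matching the era of this patch).
// The Show struct below is an assumption for illustration; the real model
// lives in hammond-data and is not shown in this diff.
#[macro_use]
extern crate diesel;

table! {
    shows (id) {
        id -> Integer,
        title -> Text,
        link -> Text,
        description -> Text,
        image_uri -> Nullable<Text>, // assumed: the column is nullable TEXT
        source_id -> Integer,
    }
}

// For Queryable, field order must match the column order in the table! block.
#[derive(Queryable, Debug)]
pub struct Show {
    id: i32,
    title: String,
    link: String,
    description: String,
    image_uri: Option<String>,
    source_id: i32,
}

impl Show {
    pub fn id(&self) -> i32 {
        self.id
    }

    pub fn title(&self) -> &str {
        &self.title
    }
}

fn main() {}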
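
The app.rs hunk only swaps the payload of the show-carrying Action variants (ReplaceWidget, MarkAllPlayerNotification, RemoveShow) from the old Podcast type to Show. The following self-contained sketch illustrates the surrounding message-passing pattern under stated assumptions: std::sync::mpsc stands in for the crossbeam_channel used by the app, and the bare Show struct and the dispatch loop are invented for the example.

// Rough sketch of the Action message-passing pattern from hammond-gtk/src/app.rs.
// std::sync::mpsc stands in for crossbeam_channel, and Show is a bare stand-in
// struct; both are assumptions made so the example is self-contained.
use std::sync::mpsc::channel;
use std::sync::Arc;

#[derive(Debug)]
struct Show {
    id: i32,
    title: String,
}

#[derive(Debug)]
enum Action {
    ReplaceWidget(Arc<Show>),
    RemoveShow(Arc<Show>),
    RefreshWidgetIfSame(i32),
}

fn main() {
    let (sender, receiver) = channel();

    // Widgets hold a sender and post messages like these.
    let pd = Arc::new(Show { id: 1, title: "Example Show".into() });
    sender.send(Action::ReplaceWidget(Arc::clone(&pd))).unwrap();
    sender.send(Action::RefreshWidgetIfSame(pd.id)).unwrap();

    // A receiver on the GTK side would dispatch these; here we just drain
    // the channel.
    while let Ok(action) = receiver.try_recv() {
        match action {
            Action::ReplaceWidget(show) => println!("replace widget with {}", show.title),
            Action::RemoveShow(show) => println!("remove show {}", show.title),
            Action::RefreshWidgetIfSame(id) => println!("refresh widget if id == {}", id),
        }
    }
}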
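
set_image_from_path in hammond-gtk/src/utils.rs, whose podcast_id parameter becomes show_id in this patch, guards cover downloads with a registry of in-flight ids (COVER_DL_REGISTRY) and memoises scaled covers in a cache keyed by (show_id, size) (CACHED_PIXBUFS). The sketch below reproduces that bookkeeping in a simplified, GTK-free form; the CoverCache type, the fabricated file paths, and the synchronous "download" are assumptions made purely for illustration.

// Simplified, GTK-free sketch of the bookkeeping in set_image_from_path:
// a set of show ids with an in-flight cover download plus a cache keyed by
// (show_id, size). Pixbufs are replaced by plain path Strings and the
// download is faked, so this shows the pattern only, not the real code.
use std::collections::{HashMap, HashSet};
use std::sync::RwLock;

struct CoverCache {
    in_flight: RwLock<HashSet<i32>>,
    cached: RwLock<HashMap<(i32, u32), String>>,
}

impl CoverCache {
    fn new() -> Self {
        CoverCache {
            in_flight: RwLock::new(HashSet::new()),
            cached: RwLock::new(HashMap::new()),
        }
    }

    /// Return a cached cover path, "fetching" (here: fabricating) it on a miss.
    fn cover(&self, show_id: i32, size: u32) -> Option<String> {
        // If another caller is already downloading this cover, bail out;
        // the real widget retries later via gtk::timeout_add.
        if self.in_flight.read().ok()?.contains(&show_id) {
            return None;
        }
        // Cache hit: early return.
        if let Some(path) = self.cached.read().ok()?.get(&(show_id, size)) {
            return Some(path.clone());
        }
        // Mark the download as in flight, "fetch" the cover, cache it, unmark.
        self.in_flight.write().ok()?.insert(show_id);
        let path = format!("covers/{}-{}.png", show_id, size); // placeholder path
        self.cached.write().ok()?.insert((show_id, size), path.clone());
        self.in_flight.write().ok()?.remove(&show_id);
        Some(path)
    }
}

fn main() {
    let cache = CoverCache::new();
    let first = cache.cover(1, 64);
    let second = cache.cover(1, 64); // served from the cache
    assert_eq!(first, second);
}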