From bc57c33491588f60c30934573c069612780670db Mon Sep 17 00:00:00 2001
From: Jordan Petridis
Date: Sat, 9 Dec 2017 17:38:46 +0200
Subject: [PATCH] Change episode table schema.

The episode table now uses (title, podcast_id) as a composite primary
key instead of an autoincremented id, uri becomes nullable, and
id-based lookups go through sqlite's rowid instead.

---
 .../down.sql                           | 19 ++++
 .../up.sql                             | 22 +++++++
 hammond-data/src/dbqueries.rs          | 18 ++++--
 hammond-data/src/models/insertables.rs | 31 ++++++----
 hammond-data/src/models/queryables.rs  | 28 +++++----
 hammond-data/src/parser.rs             | 59 +++++++++++--------
 hammond-data/src/schema.rs             |  8 +--
 hammond-data/src/utils.rs              | 26 ++++----
 hammond-downloader/src/downloader.rs   |  2 +-
 hammond-gtk/src/widgets/episode.rs     | 12 ++--
 10 files changed, 150 insertions(+), 75 deletions(-)
 create mode 100644 hammond-data/migrations/2017-12-09-125835_change_episode_pk/down.sql
 create mode 100644 hammond-data/migrations/2017-12-09-125835_change_episode_pk/up.sql

diff --git a/hammond-data/migrations/2017-12-09-125835_change_episode_pk/down.sql b/hammond-data/migrations/2017-12-09-125835_change_episode_pk/down.sql
new file mode 100644
index 0000000..01f66ff
--- /dev/null
+++ b/hammond-data/migrations/2017-12-09-125835_change_episode_pk/down.sql
@@ -0,0 +1,19 @@
+ALTER TABLE episode RENAME TO old_table;
+
+CREATE TABLE episode (
+    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
+    title TEXT,
+    uri TEXT NOT NULL UNIQUE,
+    local_uri TEXT,
+    description TEXT,
+    published_date TEXT,
+    epoch INTEGER NOT NULL DEFAULT 0,
+    length INTEGER,
+    guid TEXT,
+    played INTEGER,
+    favorite INTEGER NOT NULL DEFAULT 0,
+    archive INTEGER NOT NULL DEFAULT 0,
+    podcast_id INTEGER NOT NULL
+);
+
+INSERT INTO episode SELECT * FROM old_table;
\ No newline at end of file
diff --git a/hammond-data/migrations/2017-12-09-125835_change_episode_pk/up.sql b/hammond-data/migrations/2017-12-09-125835_change_episode_pk/up.sql
new file mode 100644
index 0000000..cbe40dd
--- /dev/null
+++ b/hammond-data/migrations/2017-12-09-125835_change_episode_pk/up.sql
@@ -0,0 +1,22 @@
+ALTER TABLE episode RENAME TO old_table;
+
+CREATE TABLE episode (
+    title TEXT NOT NULL,
+    uri TEXT UNIQUE,
+    local_uri TEXT,
+    description TEXT,
+    published_date TEXT,
+    epoch INTEGER NOT NULL DEFAULT 0,
+    length INTEGER,
+    guid TEXT,
+    played INTEGER,
+    podcast_id INTEGER NOT NULL,
+    favorite INTEGER DEFAULT 0,
+    archive INTEGER DEFAULT 0,
+    PRIMARY KEY (title, podcast_id)
+);
+
+INSERT INTO episode (title, uri, local_uri, description, published_date, epoch, length, guid, played, favorite, archive, podcast_id)
+SELECT title, uri, local_uri, description, published_date, epoch, length, guid, played, favorite, archive, podcast_id
+FROM old_table;
+DROP TABLE old_table;
\ No newline at end of file
diff --git a/hammond-data/src/dbqueries.rs b/hammond-data/src/dbqueries.rs
index 1604547..f83bc0b 100644
--- a/hammond-data/src/dbqueries.rs
+++ b/hammond-data/src/dbqueries.rs
@@ -55,7 +55,9 @@ pub fn get_episode_from_id(ep_id: i32) -> Result<Episode> {
     let db = connection();
     let con = db.get()?;
 
-    Ok(episode.filter(id.eq(ep_id)).get_result::<Episode>(&*con)?)
+    Ok(episode
+        .filter(rowid.eq(ep_id))
+        .get_result::<Episode>(&*con)?)
 }
 
 pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result<Option<String>> {
@@ -65,7 +67,7 @@ pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result<Option<String>> {
     let con = db.get()?;
 
     Ok(episode
-        .filter(id.eq(ep_id))
+        .filter(rowid.eq(ep_id))
         .select(local_uri)
         .get_result::<Option<String>>(&*con)?)
 }
@@ -153,10 +155,18 @@ pub fn get_podcast_from_source_id(sid: i32) -> Result<Podcast> {
         .get_result::<Podcast>(&*con)?)
 }
 
-pub fn get_episode_from_uri(con: &SqliteConnection, uri_: &str) -> QueryResult<Episode> {
+// TODO: unhack me
+pub fn get_episode_from_new_episode(
+    con: &SqliteConnection,
+    title_: &str,
+    pid: i32,
+) -> QueryResult<Episode> {
     use schema::episode::dsl::*;
 
-    episode.filter(uri.eq(uri_)).get_result::<Episode>(&*con)
+    episode
+        .filter(title.eq(title_))
+        .filter(podcast_id.eq(pid))
+        .get_result::<Episode>(&*con)
 }
 
 pub fn remove_feed(pd: &Podcast) -> Result<()> {
diff --git a/hammond-data/src/models/insertables.rs b/hammond-data/src/models/insertables.rs
index c303dfa..e63f64d 100644
--- a/hammond-data/src/models/insertables.rs
+++ b/hammond-data/src/models/insertables.rs
@@ -91,6 +91,7 @@ impl Update for NewPodcast {
     fn update(&self, con: &SqliteConnection, podcast_id: i32) -> QueryResult<usize> {
         use schema::podcast::dsl::*;
 
+        info!("Updating {}", self.title);
         diesel::update(podcast.filter(id.eq(podcast_id)))
             .set(self)
             .execute(&*con)
@@ -160,8 +161,8 @@ impl NewPodcast {
 #[builder(derive(Debug))]
 #[builder(setter(into))]
 pub(crate) struct NewEpisode {
-    title: Option<String>,
-    uri: String,
+    title: String,
+    uri: Option<String>,
     description: Option<String>,
     published_date: Option<String>,
     length: Option<i32>,
@@ -181,7 +182,8 @@ impl Update for NewEpisode {
     fn update(&self, con: &SqliteConnection, episode_id: i32) -> QueryResult<usize> {
         use schema::episode::dsl::*;
 
-        diesel::update(episode.filter(id.eq(episode_id)))
+        info!("Updating {:?}", self.title);
+        diesel::update(episode.filter(rowid.eq(episode_id)))
             .set(self)
             .execute(&*con)
     }
@@ -191,11 +193,16 @@ impl NewEpisode {
     // TODO: Refactor into batch indexes instead.
     pub(crate) fn into_episode(self, con: &SqliteConnection) -> Result<Episode> {
         self.index(con)?;
-        Ok(dbqueries::get_episode_from_uri(con, &self.uri)?)
+        Ok(dbqueries::get_episode_from_new_episode(
+            con,
+            &self.title,
+            self.podcast_id,
+        )?)
     }
 
     pub(crate) fn index(&self, con: &SqliteConnection) -> QueryResult<()> {
-        let ep = dbqueries::get_episode_from_uri(con, &self.uri.clone());
+        // TODO: Change me
+        let ep = dbqueries::get_episode_from_new_episode(con, &self.title, self.podcast_id);
 
         match ep {
             Ok(foo) => {
@@ -203,10 +210,10 @@
                     error!("NEP pid: {}, EP pid: {}", self.podcast_id, foo.podcast_id());
                 };
 
-                if foo.title() != self.title.as_ref().map(|x| x.as_str())
-                    || foo.published_date() != self.published_date.as_ref().map(|x| x.as_str())
+                if foo.title() != self.title.as_str() || foo.epoch() != self.epoch
+                    || foo.uri() != self.uri.as_ref().map(|s| s.as_str())
                 {
-                    self.update(con, *foo.id())?;
+                    self.update(con, foo.rowid())?;
                 }
             }
             Err(_) => {
@@ -220,12 +227,12 @@
 #[allow(dead_code)]
 // Ignore the following getters. They are used in unit tests mainly.
 impl NewEpisode {
-    pub(crate) fn title(&self) -> Option<&str> {
-        self.title.as_ref().map(|s| s.as_str())
+    pub(crate) fn title(&self) -> &str {
+        self.title.as_ref()
     }
 
-    pub(crate) fn uri(&self) -> &str {
-        self.uri.as_ref()
+    pub(crate) fn uri(&self) -> Option<&str> {
+        self.uri.as_ref().map(|s| s.as_str())
     }
 
     pub(crate) fn description(&self) -> Option<&str> {
diff --git a/hammond-data/src/models/queryables.rs b/hammond-data/src/models/queryables.rs
index 2e41ea6..f0cab54 100644
--- a/hammond-data/src/models/queryables.rs
+++ b/hammond-data/src/models/queryables.rs
@@ -18,13 +18,14 @@ use std::str::FromStr;
 #[derive(Queryable, Identifiable, AsChangeset, Associations, PartialEq)]
 #[table_name = "episode"]
 #[changeset_options(treat_none_as_null = "true")]
+#[primary_key(title, podcast_id)]
 #[belongs_to(Podcast, foreign_key = "podcast_id")]
 #[derive(Debug, Clone)]
 /// Diesel Model of the episode table.
 pub struct Episode {
-    id: i32,
-    title: Option<String>,
-    uri: String,
+    rowid: i32,
+    title: String,
+    uri: Option<String>,
     local_uri: Option<String>,
     description: Option<String>,
     published_date: Option<String>,
@@ -38,26 +39,31 @@ pub struct Episode {
 }
 
 impl Episode {
+    /// Get the value of sqlite's `ROW_ID`.
+    pub fn rowid(&self) -> i32 {
+        self.rowid
+    }
+
     /// Get the value of the `title` field.
-    pub fn title(&self) -> Option<&str> {
-        self.title.as_ref().map(|s| s.as_str())
+    pub fn title(&self) -> &str {
+        &self.title
     }
 
     /// Set the `title`.
-    pub fn set_title(&mut self, value: Option<&str>) {
-        self.title = value.map(|x| x.to_string());
+    pub fn set_title(&mut self, value: &str) {
+        self.title = value.to_string();
     }
 
     /// Get the value of the `uri`.
    ///
     /// Represents the url (usually) where the media file is located.
-    pub fn uri(&self) -> &str {
-        self.uri.as_ref()
+    pub fn uri(&self) -> Option<&str> {
+        self.uri.as_ref().map(|s| s.as_str())
     }
 
     /// Set the `uri`.
-    pub fn set_uri(&mut self, value: &str) {
-        self.uri = value.to_string();
+    pub fn set_uri(&mut self, value: Option<&str>) {
+        self.uri = value.map(|x| x.to_string());
     }
 
     /// Get the value of the `local_uri`.
diff --git a/hammond-data/src/parser.rs b/hammond-data/src/parser.rs
index 9d2f8ed..1be8064 100644
--- a/hammond-data/src/parser.rs
+++ b/hammond-data/src/parser.rs
@@ -34,7 +34,10 @@ pub(crate) fn new_podcast(chan: &Channel, source_id: i32) -> NewPodcast {
 
 /// Parses an `rss::Item` into a `NewEpisode` Struct.
 pub(crate) fn new_episode(item: &Item, parent_id: i32) -> Result<NewEpisode> {
-    let title = item.title().map(|s| s.trim().to_owned());
+    if item.title().is_none() {
+        bail!("No title specified for the item.")
+    }
+    let title = item.title().unwrap().trim().to_owned();
     let description = item.description()
         .map(|s| replace_extra_spaces(&ammonia::clean(s)));
     let guid = item.guid().map(|s| s.value().trim().to_owned());
@@ -44,10 +47,11 @@ pub(crate) fn new_episode(item: &Item, parent_id: i32) -> Result<NewEpisode> {
     // Though the db schema has a requirement of episode uri being Unique && Not Null.
     // TODO: Restructure
     let x = item.enclosure().map(|s| url_cleaner(s.url()));
+    // FIXME: refactor
     let uri = if x.is_some() {
-        x.unwrap()
+        x
     } else if item.link().is_some() {
-        item.link().map(|s| url_cleaner(s)).unwrap()
+        item.link().map(|s| url_cleaner(s))
     } else {
         bail!("No url specified for the item.")
     };
@@ -182,8 +186,11 @@ mod tests {
                      performs.";
 
         let i = new_episode(&firstitem, 0).unwrap();
-        assert_eq!(i.title(), Some("The Super Bowl of Racism"));
-        assert_eq!(i.uri(), "http://traffic.megaphone.fm/PPY6458293736.mp3");
+        assert_eq!(i.title(), "The Super Bowl of Racism");
+        assert_eq!(
+            i.uri(),
+            Some("http://traffic.megaphone.fm/PPY6458293736.mp3")
+        );
         assert_eq!(i.description(), Some(descr));
         assert_eq!(i.length(), Some(66738886));
         assert_eq!(i.guid(), Some("7df4070a-9832-11e7-adac-cb37b05d5e24"));
@@ -203,9 +210,12 @@ mod tests {
                      Venezuela.";
         assert_eq!(
             i2.title(),
-            Some("Atlas Golfed — U.S.-Backed Think Tanks Target Latin America")
+            "Atlas Golfed — U.S.-Backed Think Tanks Target Latin America"
         );
+        assert_eq!(
+            i2.uri(),
+            Some("http://traffic.megaphone.fm/FL5331443769.mp3")
+        );
-        assert_eq!(i2.uri(), "http://traffic.megaphone.fm/FL5331443769.mp3");
         assert_eq!(i2.description(), Some(descr2));
         assert_eq!(i2.length(), Some(67527575));
         assert_eq!(i2.guid(), Some("7c207a24-e33f-11e6-9438-eb45dcf36a1d"));
@@ -225,11 +235,11 @@ mod tests {
         assert_eq!(
             i.title(),
-            Some("The Breakthrough: Hopelessness and Exploitation Inside Homes for Mentally Ill")
+            "The Breakthrough: Hopelessness and Exploitation Inside Homes for Mentally Ill"
         );
         assert_eq!(
             i.uri(),
-            "http://tracking.feedpress.it/link/10581/6726758/20170908-cliff-levy.mp3"
+            Some("http://tracking.feedpress.it/link/10581/6726758/20170908-cliff-levy.mp3")
         );
         assert_eq!(i.description(), Some(descr));
         assert_eq!(i.length(), Some(33396551));
@@ -251,14 +261,11 @@ mod tests {
         assert_eq!(
             i2.title(),
-            Some(
-                "The Breakthrough: Behind the Scenes of Hillary Clinton’s Failed Bid for \
-                 President"
-            )
+            "The Breakthrough: Behind the Scenes of Hillary Clinton’s Failed Bid for President"
         );
         assert_eq!(
             i2.uri(),
-            "http://tracking.feedpress.it/link/10581/6726759/16_JohnAllen-CRAFT.mp3".to_string()
+            Some("http://tracking.feedpress.it/link/10581/6726759/16_JohnAllen-CRAFT.mp3")
         );
         assert_eq!(i2.description(), Some(descr2));
         assert_eq!(i2.length(), Some(17964071));
@@ -285,10 +292,10 @@ mod tests {
                      It’s a really packed episode!";
 
         let i = new_episode(&firstitem, 0).unwrap();
-        assert_eq!(i.title(), Some("Hacking Devices with Kali Linux | LUP 214"));
+        assert_eq!(i.title(), "Hacking Devices with Kali Linux | LUP 214");
         assert_eq!(
             i.uri(),
-            "http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0214.mp3"
+            Some("http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0214.mp3")
         );
         assert_eq!(i.description(), Some(descr));
         assert_eq!(i.length(), Some(46479789));
@@ -305,10 +312,10 @@ mod tests {
                      future.\nPlus we chat with Wimpy about the Ubuntu Rally in NYC, \
                      Microsoft’s sneaky move to turn Windows 10 into the “ULTIMATE LINUX \
                      RUNTIME”, community news & more!";
-        assert_eq!(i2.title(), Some("Gnome Does it Again | LUP 213"));
+        assert_eq!(i2.title(), "Gnome Does it Again | LUP 213");
         assert_eq!(
             i2.uri(),
-            "http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0213.mp3"
+            Some("http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0213.mp3")
         );
         assert_eq!(i2.description(), Some(descr2));
         assert_eq!(i2.length(), Some(36544272));
@@ -327,11 +334,13 @@ mod tests {
                      rel=\"noopener noreferrer\">RFC 2094 \"Non-lexical lifetimes\"";
 
         let i = new_episode(&firstitem, 0).unwrap();
-        assert_eq!(i.title(), Some("Episode #9 - A Once in a Lifetime RFC"));
+        assert_eq!(i.title(), "Episode #9 - A Once in a Lifetime RFC");
         assert_eq!(
             i.uri(),
-            "http://request-for-explanation.github.\
-             io/podcast/ep9-a-once-in-a-lifetime-rfc/episode.mp3"
+            Some(
+                "http://request-for-explanation.github.\
+                 io/podcast/ep9-a-once-in-a-lifetime-rfc/episode.mp3"
+            )
         );
         assert_eq!(i.description(), Some(descr));
         assert_eq!(i.length(), Some(15077388));
@@ -349,11 +358,13 @@ mod tests {
                      href=\"https://github.com/rust-lang/rfcs/pull/2071\" rel=\"noopener \
                      noreferrer\">RFC 2071 \"Add impl Trait type alias and variable \
                      declarations\"";
-        assert_eq!(i2.title(), Some("Episode #8 - An Existential Crisis"));
+        assert_eq!(i2.title(), "Episode #8 - An Existential Crisis");
         assert_eq!(
             i2.uri(),
-            "http://request-for-explanation.github.io/podcast/ep8-an-existential-crisis/episode.\
-             mp3"
+            Some(
+                "http://request-for-explanation.github.\
+                 io/podcast/ep8-an-existential-crisis/episode.mp3"
+            )
         );
         assert_eq!(i2.description(), Some(descr2));
         assert_eq!(i2.length(), Some(13713219));
diff --git a/hammond-data/src/schema.rs b/hammond-data/src/schema.rs
index 00aff92..7b43171 100644
--- a/hammond-data/src/schema.rs
+++ b/hammond-data/src/schema.rs
@@ -1,8 +1,8 @@
 table! {
-    episode (id) {
-        id -> Integer,
-        title -> Nullable<Text>,
-        uri -> Text,
+    episode (title, podcast_id) {
+        rowid -> Integer,
+        title -> Text,
+        uri -> Nullable<Text>,
         local_uri -> Nullable<Text>,
         description -> Nullable<Text>,
         published_date -> Nullable<Text>,
diff --git a/hammond-data/src/utils.rs b/hammond-data/src/utils.rs
index 72ef146..42a6af7 100644
--- a/hammond-data/src/utils.rs
+++ b/hammond-data/src/utils.rs
@@ -146,22 +146,26 @@ mod tests {
         // Setup episodes
         let db = connection();
         let con = db.get().unwrap();
-        NewEpisodeBuilder::default()
-            .uri("foo_bar".to_string())
+        let n1 = NewEpisodeBuilder::default()
+            .title("foo_bar".to_string())
+            .podcast_id(0)
             .build()
             .unwrap()
             .into_episode(&con)
             .unwrap();
 
-        NewEpisodeBuilder::default()
-            .uri("bar_baz".to_string())
+        let n2 = NewEpisodeBuilder::default()
+            .title("bar_baz".to_string())
+            .podcast_id(1)
             .build()
             .unwrap()
             .into_episode(&con)
             .unwrap();
 
-        let mut ep1 = dbqueries::get_episode_from_uri(&con, "foo_bar").unwrap();
-        let mut ep2 = dbqueries::get_episode_from_uri(&con, "bar_baz").unwrap();
+        let mut ep1 =
+            dbqueries::get_episode_from_new_episode(&con, n1.title(), n1.podcast_id()).unwrap();
+        let mut ep2 =
+            dbqueries::get_episode_from_new_episode(&con, n2.title(), n2.podcast_id()).unwrap();
 
         ep1.set_local_uri(Some(valid_path.to_str().unwrap()));
         ep2.set_local_uri(Some(bad_path.to_str().unwrap()));
@@ -179,7 +183,7 @@ mod tests {
 
         let episodes = dbqueries::get_downloaded_episodes().unwrap();
         assert_eq!(episodes.len(), 1);
-        assert_eq!("foo_bar", episodes.first().unwrap().uri());
+        assert_eq!("foo_bar", episodes.first().unwrap().title());
     }
 
     #[test]
@@ -188,7 +192,7 @@ mod tests {
         let mut episode = {
             let db = connection();
             let con = db.get().unwrap();
-            dbqueries::get_episode_from_uri(&con, "bar_baz").unwrap()
+            dbqueries::get_episode_from_new_episode(&con, "bar_baz", 1).unwrap()
         };
 
         checker_helper(&mut episode);
@@ -201,7 +205,7 @@ mod tests {
         let mut episode = {
             let db = connection();
             let con = db.get().unwrap();
-            dbqueries::get_episode_from_uri(&con, "foo_bar").unwrap()
+            dbqueries::get_episode_from_new_episode(&con, "foo_bar", 0).unwrap()
         };
 
         let valid_path = episode.local_uri().unwrap().to_owned();
@@ -215,7 +219,7 @@ mod tests {
         let mut episode = {
             let db = connection();
             let con = db.get().unwrap();
-            dbqueries::get_episode_from_uri(&con, "foo_bar").unwrap()
+            dbqueries::get_episode_from_new_episode(&con, "foo_bar", 0).unwrap()
         };
         let now_utc = Utc::now().timestamp() as i32;
         // let limit = now_utc - 172_800;
@@ -235,7 +239,7 @@ mod tests {
         let mut episode = {
             let db = connection();
             let con = db.get().unwrap();
-            dbqueries::get_episode_from_uri(&con, "foo_bar").unwrap()
+            dbqueries::get_episode_from_new_episode(&con, "foo_bar", 0).unwrap()
         };
         let now_utc = Utc::now().timestamp() as i32;
         // limit = 172_800;
diff --git a/hammond-downloader/src/downloader.rs b/hammond-downloader/src/downloader.rs
index f3b695a..fafd726 100644
--- a/hammond-downloader/src/downloader.rs
+++ b/hammond-downloader/src/downloader.rs
@@ -118,7 +118,7 @@ pub fn get_episode(ep: &mut Episode, download_folder: &str) -> Result<()> {
         ep.save()?;
     };
 
-    let res = download_into(download_folder, ep.title().unwrap(), ep.uri());
+    let res = download_into(download_folder, ep.title(), ep.uri().unwrap());
 
     if let Ok(path) = res {
         // If download succeeds, set episode local_uri to dlpath.
diff --git a/hammond-gtk/src/widgets/episode.rs b/hammond-gtk/src/widgets/episode.rs
index c113b51..7166d0c 100644
--- a/hammond-gtk/src/widgets/episode.rs
+++ b/hammond-gtk/src/widgets/episode.rs
@@ -5,7 +5,6 @@ use gtk::{ContainerExt, TextBufferExt};
 use open;
 
 use dissolve::strip_html_tags;
-use diesel::associations::Identifiable;
 
 use hammond_data::dbqueries;
 use hammond_data::{Episode, Podcast};
@@ -77,10 +76,7 @@ impl EpisodeWidget {
     fn init(&self, episode: &mut Episode, pd: &Podcast) {
         self.title.set_xalign(0.0);
-
-        if let Some(t) = episode.title() {
-            self.title.set_text(t);
-        }
+        self.title.set_text(episode.title());
 
         if episode.description().is_some() {
             let text = episode.description().unwrap().to_owned();
@@ -116,7 +112,7 @@ impl EpisodeWidget {
         self.play
             .connect_clicked(clone!(episode, played, unplayed => move |_| {
                 let mut episode = episode.clone();
-                on_play_bttn_clicked(*episode.id());
+                on_play_bttn_clicked(episode.rowid());
                 let _ = episode.set_played_now();
                 played.hide();
                 unplayed.show();
@@ -126,7 +122,7 @@ impl EpisodeWidget {
         let download = &self.download;
         self.delete
             .connect_clicked(clone!(episode, play, download => move |del| {
-                on_delete_bttn_clicked(*episode.id());
+                on_delete_bttn_clicked(episode.rowid());
                 del.hide();
                 play.hide();
                 download.show();
@@ -189,7 +185,7 @@ fn on_download_clicked(
     let download_fold = downloader::get_download_folder(&pd_title).unwrap();
     let e = downloader::get_episode(&mut ep, download_fold.as_str());
     if let Err(err) = e {
-        error!("Error while trying to download: {}", ep.uri());
+        error!("Error while trying to download: {:?}", ep.uri());
         error!("Error: {}", err);
     };
     sender.send(true).expect("Couldn't send data to channel");;