Change episode table schema.

Jordan Petridis 2017-12-09 17:38:46 +02:00
parent 999a2a1fc1
commit bc57c33491
No known key found for this signature in database
GPG Key ID: CEABAD9F5683B9A6
10 changed files with 150 additions and 75 deletions

View File

@ -0,0 +1,19 @@
ALTER TABLE episode RENAME TO old_table;
CREATE TABLE episode (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
title TEXT,
uri TEXT NOT NULL UNIQUE,
local_uri TEXT,
description TEXT,
published_date TEXT,
epoch INTEGER NOT NULL DEFAULT 0,
length INTEGER,
guid TEXT,
played INTEGER,
favorite INTEGER NOT NULL DEFAULT 0,
archive INTEGER NOT NULL DEFAULT 0,
podcast_id INTEGER NOT NULL
);
INSERT INTO episode (title, uri, local_uri, description, published_date, epoch, length, guid, played, favorite, archive, podcast_id)
SELECT title, uri, local_uri, description, published_date, epoch, length, guid, played, favorite, archive, podcast_id
FROM old_table;
DROP TABLE old_table;

View File

@ -0,0 +1,22 @@
ALTER TABLE episode RENAME TO old_table;
CREATE TABLE episode (
title TEXT NOT NULL,
uri TEXT UNIQUE,
local_uri TEXT,
description TEXT,
published_date TEXT,
epoch INTEGER NOT NULL DEFAULT 0,
length INTEGER,
guid TEXT,
played INTEGER,
podcast_id INTEGER NOT NULL,
favorite INTEGER DEFAULT 0,
archive INTEGER DEFAULT 0,
PRIMARY KEY (title, podcast_id)
);
INSERT INTO episode (title, uri, local_uri, description, published_date, epoch, length, guid, played, favorite, archive, podcast_id)
SELECT title, uri, local_uri, description, published_date, epoch, length, guid, played, favorite, archive, podcast_id
FROM old_table;
DROP TABLE old_table;

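The two files above read like the down and up halves of a Diesel migration for this schema change (an assumption; the file names are not shown). SQLite cannot alter a table's primary key in place, so both recreate the table and copy the rows across. A minimal sketch of applying such migrations with the diesel_migrations 1.x embedded-migrations API, assuming a standard migrations/ directory and an example database path:

// Sketch only: crate layout, migration directory, and database URL are assumptions.
#[macro_use]
extern crate diesel_migrations;
extern crate diesel;

use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;

// Embeds every up.sql/down.sql pair found under migrations/ at compile time.
embed_migrations!("migrations");

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let con = SqliteConnection::establish("hammond.db")?;
    // Applies any migration that has not yet run, including the one above.
    embedded_migrations::run(&con)?;
    Ok(())
}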
View File

@ -55,7 +55,9 @@ pub fn get_episode_from_id(ep_id: i32) -> Result<Episode> {
let db = connection();
let con = db.get()?;
Ok(episode.filter(id.eq(ep_id)).get_result::<Episode>(&*con)?)
Ok(episode
.filter(rowid.eq(ep_id))
.get_result::<Episode>(&*con)?)
}
pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result<Option<String>> {
@ -65,7 +67,7 @@ pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result<Option<String>> {
let con = db.get()?;
Ok(episode
.filter(id.eq(ep_id))
.filter(rowid.eq(ep_id))
.select(local_uri)
.get_result::<Option<String>>(&*con)?)
}
@ -153,10 +155,18 @@ pub fn get_podcast_from_source_id(sid: i32) -> Result<Podcast> {
.get_result::<Podcast>(&*con)?)
}
pub fn get_episode_from_uri(con: &SqliteConnection, uri_: &str) -> QueryResult<Episode> {
// TODO: unhack me
pub fn get_episode_from_new_episode(
con: &SqliteConnection,
title_: &str,
pid: i32,
) -> QueryResult<Episode> {
use schema::episode::dsl::*;
episode.filter(uri.eq(uri_)).get_result::<Episode>(&*con)
episode
.filter(title.eq(title_))
.filter(podcast_id.eq(pid))
.get_result::<Episode>(&*con)
}
pub fn remove_feed(pd: &Podcast) -> Result<()> {

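The "TODO: unhack me" above marks the chained filters as a stopgap. Since this commit declares (title, podcast_id) as the composite primary key in schema.rs further down, Diesel's find() could take that pair directly; a hypothetical variant, not part of this commit, with assumed crate-local import paths:

// Hypothetical replacement for get_episode_from_new_episode: one lookup by the
// natural key instead of two chained filters. Import paths are assumptions.
use diesel::prelude::*;
use diesel::result::QueryResult;
use diesel::sqlite::SqliteConnection;
use models::Episode;

pub fn get_episode_by_key(con: &SqliteConnection, title_: &str, pid: i32) -> QueryResult<Episode> {
    use schema::episode::dsl::*;
    episode.find((title_, pid)).get_result::<Episode>(con)
}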
View File

@ -91,6 +91,7 @@ impl Update for NewPodcast {
fn update(&self, con: &SqliteConnection, podcast_id: i32) -> QueryResult<usize> {
use schema::podcast::dsl::*;
info!("Updating {}", self.title);
diesel::update(podcast.filter(id.eq(podcast_id)))
.set(self)
.execute(&*con)
@ -160,8 +161,8 @@ impl NewPodcast {
#[builder(derive(Debug))]
#[builder(setter(into))]
pub(crate) struct NewEpisode {
title: Option<String>,
uri: String,
title: String,
uri: Option<String>,
description: Option<String>,
published_date: Option<String>,
length: Option<i32>,
@ -181,7 +182,8 @@ impl Update for NewEpisode {
fn update(&self, con: &SqliteConnection, episode_id: i32) -> QueryResult<usize> {
use schema::episode::dsl::*;
diesel::update(episode.filter(id.eq(episode_id)))
info!("Updating {:?}", self.title);
diesel::update(episode.filter(rowid.eq(episode_id)))
.set(self)
.execute(&*con)
}
@ -191,11 +193,16 @@ impl NewEpisode {
// TODO: Refactor into batch indexes instead.
pub(crate) fn into_episode(self, con: &SqliteConnection) -> Result<Episode> {
self.index(con)?;
Ok(dbqueries::get_episode_from_uri(con, &self.uri)?)
Ok(dbqueries::get_episode_from_new_episode(
con,
&self.title,
self.podcast_id,
)?)
}
pub(crate) fn index(&self, con: &SqliteConnection) -> QueryResult<()> {
let ep = dbqueries::get_episode_from_uri(con, &self.uri.clone());
// TODO: Change me
let ep = dbqueries::get_episode_from_new_episode(con, &self.title, self.podcast_id);
match ep {
Ok(foo) => {
@ -203,10 +210,10 @@ impl NewEpisode {
error!("NEP pid: {}, EP pid: {}", self.podcast_id, foo.podcast_id());
};
if foo.title() != self.title.as_ref().map(|x| x.as_str())
|| foo.published_date() != self.published_date.as_ref().map(|x| x.as_str())
if foo.title() != self.title.as_str() || foo.epoch() != self.epoch
|| foo.uri() != self.uri.as_ref().map(|s| s.as_str())
{
self.update(con, *foo.id())?;
self.update(con, foo.rowid())?;
}
}
Err(_) => {
@ -220,12 +227,12 @@ impl NewEpisode {
#[allow(dead_code)]
// Ignore the following getters. They are used in unit tests mainly.
impl NewEpisode {
pub(crate) fn title(&self) -> Option<&str> {
self.title.as_ref().map(|s| s.as_str())
pub(crate) fn title(&self) -> &str {
self.title.as_ref()
}
pub(crate) fn uri(&self) -> &str {
self.uri.as_ref()
pub(crate) fn uri(&self) -> Option<&str> {
self.uri.as_ref().map(|s| s.as_str())
}
pub(crate) fn description(&self) -> Option<&str> {

View File

@ -18,13 +18,14 @@ use std::str::FromStr;
#[derive(Queryable, Identifiable, AsChangeset, Associations, PartialEq)]
#[table_name = "episode"]
#[changeset_options(treat_none_as_null = "true")]
#[primary_key(title, podcast_id)]
#[belongs_to(Podcast, foreign_key = "podcast_id")]
#[derive(Debug, Clone)]
/// Diesel Model of the episode table.
pub struct Episode {
id: i32,
title: Option<String>,
uri: String,
rowid: i32,
title: String,
uri: Option<String>,
local_uri: Option<String>,
description: Option<String>,
published_date: Option<String>,
@ -38,26 +39,31 @@ pub struct Episode {
}
impl Episode {
/// Get the value of the SQLite `rowid`.
pub fn rowid(&self) -> i32 {
self.rowid
}
/// Get the value of the `title` field.
pub fn title(&self) -> Option<&str> {
self.title.as_ref().map(|s| s.as_str())
pub fn title(&self) -> &str {
&self.title
}
/// Set the `title`.
pub fn set_title(&mut self, value: Option<&str>) {
self.title = value.map(|x| x.to_string());
pub fn set_title(&mut self, value: &str) {
self.title = value.to_string();
}
/// Get the value of the `uri`.
///
/// Represents the URL (usually) where the media file is located.
pub fn uri(&self) -> &str {
self.uri.as_ref()
pub fn uri(&self) -> Option<&str> {
self.uri.as_ref().map(|s| s.as_str())
}
/// Set the `uri`.
pub fn set_uri(&mut self, value: &str) {
self.uri = value.to_string();
pub fn set_uri(&mut self, value: Option<&str>) {
self.uri = value.map(|x| x.to_string());
}
/// Get the value of the `local_uri`.

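A small, hypothetical illustration of the new accessor signatures: title() now returns a plain &str, uri() becomes optional, and rowid() stands in for the old id() getter. Episode here is the public re-export used by the GTK crate below (hammond_data::Episode):

use hammond_data::Episode;

// Builds a one-line summary of an episode under the new accessor signatures.
fn describe(ep: &Episode) -> String {
    match ep.uri() {
        Some(uri) => format!("#{}: {} <{}>", ep.rowid(), ep.title(), uri),
        None => format!("#{}: {} (no remote uri)", ep.rowid(), ep.title()),
    }
}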
View File

@ -34,7 +34,10 @@ pub(crate) fn new_podcast(chan: &Channel, source_id: i32) -> NewPodcast {
/// Parses an `rss::Item` into a `NewEpisode` Struct.
pub(crate) fn new_episode(item: &Item, parent_id: i32) -> Result<NewEpisode> {
let title = item.title().map(|s| s.trim().to_owned());
if item.title().is_none() {
bail!("No title specified for the item.")
}
let title = item.title().unwrap().trim().to_owned();
let description = item.description()
.map(|s| replace_extra_spaces(&ammonia::clean(s)));
let guid = item.guid().map(|s| s.value().trim().to_owned());
@ -44,10 +47,11 @@ pub(crate) fn new_episode(item: &Item, parent_id: i32) -> Result<NewEpisode> {
// Though the db schema has a requirement of episode uri being Unique && Not Null.
// TODO: Restructure
let x = item.enclosure().map(|s| url_cleaner(s.url()));
// FIXME: refactor
let uri = if x.is_some() {
x.unwrap()
x
} else if item.link().is_some() {
item.link().map(|s| url_cleaner(s)).unwrap()
item.link().map(|s| url_cleaner(s))
} else {
bail!("No url specified for the item.")
};
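One possible shape for the FIXME above, keeping the same behaviour but expressed with Option combinators; a hypothetical sketch, not part of this commit, and the url_cleaner import path is assumed:

use rss::Item;
use utils::url_cleaner; // assumed crate-local path of the helper used above

// Prefer the enclosure url, fall back to the item link; the caller can still
// bail!() when this returns None.
fn item_uri(item: &Item) -> Option<String> {
    item.enclosure()
        .map(|e| url_cleaner(e.url()))
        .or_else(|| item.link().map(url_cleaner))
}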
@ -182,8 +186,11 @@ mod tests {
performs.";
let i = new_episode(&firstitem, 0).unwrap();
assert_eq!(i.title(), Some("The Super Bowl of Racism"));
assert_eq!(i.uri(), "http://traffic.megaphone.fm/PPY6458293736.mp3");
assert_eq!(i.title(), "The Super Bowl of Racism");
assert_eq!(
i.uri(),
Some("http://traffic.megaphone.fm/PPY6458293736.mp3")
);
assert_eq!(i.description(), Some(descr));
assert_eq!(i.length(), Some(66738886));
assert_eq!(i.guid(), Some("7df4070a-9832-11e7-adac-cb37b05d5e24"));
@ -203,9 +210,12 @@ mod tests {
Venezuela.";
assert_eq!(
i2.title(),
Some("Atlas Golfed — U.S.-Backed Think Tanks Target Latin America")
"Atlas Golfed — U.S.-Backed Think Tanks Target Latin America"
);
assert_eq!(
i2.uri(),
Some("http://traffic.megaphone.fm/FL5331443769.mp3")
);
assert_eq!(i2.uri(), "http://traffic.megaphone.fm/FL5331443769.mp3");
assert_eq!(i2.description(), Some(descr2));
assert_eq!(i2.length(), Some(67527575));
assert_eq!(i2.guid(), Some("7c207a24-e33f-11e6-9438-eb45dcf36a1d"));
@ -225,11 +235,11 @@ mod tests {
assert_eq!(
i.title(),
Some("The Breakthrough: Hopelessness and Exploitation Inside Homes for Mentally Ill")
"The Breakthrough: Hopelessness and Exploitation Inside Homes for Mentally Ill"
);
assert_eq!(
i.uri(),
"http://tracking.feedpress.it/link/10581/6726758/20170908-cliff-levy.mp3"
Some("http://tracking.feedpress.it/link/10581/6726758/20170908-cliff-levy.mp3")
);
assert_eq!(i.description(), Some(descr));
assert_eq!(i.length(), Some(33396551));
@ -251,14 +261,11 @@ mod tests {
assert_eq!(
i2.title(),
Some(
"The Breakthrough: Behind the Scenes of Hillary Clintons Failed Bid for \
President"
)
"The Breakthrough: Behind the Scenes of Hillary Clintons Failed Bid for President"
);
assert_eq!(
i2.uri(),
"http://tracking.feedpress.it/link/10581/6726759/16_JohnAllen-CRAFT.mp3".to_string()
Some("http://tracking.feedpress.it/link/10581/6726759/16_JohnAllen-CRAFT.mp3")
);
assert_eq!(i2.description(), Some(descr2));
assert_eq!(i2.length(), Some(17964071));
@ -285,10 +292,10 @@ mod tests {
It’s a really packed episode!";
let i = new_episode(&firstitem, 0).unwrap();
assert_eq!(i.title(), Some("Hacking Devices with Kali Linux | LUP 214"));
assert_eq!(i.title(), "Hacking Devices with Kali Linux | LUP 214");
assert_eq!(
i.uri(),
"http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0214.mp3"
Some("http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0214.mp3")
);
assert_eq!(i.description(), Some(descr));
assert_eq!(i.length(), Some(46479789));
@ -305,10 +312,10 @@ mod tests {
future.</p>\n<p>Plus we chat with Wimpy about the Ubuntu Rally in NYC, \
Microsoft’s sneaky move to turn Windows 10 into the ULTIMATE LINUX \
RUNTIME, community news &amp; more!</p>";
assert_eq!(i2.title(), Some("Gnome Does it Again | LUP 213"));
assert_eq!(i2.title(), "Gnome Does it Again | LUP 213");
assert_eq!(
i2.uri(),
"http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0213.mp3"
Some("http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0213.mp3")
);
assert_eq!(i2.description(), Some(descr2));
assert_eq!(i2.length(), Some(36544272));
@ -327,11 +334,13 @@ mod tests {
rel=\"noopener noreferrer\">RFC 2094</a> \"Non-lexical lifetimes\"";
let i = new_episode(&firstitem, 0).unwrap();
assert_eq!(i.title(), Some("Episode #9 - A Once in a Lifetime RFC"));
assert_eq!(i.title(), "Episode #9 - A Once in a Lifetime RFC");
assert_eq!(
i.uri(),
"http://request-for-explanation.github.\
io/podcast/ep9-a-once-in-a-lifetime-rfc/episode.mp3"
Some(
"http://request-for-explanation.github.\
io/podcast/ep9-a-once-in-a-lifetime-rfc/episode.mp3"
)
);
assert_eq!(i.description(), Some(descr));
assert_eq!(i.length(), Some(15077388));
@ -349,11 +358,13 @@ mod tests {
href=\"https://github.com/rust-lang/rfcs/pull/2071\" rel=\"noopener \
noreferrer\">RFC 2071</a> \"Add impl Trait type alias and variable \
declarations\"";
assert_eq!(i2.title(), Some("Episode #8 - An Existential Crisis"));
assert_eq!(i2.title(), "Episode #8 - An Existential Crisis");
assert_eq!(
i2.uri(),
"http://request-for-explanation.github.io/podcast/ep8-an-existential-crisis/episode.\
mp3"
Some(
"http://request-for-explanation.github.\
io/podcast/ep8-an-existential-crisis/episode.mp3"
)
);
assert_eq!(i2.description(), Some(descr2));
assert_eq!(i2.length(), Some(13713219));

View File

@ -1,8 +1,8 @@
table! {
episode (id) {
id -> Integer,
title -> Nullable<Text>,
uri -> Text,
episode (title, podcast_id) {
rowid -> Integer,
title -> Text,
uri -> Nullable<Text>,
local_uri -> Nullable<Text>,
description -> Nullable<Text>,
published_date -> Nullable<Text>,

View File

@ -146,22 +146,26 @@ mod tests {
// Setup episodes
let db = connection();
let con = db.get().unwrap();
NewEpisodeBuilder::default()
.uri("foo_bar".to_string())
let n1 = NewEpisodeBuilder::default()
.title("foo_bar".to_string())
.podcast_id(0)
.build()
.unwrap()
.into_episode(&con)
.unwrap();
NewEpisodeBuilder::default()
.uri("bar_baz".to_string())
let n2 = NewEpisodeBuilder::default()
.title("bar_baz".to_string())
.podcast_id(1)
.build()
.unwrap()
.into_episode(&con)
.unwrap();
let mut ep1 = dbqueries::get_episode_from_uri(&con, "foo_bar").unwrap();
let mut ep2 = dbqueries::get_episode_from_uri(&con, "bar_baz").unwrap();
let mut ep1 =
dbqueries::get_episode_from_new_episode(&con, n1.title(), n1.podcast_id()).unwrap();
let mut ep2 =
dbqueries::get_episode_from_new_episode(&con, n2.title(), n2.podcast_id()).unwrap();
ep1.set_local_uri(Some(valid_path.to_str().unwrap()));
ep2.set_local_uri(Some(bad_path.to_str().unwrap()));
@ -179,7 +183,7 @@ mod tests {
let episodes = dbqueries::get_downloaded_episodes().unwrap();
assert_eq!(episodes.len(), 1);
assert_eq!("foo_bar", episodes.first().unwrap().uri());
assert_eq!("foo_bar", episodes.first().unwrap().title());
}
#[test]
@ -188,7 +192,7 @@ mod tests {
let mut episode = {
let db = connection();
let con = db.get().unwrap();
dbqueries::get_episode_from_uri(&con, "bar_baz").unwrap()
dbqueries::get_episode_from_new_episode(&con, "bar_baz", 1).unwrap()
};
checker_helper(&mut episode);
@ -201,7 +205,7 @@ mod tests {
let mut episode = {
let db = connection();
let con = db.get().unwrap();
dbqueries::get_episode_from_uri(&con, "foo_bar").unwrap()
dbqueries::get_episode_from_new_episode(&con, "foo_bar", 0).unwrap()
};
let valid_path = episode.local_uri().unwrap().to_owned();
@ -215,7 +219,7 @@ mod tests {
let mut episode = {
let db = connection();
let con = db.get().unwrap();
dbqueries::get_episode_from_uri(&con, "foo_bar").unwrap()
dbqueries::get_episode_from_new_episode(&con, "foo_bar", 0).unwrap()
};
let now_utc = Utc::now().timestamp() as i32;
// let limit = now_utc - 172_800;
@ -235,7 +239,7 @@ mod tests {
let mut episode = {
let db = connection();
let con = db.get().unwrap();
dbqueries::get_episode_from_uri(&con, "foo_bar").unwrap()
dbqueries::get_episode_from_new_episode(&con, "foo_bar", 0).unwrap()
};
let now_utc = Utc::now().timestamp() as i32;
// limit = 172_800;

View File

@ -118,7 +118,7 @@ pub fn get_episode(ep: &mut Episode, download_folder: &str) -> Result<()> {
ep.save()?;
};
let res = download_into(download_folder, ep.title().unwrap(), ep.uri());
let res = download_into(download_folder, ep.title(), ep.uri().unwrap());
if let Ok(path) = res {
// If the download succeeds, set the episode's local_uri to dlpath.

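The new call above unwraps ep.uri(), which will panic for an episode stored without an enclosure or link now that the column is nullable. A hypothetical guard, keeping the surrounding names (get_episode returns Result<()>, and the log crate's error! macro is assumed to be in scope, as elsewhere in the repo):

// Hypothetical drop-in for the download call: skip episodes without a remote uri.
let res = match ep.uri() {
    Some(uri) => download_into(download_folder, ep.title(), uri),
    None => {
        error!("Episode `{}` has no remote uri; skipping download.", ep.title());
        return Ok(());
    }
};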
View File

@ -5,7 +5,6 @@ use gtk::{ContainerExt, TextBufferExt};
use open;
use dissolve::strip_html_tags;
use diesel::associations::Identifiable;
use hammond_data::dbqueries;
use hammond_data::{Episode, Podcast};
@ -77,10 +76,7 @@ impl EpisodeWidget {
fn init(&self, episode: &mut Episode, pd: &Podcast) {
self.title.set_xalign(0.0);
if let Some(t) = episode.title() {
self.title.set_text(t);
}
self.title.set_text(episode.title());
if episode.description().is_some() {
let text = episode.description().unwrap().to_owned();
@ -116,7 +112,7 @@ impl EpisodeWidget {
self.play
.connect_clicked(clone!(episode, played, unplayed => move |_| {
let mut episode = episode.clone();
on_play_bttn_clicked(*episode.id());
on_play_bttn_clicked(episode.rowid());
let _ = episode.set_played_now();
played.hide();
unplayed.show();
@ -126,7 +122,7 @@ impl EpisodeWidget {
let download = &self.download;
self.delete
.connect_clicked(clone!(episode, play, download => move |del| {
on_delete_bttn_clicked(*episode.id());
on_delete_bttn_clicked(episode.rowid());
del.hide();
play.hide();
download.show();
@ -189,7 +185,7 @@ fn on_download_clicked(
let download_fold = downloader::get_download_folder(&pd_title).unwrap();
let e = downloader::get_episode(&mut ep, download_fold.as_str());
if let Err(err) = e {
error!("Error while trying to download: {}", ep.uri());
error!("Error while trying to download: {:?}", ep.uri());
error!("Error: {}", err);
};
sender.send(true).expect("Couldn't send data to channel");