hammond_data::Feed: Remove unused parts.
parent 3358fcd0b3
commit 7f78e87551

TODO.md
@@ -10,7 +10,6 @@
 ## Second
 
 - [ ] Make use of file metadas, [This](https://github.com/GuillaumeGomez/audio-video-metadata) might be helpfull.
-- [ ] Notifications
 - [ ] Episode queue
 - [ ] Embedded player
 - [ ] MPRIS integration
@@ -21,8 +20,8 @@
 - [ ] Download Queue
 - [ ] Ability to Stream content on demand
 - [ ] soundcloud and itunes feeds // [This](http://getrssfeed.com) seems intresting.
-- [ ] Integrate with Itunes API for various crap
-- [ ] YoutubeFeeds
+- [ ] Integrate with Itunes API for various crap?
+- [ ] YoutubeFeeds?
 
 ## Rest Tasks
 
@@ -95,7 +95,6 @@ fn bench_get_normal_feeds(b: &mut Bencher) {
     b.iter(|| {
         let sources = hammond_data::dbqueries::get_sources().unwrap();
         index_loop(sources);
-        println!("I RUN");
     });
 }
 
@@ -14,11 +14,11 @@ use models::insertables::{NewEpisode, NewPodcast};
 use database::connection;
 use errors::*;
 
-#[cfg(test)]
-use models::queryables::Episode;
+// #[cfg(test)]
+// use models::queryables::Episode;
 
 #[derive(Debug)]
-/// Wrapper struct that hold a `Source` and the `rss::Channel`
+/// Wrapper struct that hold a `Source` id and the `rss::Channel`
 /// that corresponds to the `Source.uri` field.
 pub struct Feed {
     channel: rss::Channel,
@@ -36,18 +36,12 @@ impl Feed {
         Feed { channel, source_id }
     }
 
-    /// docs
+    /// Index the contents of the RSS `Feed` into the database.
     pub fn index(&self) -> Result<()> {
-        let pd = self.get_podcast()?;
+        let pd = self.parse_podcast().into_podcast()?;
         self.index_channel_items(&pd)
     }
 
-    // #[allow(dead_code)]
-    // fn index_channel(&self) -> Result<()> {
-    //     self.parse_channel().index()?;
-    //     Ok(())
-    // }
-
     // TODO: Refactor transcactions and find a way to do it in parallel.
     fn index_channel_items(&self, pd: &Podcast) -> Result<()> {
         let episodes = self.parse_channel_items(pd);
@@ -56,8 +50,7 @@ impl Feed {
 
         let _ = con.transaction::<(), Error, _>(|| {
             episodes.into_iter().for_each(|x| {
-                let e = x.index(&con);
-                if let Err(err) = e {
+                if let Err(err) = x.index(&con) {
                     error!("Failed to index episode: {:?}.", x.title());
                     error!("Error msg: {}", err);
                 };
@@ -67,7 +60,7 @@ impl Feed {
         Ok(())
     }
 
-    fn parse_channel(&self) -> NewPodcast {
+    fn parse_podcast(&self) -> NewPodcast {
         parser::new_podcast(&self.channel, self.source_id)
     }
 
@@ -81,25 +74,20 @@ impl Feed {
         new_episodes
     }
 
-    fn get_podcast(&self) -> Result<Podcast> {
-        self.parse_channel().into_podcast()
-    }
+    // #[cfg(test)]
+    // /// This returns only the episodes in the xml feed.
+    // fn get_episodes(&self) -> Result<Vec<Episode>> {
+    //     let pd = self.get_podcast()?;
+    //     let eps = self.parse_channel_items(&pd);
 
-    #[cfg(test)]
-    /// This returns only the episodes in the xml feed.
-    /// Used for unit-tests only.
-    fn get_episodes(&self) -> Result<Vec<Episode>> {
-        let pd = self.get_podcast()?;
-        let eps = self.parse_channel_items(&pd);
+    //     let db = connection();
+    //     let con = db.get()?;
+    //     let episodes: Vec<_> = eps.into_iter()
+    //         .filter_map(|ep| ep.into_episode(&con).ok())
+    //         .collect();
 
-        let db = connection();
-        let con = db.get()?;
-        let episodes: Vec<_> = eps.into_iter()
-            .filter_map(|ep| ep.into_episode(&con).ok())
-            .collect();
+    //     Ok(episodes)
+    // }
 
-        Ok(episodes)
-    }
 }
 
 /// Index a "list" of `Source`s.
@@ -208,41 +196,4 @@ mod tests {
         assert_eq!(dbqueries::get_podcasts().unwrap().len(), 4);
         assert_eq!(dbqueries::get_episodes().unwrap().len(), 274);
     }
-
-    #[test]
-    fn test_partial_index_podcast() {
-        truncate_db().unwrap();
-        let url = "https://feeds.feedburner.com/InterceptedWithJeremyScahill";
-
-        let mut s1 = Source::from_url(url).unwrap();
-        let mut s2 = Source::from_url(url).unwrap();
-        assert_eq!(s1, s2);
-        assert_eq!(s1.id(), s2.id());
-
-        let f1 = s1.into_feed(false).unwrap();
-        let f2 = s2.into_feed(false).unwrap();
-
-        let p1 = f1.get_podcast().unwrap();
-        let p2 = {
-            f2.index().unwrap();
-            f2.get_podcast().unwrap()
-        };
-        assert_eq!(p1, p2);
-        assert_eq!(p1.id(), p2.id());
-        assert_eq!(p1.source_id(), p2.source_id());
-
-        let eps1 = f1.get_episodes().unwrap();
-        let eps2 = {
-            f2.index().unwrap();
-            f2.get_episodes().unwrap()
-        };
-
-        eps1.into_par_iter().zip(eps2).into_par_iter().for_each(
-            |(ep1, ep2): (Episode, Episode)| {
-                assert_eq!(ep1, ep2);
-                assert_eq!(ep1.id(), ep2.id());
-                assert_eq!(ep1.podcast_id(), ep2.podcast_id());
-            },
-        );
-    }
 }
@@ -113,6 +113,7 @@ impl NewPodcast {
         if (foo.link() != self.link) || (foo.title() != self.title)
             || (foo.image_uri() != self.image_uri.as_ref().map(|x| x.as_str()))
         {
+            info!("NewEpisode: {:?}\n OldEpisode: {:?}", self, foo);
             self.update(&con, foo.id())?;
         }
     }