Fix batch indexing.

Re-enable dbqueries::index_new_episodes() and drop the per-episode
insert loop that worked around a diesel 1.1 regression. The batch
insert now runs against the explicitly dereferenced pooled connection,
execute(&*con), where the old commented-out code had execute(&con).

Jordan Petridis 2018-01-22 14:51:31 +02:00
parent c2d5b5ded0
commit d231cc165f
GPG Key ID: CEABAD9F5683B9A6 (no known key found for this signature in database)
2 changed files with 13 additions and 20 deletions


@@ -293,17 +293,17 @@ pub(crate) fn episode_exists(title_: &str, podcast_id_: i32) -> Result<bool> {
         .map_err(From::from)
 }
 
-// pub(crate) fn index_new_episodes(eps: &[NewEpisode]) -> Result<()> {
-//     use schema::episode::dsl::*;
-//     let db = connection();
-//     let con = db.get()?;
+pub(crate) fn index_new_episodes(eps: &[NewEpisode]) -> Result<()> {
+    use schema::episode::dsl::*;
+    let db = connection();
+    let con = db.get()?;
 
-//     diesel::insert_into(episode)
-//         .values(eps)
-//         .execute(&con)
-//         .map_err(From::from)
-//         .map(|_| ())
-// }
+    diesel::insert_into(episode)
+        .values(eps)
+        .execute(&*con)
+        .map_err(From::from)
+        .map(|_| ())
+}
 
 pub fn update_none_to_played_now(parent: &Podcast) -> Result<usize> {
     use schema::episode::dsl::*;
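For context, a minimal, self-contained sketch of the batch-insert pattern this hunk restores, assuming diesel 1.x with an r2d2 connection pool over SQLite; the table layout, the NewEpisode fields, and the pool parameter are illustrative stand-ins, not the project's actual definitions. The detail worth noting is execute(&*con): the pooled connection only Derefs to the SqliteConnection that execute() expects, so the reborrow is spelled out explicitly where the commented-out version passed &con.

// Sketch only: assumes diesel 1.x built with the "sqlite" and "r2d2" features.
#[macro_use]
extern crate diesel;

use diesel::prelude::*;
use diesel::r2d2::{ConnectionManager, Pool};

// Illustrative schema; the real project generates this from its migrations.
table! {
    episode (id) {
        id -> Integer,
        title -> Text,
        podcast_id -> Integer,
    }
}

#[derive(Insertable)]
#[table_name = "episode"]
struct NewEpisode {
    title: String,
    podcast_id: i32,
}

fn index_new_episodes(
    pool: &Pool<ConnectionManager<SqliteConnection>>,
    eps: &[NewEpisode],
) -> QueryResult<()> {
    use episode::dsl::*;

    let con = pool.get().expect("database pool exhausted");
    // `con` is an r2d2 PooledConnection; `&*con` reborrows through its
    // Deref impl to reach the &SqliteConnection that execute() wants.
    diesel::insert_into(episode)
        .values(eps)
        .execute(&*con)
        .map(|_| ())
}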


@@ -4,9 +4,9 @@ use futures::future::*;
 use itertools::{Either, Itertools};
 use rss;
 
-// use dbqueries;
+use dbqueries;
 use errors::*;
-use models::{IndexState, Insert, Update};
+use models::{IndexState, Update};
 use models::{NewEpisode, NewPodcast, Podcast};
 use pipeline::*;
 
@@ -47,14 +47,7 @@ impl Feed {
         .and_then(|(insert, update)| {
             if !insert.is_empty() {
                 info!("Indexing {} episodes.", insert.len());
-                // dbqueries::index_new_episodes(insert.as_slice())?;
-                // FIXME: workaround cause of a diesel 1.1 reggression.
-                insert.iter().for_each(|ep| {
-                    if let Err(err) = ep.insert() {
-                        error!("Failed to index episode: {:?}.", ep.title());
-                        error!("Error msg: {}", err);
-                    }
-                });
+                dbqueries::index_new_episodes(insert.as_slice())?;
             }
             Ok((insert, update))
         })
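One behavioral note on this second hunk: the removed workaround logged each failed insert and kept going, while the restored call propagates the first error through ? and abandons the rest of the batch. If all-or-nothing semantics are wanted, diesel's Connection::transaction can wrap the batch; a minimal sketch, reusing the illustrative episode table and NewEpisode type from the earlier sketch:

// Sketch: make the batch atomic; reuses the illustrative schema above.
fn index_new_episodes_atomic(
    con: &SqliteConnection,
    eps: &[NewEpisode],
) -> QueryResult<()> {
    use episode::dsl::*;

    // transaction() rolls back if the closure returns Err,
    // so either every episode is indexed or none are.
    con.transaction(|| {
        diesel::insert_into(episode)
            .values(eps)
            .execute(con)
            .map(|_| ())
    })
}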