Make Feed fields private.
commit 3d108d4a7a
parent 54a0f17588
@@ -55,7 +55,8 @@ fn get_temp_db() -> TempDB {
 }
 
 fn index_urls(m: &Database) {
-    URLS.par_iter().for_each(|&(buff, url)| {
+    let mut feeds: Vec<_> = URLS.par_iter()
+        .map(|&(buff, url)| {
         // Create and insert a Source into db
         let s = {
             let temp = m.lock().unwrap();
@@ -63,11 +64,11 @@ fn index_urls(m: &Database) {
         };
         // parse it into a channel
         let chan = rss::Channel::read_from(buff).unwrap();
-        let feed = Feed(chan, s);
+        Feed::new_from_channel_source(chan, s)
+    })
+    .collect();
 
-        // Index the channel
-        index_feeds(m, &mut [feed]);
-    });
+    index_feeds(m, &mut feeds);
 }
 
 #[bench]
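The two benchmark hunks above change the shape of index_urls: instead of building and indexing one Feed per URL inside a for_each closure, the URLs are mapped to Feeds, collected into a Vec, and handed to index_feeds once. A minimal, self-contained sketch of that collect-then-index shape, using rayon (which the crate already uses for par_iter); the Feed and index_feeds below are stand-ins, and the real index_feeds also takes the Database handle:

use rayon::prelude::*;

// Stand-in for the real Feed(rss::Channel, Source); illustration only.
#[derive(Debug)]
struct Feed(String);

// Stand-in for the real index_feeds(m: &Database, feeds: &mut [Feed]).
fn index_feeds(feeds: &mut [Feed]) {
    for feed in feeds {
        println!("indexing {:?}", feed);
    }
}

fn main() {
    let urls = vec!["https://a.example/rss", "https://b.example/rss"];

    // New shape: map every URL to a Feed in parallel, collect them all...
    let mut feeds: Vec<Feed> = urls
        .par_iter()
        .map(|&url| Feed(url.to_owned()))
        .collect();

    // ...then index the whole batch once, instead of once per closure body.
    index_feeds(&mut feeds);
}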
@@ -13,9 +13,17 @@ use std::sync::{Arc, Mutex};
 pub type Database = Arc<Mutex<SqliteConnection>>;
 
 #[derive(Debug)]
-pub struct Feed(pub rss::Channel, pub Source);
+pub struct Feed(rss::Channel, Source);
 
 impl Feed {
+    pub fn new_from_source(db: &Database, s: Source) -> Result<Feed> {
+        s.refresh(db)
+    }
+
+    pub fn new_from_channel_source(chan: rss::Channel, s: Source) -> Feed {
+        Feed(chan, s)
+    }
+
     fn index(&self, db: &Database) -> Result<()> {
         let tempdb = db.lock().unwrap();
         let pd = self.index_channel(&tempdb)?;
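The hunk above is the heart of the commit: the Feed tuple struct's fields become private, and two public constructors are added, so code outside the module can no longer build a Feed directly. A minimal sketch of that encapsulation pattern, with placeholder Channel and Source types standing in for rss::Channel and the crate's Source:

mod feed {
    // Placeholders for rss::Channel and the crate's Source type.
    #[derive(Debug)]
    pub struct Channel;
    #[derive(Debug)]
    pub struct Source;

    // Tuple struct with private fields: the implicit Feed(..) constructor
    // is only visible inside this module.
    #[derive(Debug)]
    pub struct Feed(Channel, Source);

    impl Feed {
        pub fn new_from_channel_source(chan: Channel, s: Source) -> Feed {
            Feed(chan, s)
        }
    }
}

fn main() {
    use feed::{Channel, Feed, Source};

    // let f = Feed(Channel, Source); // error: the fields (and constructor) are private
    let f = Feed::new_from_channel_source(Channel, Source);
    println!("{:?}", f);
}

Routing construction through new_from_channel_source (and new_from_source, which delegates to Source::refresh) keeps every Feed creation path inside the module.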
@@ -146,7 +154,7 @@ pub fn fetch_feeds(db: &Database, feeds: Vec<Source>) -> Vec<Feed> {
         .into_par_iter()
         .filter_map(|x| {
             let uri = x.uri().to_owned();
-            let l = x.refresh(db);
+            let l = Feed::new_from_source(db, x);
             if l.is_ok() {
                 l.ok()
             } else {
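In fetch_feeds the per-source refresh now goes through the new constructor, and filter_map keeps only the sources that refreshed successfully. A small, self-contained sketch of that Result-filtering pattern; the types and the error-reporting branch here are assumptions, since the hunk is cut off after the else:

// Stand-ins for the crate's types; illustration only.
#[derive(Debug)]
struct Feed(String);

fn refresh(uri: &str) -> Result<Feed, String> {
    if uri.starts_with("https://") {
        Ok(Feed(uri.to_owned()))
    } else {
        Err(format!("could not refresh {}", uri))
    }
}

fn main() {
    let sources = vec!["https://ok.example/rss", "not-a-url"];

    // Keep the feeds that refreshed, drop (and report) the ones that failed,
    // mirroring the filter_map over Results in fetch_feeds.
    let feeds: Vec<Feed> = sources
        .into_iter()
        .filter_map(|uri| match refresh(uri) {
            Ok(feed) => Some(feed),
            Err(e) => {
                eprintln!("{}", e);
                None
            }
        })
        .collect();

    println!("fetched {} feed(s)", feeds.len());
}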
@@ -244,7 +252,8 @@ mod tests {
             ),
         ];
 
-        urls.iter().for_each(|&(path, url)| {
+        let mut feeds: Vec<_> = urls.iter()
+            .map(|&(path, url)| {
             let tempdb = m.lock().unwrap();
             // Create and insert a Source into db
             let s = insert_return_source(&tempdb, url).unwrap();
@@ -254,11 +263,12 @@ mod tests {
             let feed = fs::File::open(path).unwrap();
             // parse it into a channel
             let chan = rss::Channel::read_from(BufReader::new(feed)).unwrap();
-            let feed = Feed(chan, s);
+            Feed::new_from_channel_source(chan, s)
+        })
+        .collect();
 
             // Index the channel
-            index_feeds(&m, &mut [feed]);
-        });
+            index_feeds(&m, &mut feeds);
 
         // Assert the index rows equal the controlled results
         let tempdb = m.lock().unwrap();
@@ -339,6 +339,6 @@ impl<'a> Source {
         req.read_to_string(&mut buf)?;
         let chan = Channel::from_str(&buf)?;
 
-        Ok(Feed(chan, self))
+        Ok(Feed::new_from_channel_source(chan, self))
     }
 }