Merge branch 'alatiera/spelling' into 'master'

Fix spelling of things

See merge request World/podcasts!143
This commit is contained in:
Jordan Petridis 2020-06-21 15:11:41 +00:00
commit 0bc379bf42
10 changed files with 24 additions and 24 deletions

View File

@ -84,7 +84,7 @@ pub enum DataError {
FeedRedirect(Source), FeedRedirect(Source),
#[fail(display = "Feed is up to date")] #[fail(display = "Feed is up to date")]
FeedNotModified(Source), FeedNotModified(Source),
#[fail(display = "Error occured while Parsing an Episode. Reason: {}", reason)] #[fail(display = "Error occurred while Parsing an Episode. Reason: {}", reason)]
ParseEpisodeError { reason: String, parent_id: i32 }, ParseEpisodeError { reason: String, parent_id: i32 },
#[fail(display = "Episode was not changed and thus skipped.")] #[fail(display = "Episode was not changed and thus skipped.")]
EpisodeNotChanged, EpisodeNotChanged,

View File

@ -198,7 +198,7 @@ mod tests {
}) })
.collect(); .collect();
// Index the channes // Index the channels
let stream_ = stream::iter_ok(feeds).for_each(|x| x.index()); let stream_ = stream::iter_ok(feeds).for_each(|x| x.index());
tokio::run(stream_.map_err(|_| ())); tokio::run(stream_.map_err(|_| ()));

View File

@ -111,7 +111,7 @@ pub use crate::models::{Episode, EpisodeWidgetModel, Show, ShowCoverModel, Sourc
/// It originates from the Tor-browser UA. /// It originates from the Tor-browser UA.
pub const USER_AGENT: &str = "Mozilla/5.0 (Windows NT 6.1; rv:60.0) Gecko/20100101 Firefox/60.0"; pub const USER_AGENT: &str = "Mozilla/5.0 (Windows NT 6.1; rv:60.0) Gecko/20100101 Firefox/60.0";
/// [XDG Base Direcotory](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html) Paths. /// [XDG Base Directory](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html) Paths.
#[allow(missing_debug_implementations)] #[allow(missing_debug_implementations)]
pub mod xdg_dirs { pub mod xdg_dirs {
use std::path::PathBuf; use std::path::PathBuf;
@ -137,7 +137,7 @@ pub mod xdg_dirs {
PODCASTS_XDG.create_cache_directory(PODCASTS_XDG.get_cache_home()).unwrap() PODCASTS_XDG.create_cache_directory(PODCASTS_XDG.get_cache_home()).unwrap()
}; };
/// GNOME Podcasts Download Direcotry `PathBuf`. /// GNOME Podcasts Download Directory `PathBuf`.
pub static ref DL_DIR: PathBuf = { pub static ref DL_DIR: PathBuf = {
PODCASTS_XDG.create_data_directory("Downloads").unwrap() PODCASTS_XDG.create_data_directory("Downloads").unwrap()
}; };

View File

@ -255,7 +255,7 @@ impl NewEpisodeMinimal {
return Err(err); return Err(err);
}; };
// Default to rfc2822 represantation of epoch 0. // Default to rfc2822 representation of epoch 0.
let date = parse_rfc822(item.pub_date().unwrap_or("Thu, 1 Jan 1970 00:00:00 +0000")); let date = parse_rfc822(item.pub_date().unwrap_or("Thu, 1 Jan 1970 00:00:00 +0000"));
// Should treat information from the rss feeds as invalid by default. // Should treat information from the rss feeds as invalid by default.
// Case: "Thu, 05 Aug 2016 06:00:00 -0400" <-- Actually that was friday. // Case: "Thu, 05 Aug 2016 06:00:00 -0400" <-- Actually that was friday.
@ -342,7 +342,7 @@ mod tests {
use std::io::BufReader; use std::io::BufReader;
// TODO: Add tests for other feeds too. // TODO: Add tests for other feeds too.
// Especially if you find an *intresting* generated feed. // Especially if you find an *interesting* generated feed.
// Known prebuilt expected objects. // Known prebuilt expected objects.
lazy_static! { lazy_static! {

View File

@ -216,7 +216,7 @@ impl Source {
self = self.save()?; self = self.save()?;
debug!("Updated Source: {:#?}", &self); debug!("Updated Source: {:#?}", &self);
info!("Feed url of Source {}, was updated succesfully.", self.id()); info!("Feed url of Source {}, was updated successfully.", self.id());
} }
Ok(self) Ok(self)

View File

@ -43,7 +43,7 @@ use failure::Error;
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
// FIXME: Make it a Diesel model // FIXME: Make it a Diesel model
/// Represents an `outline` xml element as per the `OPML` [specification][spec] /// Represents an `outline` xml element as per the `OPML` [specification][spec]
/// not `RSS` related sub-elements are ommited. /// not `RSS` related sub-elements are omitted.
/// ///
/// [spec]: http://dev.opml.org/spec2.html /// [spec]: http://dev.opml.org/spec2.html
pub struct Opml { pub struct Opml {
@ -82,7 +82,7 @@ pub fn import_from_file<P: AsRef<Path>>(path: P) -> Result<Vec<Source>, DataErro
import_to_db(content.as_slice()).map_err(From::from) import_to_db(content.as_slice()).map_err(From::from)
} }
/// Export a file to `P`, taking the feeds from the database and outputing /// Export a file to `P`, taking the feeds from the database and outputting
/// them in opml format. /// them in opml format.
pub fn export_from_db<P: AsRef<Path>>(path: P, export_title: &str) -> Result<(), Error> { pub fn export_from_db<P: AsRef<Path>>(path: P, export_title: &str) -> Result<(), Error> {
let file = File::create(path)?; let file = File::create(path)?;
@ -163,7 +163,7 @@ pub fn export_to_file<F: Write>(file: F, export_title: &str) -> Result<(), Error
Ok(()) Ok(())
} }
/// Extracts the `outline` elemnts from a reader `R` and returns a `HashSet` of `Opml` structs. /// Extracts the `outline` elements from a reader `R` and returns a `HashSet` of `Opml` structs.
pub fn extract_sources<R: Read>(reader: R) -> Result<HashSet<Opml>, reader::Error> { pub fn extract_sources<R: Read>(reader: R) -> Result<HashSet<Opml>, reader::Error> {
let mut list = HashSet::new(); let mut list = HashSet::new();
let parser = reader::EventReader::new(reader); let parser = reader::EventReader::new(reader);

View File

@ -56,7 +56,7 @@ fn download_checker() -> Result<(), DataError> {
Ok(()) Ok(())
} }
/// Delete watched `episodes` that have exceded their liftime after played. /// Delete watched `episodes` that have exceeded their lifetime after played.
fn played_cleaner(cleanup_date: DateTime<Utc>) -> Result<(), DataError> { fn played_cleaner(cleanup_date: DateTime<Utc>) -> Result<(), DataError> {
let mut episodes = dbqueries::get_played_cleaner_episodes()?; let mut episodes = dbqueries::get_played_cleaner_episodes()?;
let now_utc = cleanup_date.timestamp() as i32; let now_utc = cleanup_date.timestamp() as i32;
@ -68,7 +68,7 @@ fn played_cleaner(cleanup_date: DateTime<Utc>) -> Result<(), DataError> {
let limit = ep.played().unwrap(); let limit = ep.played().unwrap();
if now_utc > limit { if now_utc > limit {
delete_local_content(ep) delete_local_content(ep)
.map(|_| info!("Episode {:?} was deleted succesfully.", ep.local_uri())) .map(|_| info!("Episode {:?} was deleted successfully.", ep.local_uri()))
.map_err(|err| error!("Error: {}", err)) .map_err(|err| error!("Error: {}", err))
.map_err(|_| error!("Failed to delete file: {:?}", ep.local_uri())) .map_err(|_| error!("Failed to delete file: {:?}", ep.local_uri()))
.ok(); .ok();
@ -144,11 +144,11 @@ pub fn get_download_folder(pd_title: &str) -> Result<String, DataError> {
// TODO: Write Tests // TODO: Write Tests
pub fn delete_show(pd: &Show) -> Result<(), DataError> { pub fn delete_show(pd: &Show) -> Result<(), DataError> {
dbqueries::remove_feed(pd)?; dbqueries::remove_feed(pd)?;
info!("{} was removed succesfully.", pd.title()); info!("{} was removed successfully.", pd.title());
let fold = get_download_folder(pd.title())?; let fold = get_download_folder(pd.title())?;
fs::remove_dir_all(&fold)?; fs::remove_dir_all(&fold)?;
info!("All the content at, {} was removed succesfully", &fold); info!("All the content at, {} was removed successfully", &fold);
Ok(()) Ok(())
} }

View File

@ -54,7 +54,7 @@ pub trait DownloadProgress {
// Sorry to those who will have to work with that code. // Sorry to those who will have to work with that code.
// Would much rather use a crate, // Would much rather use a crate,
// or bindings for a lib like youtube-dl(python), // or bindings for a lib like youtube-dl(python),
// But cant seem to find one. // But can't seem to find one.
// TODO: Write unit-tests. // TODO: Write unit-tests.
fn download_into( fn download_into(
dir: &str, dir: &str,
@ -64,7 +64,7 @@ fn download_into(
) -> Result<String, DownloadError> { ) -> Result<String, DownloadError> {
info!("GET request to: {}", url); info!("GET request to: {}", url);
// Haven't included the loop check as // Haven't included the loop check as
// Steal the Stars would tigger it as // Steal the Stars would trigger it as
// it has a loop back before giving correct url // it has a loop back before giving correct url
let policy = RedirectPolicy::custom(|attempt| { let policy = RedirectPolicy::custom(|attempt| {
info!("Redirect Attempt URL: {:?}", attempt.url()); info!("Redirect Attempt URL: {:?}", attempt.url());
@ -104,7 +104,7 @@ fn download_into(
.and_then(|h| h.to_str().ok()) .and_then(|h| h.to_str().ok())
.map(From::from); .map(From::from);
ct_len.map(|x| info!("File Lenght: {}", x)); ct_len.map(|x| info!("File Length: {}", x));
ct_type.map(|x| info!("Content Type: {}", x)); ct_type.map(|x| info!("Content Type: {}", x));
let ext = get_ext(ct_type).unwrap_or_else(|| String::from("unknown")); let ext = get_ext(ct_type).unwrap_or_else(|| String::from("unknown"));
@ -131,7 +131,7 @@ fn download_into(
let target = format!("{}/{}.{}", dir, file_title, ext); let target = format!("{}/{}.{}", dir, file_title, ext);
// Rename/move the tempfile into a permanent place upon success. // Rename/move the tempfile into a permanent place upon success.
rename(out_file, &target)?; rename(out_file, &target)?;
info!("Downloading of {} completed succesfully.", &target); info!("Downloading of {} completed successfully.", &target);
Ok(target) Ok(target)
} }
@ -219,10 +219,10 @@ pub fn get_episode(
progress, progress,
)?; )?;
// If download succedes set episode local_uri to dlpath. // If download succeeds set episode local_uri to dlpath.
ep.set_local_uri(Some(&path)); ep.set_local_uri(Some(&path));
// Over-write episode lenght // Over-write episode length
let size = fs::metadata(path); let size = fs::metadata(path);
if let Ok(s) = size { if let Ok(s) = size {
ep.set_length(Some(s.len() as i32)) ep.set_length(Some(s.len() as i32))

View File

@ -51,13 +51,13 @@
<release version="0.4.5" date="2018-08-31"> <release version="0.4.5" date="2018-08-31">
<description> <description>
<p> <p>
Podcasts 0.4.5 brings a month of bug fixes, performance improvements and initial tranlations support. Podcasts 0.4.5 brings a month of bug fixes, performance improvements and initial translations support.
</p> </p>
<ul> <ul>
<li>Finnish, Polish, Turkish, Spanish, German, Galician, Indonesian and Korean Translations were added.</li> <li>Finnish, Polish, Turkish, Spanish, German, Galician, Indonesian and Korean Translations were added.</li>
<li>Views now adapt better to different window sizes, thanks to libhandy HdyColumn</li> <li>Views now adapt better to different window sizes, thanks to libhandy HdyColumn</li>
<li>The update indicator was moved to an In-App notification</li> <li>The update indicator was moved to an In-App notification</li>
<li>Performance improvments when loading Show Cover images.</li> <li>Performance improvements when loading Show Cover images.</li>
<li>Improved handling of HTTP Redirects</li> <li>Improved handling of HTTP Redirects</li>
</ul> </ul>
</description> </description>

View File

@ -88,7 +88,7 @@ use crate::i18n::i18n;
pub(crate) fn lazy_load<T, C, F, W, U>( pub(crate) fn lazy_load<T, C, F, W, U>(
data: T, data: T,
container: WeakRef<C>, container: WeakRef<C>,
mut contructor: F, mut constructor: F,
callback: U, callback: U,
) where ) where
T: IntoIterator + 'static, T: IntoIterator + 'static,
@ -104,7 +104,7 @@ pub(crate) fn lazy_load<T, C, F, W, U>(
None => return, None => return,
}; };
let widget = contructor(x); let widget = constructor(x);
container.add(&widget); container.add(&widget);
widget.show(); widget.show();
}; };