reorganise modules

This commit is contained in:
Aaron Manning
2025-09-01 10:07:28 +10:00
parent 2b55c5b0af
commit 5291730793
5 changed files with 181 additions and 170 deletions

View File

@@ -1,92 +1,21 @@
use std::fs;
use std::path;
use std::borrow::Cow;
use std::iter::Iterator;
use std::collections::{HashMap, BTreeMap, HashSet};
use std::{
fs,
path,
borrow::Cow,
collections::HashSet,
};
use anyhow::Context;
use sanitise_file_name::sanitise;
use crate::folders;
use crate::rss;
/// On-disk record of a podcast's state: which episodes are known and
/// which files have been downloaded. Serialised to/from TOML.
#[derive(Debug, Default, serde::Serialize, serde::Deserialize)]
pub(crate) struct Specification<'a> {
    /// Maps an episode id (GUID, or URL when no GUID is present) to the
    /// path of its downloaded file, stored relative to the podcast's
    /// output directory.
    files: HashMap<Cow<'a, str>, Cow<'a, path::Path>>,
    /// This is a collection of episodes, where each entry contains a `Vec` of
    /// episodes to allow for the possibility that multiple episodes have the
    /// same timestamp.
    feed: BTreeMap<chrono::NaiveDateTime, Vec<Episode<'a>>>,
    /// URL of the podcast's artwork image, if one has been recorded;
    /// compared against the feed's image URL to detect artwork changes.
    image_url: Option<Cow<'a, str>>,
}
impl<'a> Specification<'a> {
pub(crate) fn read_from_with_default(path: &path::Path) -> Result<Self, anyhow::Error> {
Ok(if path.is_file() {
toml::from_str(&fs::read_to_string(&path)?[..])?
} else {
Specification::default()
})
}
pub(crate) fn read_from(path: &path::Path) -> Result<Self, anyhow::Error> {
Ok(if path.is_file() {
toml::from_str(&fs::read_to_string(&path)?[..])?
} else {
anyhow::bail!("could not find specification for the desired podcast")
})
}
pub(crate) fn write_to(&self, path: &path::Path) -> Result<(), anyhow::Error> {
Ok(fs::write(path, toml::to_string(self)?.as_bytes())?)
}
pub(crate) fn feed_iter(&self) -> impl Iterator<Item = (&chrono::NaiveDateTime, &Vec<Episode<'a>>)> {
self.feed.iter()
}
pub(crate) fn feed_iter_mut(&mut self) -> impl Iterator<Item = (&chrono::NaiveDateTime, &mut Vec<Episode<'a>>)> {
self.feed.iter_mut()
}
pub(crate) fn path_from_id(&self, id: &str) -> Option<&path::Path> {
self.files.get(id).map(|v| &**v)
}
pub(crate) fn feed(&self) -> &BTreeMap<chrono::NaiveDateTime, Vec<Episode<'a>>> {
&self.feed
}
pub(crate) fn into_feed_and_files(self) -> (BTreeMap<chrono::NaiveDateTime, Vec<Episode<'a>>>, HashMap<Cow<'a, str>, Cow<'a, path::Path>>) {
(self.feed, self.files)
}
}
/// A single podcast episode tracked in the specification.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub(crate) struct Episode<'a> {
    /// Episode title.
    title: Cow<'a, str>,
    /// Show notes pulled from description or summary tag.
    show_notes: Option<Cow<'a, str>>,
    /// This is the GUID or the URL if the GUID is not present.
    id: Cow<'a, str>,
    /// If the episode exists in the latest version of the feed.
    current: bool,
    /// Flag to keep track of which episodes have been listened to.
    #[serde(default)]
    pub(crate) listened: bool,
}
impl<'a> Episode<'a> {
    /// The episode's title.
    pub(crate) fn title(&self) -> &str {
        // `&Cow<str>` deref-coerces to `&str`; same style as `id` below.
        &self.title
    }

    /// The episode's identifier: the GUID, or the URL when no GUID exists.
    pub(crate) fn id(&self) -> &str {
        &self.id
    }
}
use crate::{
rss,
folders,
manage::{
Specification,
Episode,
},
};
fn download_to_file(url: &str, path: &path::Path) -> anyhow::Result<()> {
let response = minreq::get(url)
@@ -169,7 +98,7 @@ fn update_artwork<'a, 'b>(
_ => None,
};
match (&spec.image_url, image_url) {
match (spec.image_url.as_deref(), image_url) {
// They match, so no need to change anything
(Some(old), Some(new)) if old == new => (),
// New and different URL
@@ -268,7 +197,7 @@ pub(crate) fn update_podcast_from_feed(
let id = guid.unwrap_or(url);
match spec.files.get(id) {
match spec.path_from_id(id) {
// File already downloaded
Some(path) => {
// File has been deleted by another process but the specification hasn't been updated
@@ -315,30 +244,15 @@ pub(crate) fn update_podcast_from_feed(
Ok(()) => {
let file_path = file_path.canonicalize().unwrap();
spec.files.insert(
Cow::from(id.to_owned()),
Cow::from(file_path.strip_prefix(&output).unwrap().to_owned()),
);
spec.insert_into_files(
id.to_owned(),
file_path.strip_prefix(&output).unwrap().to_owned(),
)?;
let episode = Episode {
show_notes: description,
id: Cow::from(id.to_owned()),
current: true,
title,
listened: false,
};
let episode = Episode::new_downloaded(title, description, id.to_owned());
match spec.feed.get_mut(&item.published) {
Some(existing) => {
existing.push(episode)
},
None => {
spec.feed.insert(
item.published,
vec![episode],
);
}
}
spec.insert_into_feed(item.published, episode);
// Update the file as we go, but only if a change has occurred
spec.write_to(&spec_file)?;
@@ -354,9 +268,9 @@ pub(crate) fn update_podcast_from_feed(
let mut feed_change = false;
// Setting episodes which have been removed to no longer be current
for (_, episodes) in &mut spec.feed {
for (_, episodes) in spec.feed_iter_mut() {
for episode in episodes {
if !current_episodes.contains(episode.id.as_ref()) {
if !current_episodes.contains(episode.id()) {
episode.current = false;
feed_change = true;
}