use anyhow::{Context, Result};
use chrono::{DateTime, Utc};
use quick_xml::de::from_str;
use serde::Deserialize;
use std::fs;
use std::path::{Path, PathBuf};

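/// Lifecycle of a feed fetch: in progress, finished, or failed with a message.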
#[derive(Debug, Clone)]
#[allow(dead_code)]
pub enum FeedState {
    Loading,
    Loaded,
    Error(String),
}

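/// A single podcast subscription: its display name, RSS URL, and the
/// episodes discovered from the feed and/or the on-disk cache.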
#[derive(Debug, Clone)]
pub struct Feed {
    pub name: String,
    pub url: String,
    pub episodes: Vec<Episode>,
    pub state: FeedState,
}

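/// One playable episode. `is_cached` and `local_path` are filled in after
/// the cache directory is scanned; for cache-only entries, `enclosure_url`
/// is a `file://` URL pointing at the local file.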
#[derive(Debug, Clone)]
pub struct Episode {
    pub title: String,
    pub description: String,
    pub enclosure_url: String,
    pub published_at: DateTime<Utc>,
    pub _duration: Option<String>,
    pub is_cached: bool,
    pub local_path: Option<String>,
}

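// Minimal serde mirror of an RSS 2.0 document: only the elements this module
// actually reads are declared; unknown elements are skipped during
// deserialization.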
#[derive(Debug, Deserialize)]
struct RssChannel {
    #[serde(rename = "item")]
    items: Vec<RssItem>,
}

#[derive(Debug, Deserialize)]
struct RssItem {
    title: Option<String>,
    description: Option<String>,
    enclosure: Option<RssEnclosure>,
    #[serde(rename = "pubDate")]
    pub_date: Option<String>,
    // Matches a plain <duration> element; a namespaced <itunes:duration>
    // tag would likely need rename = "itunes:duration" instead.
    #[serde(rename = "duration", default)]
    duration: Option<String>,
}

#[derive(Debug, Deserialize)]
struct RssEnclosure {
    // "@url" is quick-xml's serde syntax for an XML attribute.
    #[serde(rename = "@url")]
    url: String,
}

#[derive(Debug, Deserialize)]
struct Rss {
    channel: RssChannel,
}

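// Typical call sequence (a sketch; the feed URL below is hypothetical and
// must point at an RSS 2.0 document with <enclosure> tags):
//
//     let mut feed = Feed::new("My Show".into(), "https://example.com/rss".into());
//     match feed.fetch_episodes() {
//         Ok(()) => println!("loaded {} episodes", feed.episodes.len()),
//         Err(e) => eprintln!("refresh failed: {e}"),
//     }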
impl Feed {
    /// Creates a feed in the `Loading` state with no episodes yet.
    pub fn new(name: String, url: String) -> Self {
        Self {
            name,
            url,
            episodes: Vec::new(),
            state: FeedState::Loading,
        }
    }

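    /// Blocking refresh: downloads and parses the feed, updating `state` to
    /// `Loaded` or `Error` and passing through the underlying error, if any.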
    pub fn fetch_episodes(&mut self) -> Result<()> {
        self.state = FeedState::Loading;

        match self.try_fetch_episodes() {
            Ok(()) => {
                self.state = FeedState::Loaded;
                Ok(())
            }
            Err(e) => {
                self.state = FeedState::Error(e.to_string());
                Err(e)
            }
        }
    }

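    /// Fetches the RSS document over HTTP, deserializes it, and rebuilds
    /// `self.episodes` (RSS items merged with locally cached files).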
    fn try_fetch_episodes(&mut self) -> Result<()> {
        let response = reqwest::blocking::get(&self.url)
            .with_context(|| format!("Failed to fetch feed: {}", self.url))?;

        let content = response
            .text()
            .with_context(|| "Failed to read response body")?;

        let rss: Rss = from_str(&content).with_context(|| "Failed to parse RSS feed")?;

        self.episodes = rss
            .channel
            .items
            .into_iter()
            .filter_map(|item| self.parse_episode(item))
            .collect();

        // Sort episodes by date (newest first)
        self.episodes
            .sort_by(|a, b| b.published_at.cmp(&a.published_at));

        // Scan for cached episodes and merge them
        self.scan_and_merge_cached_episodes();

        Ok(())
    }

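    /// Reconciles the RSS episode list with the on-disk cache, then restores
    /// newest-first ordering.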
    fn scan_and_merge_cached_episodes(&mut self) {
        // First, mark RSS episodes that are cached
        self.mark_cached_episodes();

        // Then, add cache-only episodes that aren't in RSS
        self.add_cached_only_episodes();

        // Sort again after merging
        self.episodes
            .sort_by(|a, b| b.published_at.cmp(&a.published_at));
    }

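    /// Flags episodes whose expected cache file exists and is non-empty,
    /// recording the local path for playback.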
    fn mark_cached_episodes(&mut self) {
        let cache_dir = self.get_cache_directory();
        if !cache_dir.exists() {
            return;
        }

        // Collect cache paths first to avoid borrowing issues
        let cache_paths: Vec<_> = self
            .episodes
            .iter()
            .map(|ep| self.get_cache_path_for_url(&ep.enclosure_url))
            .collect();

        for (episode, cache_path) in self.episodes.iter_mut().zip(cache_paths.iter()) {
            if cache_path.exists() && cache_path.metadata().map(|m| m.len() > 0).unwrap_or(false) {
                episode.is_cached = true;
                episode.local_path = Some(cache_path.to_string_lossy().to_string());
            }
        }
    }

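    /// Walks the cache directory and appends synthetic episodes for audio
    /// files that no RSS item accounts for (e.g. items that have since
    /// dropped out of the feed).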
    fn add_cached_only_episodes(&mut self) {
        let cache_dir = self.get_cache_directory();
        if !cache_dir.exists() {
            return;
        }

        if let Ok(entries) = fs::read_dir(&cache_dir) {
            for entry in entries.flatten() {
                let path = entry.path();
                if path.is_file() && self.is_audio_file(&path) {
                    // Check if this cached file is already represented in RSS episodes
                    let filename = path.file_name().unwrap().to_string_lossy();
                    let already_exists = self.episodes.iter().any(|ep| {
                        ep.local_path
                            .as_ref()
                            .map(|p| p.contains(&*filename))
                            .unwrap_or(false)
                    });

                    if !already_exists {
                        // Create a cache-only episode
                        if let Some(cached_episode) = self.create_cached_episode(&path) {
                            self.episodes.push(cached_episode);
                        }
                    }
                }
            }
        }
    }

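    /// Cache location: `~/Music/<feed name>` (falls back to `/tmp` when no
    /// home directory can be resolved).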
    fn get_cache_directory(&self) -> PathBuf {
        let home = dirs::home_dir().unwrap_or_else(|| PathBuf::from("/tmp"));
        home.join("Music").join(&self.name)
    }

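    /// Derives the cache file path for an enclosure URL: the last path
    /// segment, stripped of any query string and sanitized to
    /// alphanumerics, dots, and dashes.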
    fn get_cache_path_for_url(&self, url: &str) -> PathBuf {
        let cache_dir = self.get_cache_directory();
        let filename = url.split('/').last().unwrap_or("episode.mp3");
        let filename = filename.split('?').next().unwrap_or(filename);
        let safe_filename = filename
            .chars()
            .map(|c| {
                if c.is_alphanumeric() || c == '.' || c == '-' {
                    c
                } else {
                    '_'
                }
            })
            .collect::<String>();
        cache_dir.join(safe_filename)
    }

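    /// Returns true for common audio file extensions.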
    fn is_audio_file(&self, path: &Path) -> bool {
        if let Some(ext) = path.extension() {
            let ext = ext.to_string_lossy().to_lowercase();
            matches!(ext.as_str(), "mp3" | "m4a" | "aac" | "ogg" | "flac" | "wav")
        } else {
            false
        }
    }

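    /// Builds a synthetic `Episode` for a cached file with no RSS
    /// counterpart, using the file stem as a title and the file's
    /// modification time as its publication date.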
    fn create_cached_episode(&self, path: &Path) -> Option<Episode> {
        let filename = path.file_stem()?.to_string_lossy();
        let title = filename.replace('_', " ").replace('-', " ");

        let published_at = path
            .metadata()
            .ok()
            .and_then(|m| m.modified().ok())
            .and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok())
            .map(|d| DateTime::from_timestamp(d.as_secs() as i64, 0).unwrap_or_else(Utc::now))
            .unwrap_or_else(Utc::now);

        Some(Episode {
            title,
            description: "Cached episode (no RSS data)".to_string(),
            enclosure_url: format!("file://{}", path.display()), // Local file URL
            published_at,
            _duration: None,
            is_cached: true,
            local_path: Some(path.to_string_lossy().to_string()),
        })
    }

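    /// Converts one RSS `<item>` into an `Episode`, skipping items that lack
    /// a title or an enclosure URL. `pubDate` is parsed as RFC 2822 (the
    /// format RSS 2.0 specifies); unparseable or missing dates fall back to now.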
    fn parse_episode(&self, item: RssItem) -> Option<Episode> {
        let title = item.title?;
        let enclosure_url = item.enclosure?.url;

        let published_at = item
            .pub_date
            .and_then(|date_str| {
                // RSS 2.0 dates are RFC 2822, e.g. "Mon, 02 Jan 2006 15:04:05 -0700"
                DateTime::parse_from_rfc2822(&date_str)
                    .map(|dt| dt.with_timezone(&Utc))
                    .ok()
            })
            .unwrap_or_else(Utc::now);

        Some(Episode {
            title,
            description: item.description.unwrap_or_default(),
            enclosure_url,
            published_at,
            _duration: item.duration,
            is_cached: false, // Updated later by scan_and_merge_cached_episodes
            local_path: None,
        })
    }
}