2025-07-02 20:11:43 -04:00
|
|
|
|
use super::files::*;
|
|
|
|
|
|
use super::net::*;
|
2025-07-03 20:57:10 -04:00
|
|
|
|
use chrono::DateTime;
|
2025-07-25 19:03:50 -04:00
|
|
|
|
use chrono::Utc;
|
2025-12-04 15:20:12 -05:00
|
|
|
|
use rusqlite::Row;
|
2025-07-25 19:03:50 -04:00
|
|
|
|
//Maybe use a different time?
|
|
|
|
|
|
use rusqlite::{Connection, Result};
|
|
|
|
|
|
use std::path::PathBuf;
|
2025-01-10 07:32:12 -05:00
|
|
|
|
/*
Cache is the in-memory database.
Any changes/updates are written to the on-disk database file.

NOTE(review): no in-memory cache is visible in this module — every function
here opens the on-disk SQLite file directly via get_db(). Confirm whether
this comment describes a planned design or is stale.
*/
|
|
|
|
|
|
|
|
|
|
|
|
const DB_LOCATION: &str = "rsscar.db";
|
|
|
|
|
|
|
2025-07-02 20:11:43 -04:00
|
|
|
|
fn get_db_path() -> PathBuf {
|
2025-05-29 12:07:36 -04:00
|
|
|
|
get_data_directory().join(DB_LOCATION)
|
|
|
|
|
|
}
|
2025-01-10 07:32:12 -05:00
|
|
|
|
|
2025-07-25 19:03:50 -04:00
|
|
|
|
fn get_db() -> Connection {
|
2025-07-03 20:57:37 -04:00
|
|
|
|
Connection::open(get_db_path()).unwrap()
|
|
|
|
|
|
}
|
2025-07-04 11:38:49 -04:00
|
|
|
|
//url needs to be from the feed URL NOT the url in the channel itself!!
|
2025-07-03 20:57:10 -04:00
|
|
|
|
const FEEDS_TABLE_CREATE: &str = "CREATE TABLE IF NOT EXISTS 'feeds' (
|
2025-01-10 07:32:12 -05:00
|
|
|
|
'feedID' INTEGER NOT NULL,
|
|
|
|
|
|
'title' TEXT NOT NULL,
|
|
|
|
|
|
'description' TEXT,
|
2025-09-07 04:03:59 -04:00
|
|
|
|
'icon' text,
|
2025-07-25 18:40:50 -04:00
|
|
|
|
'url' text not null unique,
|
2025-07-03 11:25:23 -04:00
|
|
|
|
'subscribed' INTEGER NOT NULL default 0,
|
2025-01-10 07:32:12 -05:00
|
|
|
|
'last_updated' TEXT ,
|
|
|
|
|
|
PRIMARY KEY('feedID')
|
|
|
|
|
|
);";
|
2025-07-02 20:11:43 -04:00
|
|
|
|
|
2025-07-03 20:57:10 -04:00
|
|
|
|
const FEEDS_INDEX_CREATE: &str = "CREATE INDEX IF NOT EXISTS 'subscribed_feeds_idx' ON 'feeds' (
|
2025-01-10 07:32:12 -05:00
|
|
|
|
'feedID' ASC
|
|
|
|
|
|
) WHERE 'subscribed' = 1;";
|
2025-05-28 14:53:05 -04:00
|
|
|
|
|
|
|
|
|
|
/* */
|
2025-07-03 20:57:10 -04:00
|
|
|
|
const ITEMS_TABLE_CREATE: &str = "CREATE TABLE IF NOT EXISTS 'items' (
|
2025-01-10 07:32:12 -05:00
|
|
|
|
'itemID' INTEGER NOT NULL,
|
2025-07-04 11:38:49 -04:00
|
|
|
|
'feedID' INTEGER NOT NULL,
|
2025-01-10 07:32:12 -05:00
|
|
|
|
'title' TEXT NOT NULL,
|
2025-09-07 04:03:59 -04:00
|
|
|
|
'icon' text,
|
2025-05-29 12:07:36 -04:00
|
|
|
|
'url' text not null unique on conflict replace,
|
2025-01-10 07:32:12 -05:00
|
|
|
|
'description' TEXT,
|
|
|
|
|
|
'content' TEXT,
|
|
|
|
|
|
'read' INTEGER DEFAULT 0,
|
2025-07-23 12:38:05 -04:00
|
|
|
|
'date' text default '',
|
|
|
|
|
|
'media' text default '',
|
2025-07-04 11:38:49 -04:00
|
|
|
|
PRIMARY KEY('itemID'),
|
|
|
|
|
|
FOREIGN KEY('feedID') REFERENCES 'feeds'('feedID')
|
2025-01-10 07:32:12 -05:00
|
|
|
|
);";
|
2025-07-03 20:57:10 -04:00
|
|
|
|
const ITEMS_INDEX_CREATE: &str = "CREATE INDEX IF NOT EXISTS 'items_idx' on 'items'('itemID' ASC);";
|
2025-01-10 07:32:12 -05:00
|
|
|
|
|
2025-07-25 19:03:50 -04:00
|
|
|
|
const ITEMS_FEED_INDEX_CREATE: &str =
|
|
|
|
|
|
"CREATE INDEX IF NOT EXISTS 'item_feed_idx' on 'items'('feedID' ASC);";
|
2025-07-04 22:14:24 -04:00
|
|
|
|
|
2025-07-03 20:57:37 -04:00
|
|
|
|
const DB_RESET: &str = "
|
|
|
|
|
|
drop table items;
|
2025-07-04 11:38:49 -04:00
|
|
|
|
drop table feeds;
|
2025-07-03 20:57:37 -04:00
|
|
|
|
";
|
|
|
|
|
|
|
2025-07-25 19:03:50 -04:00
|
|
|
|
pub fn reset() {
|
2025-07-03 20:57:37 -04:00
|
|
|
|
println!("⚠️WARNING⚠️\nResetting Database");
|
|
|
|
|
|
let conn = get_db();
|
|
|
|
|
|
match conn.execute_batch(DB_RESET) {
|
2025-07-25 19:03:50 -04:00
|
|
|
|
Ok(_) => {
|
|
|
|
|
|
println!("Database successfully wiped.")
|
|
|
|
|
|
}
|
|
|
|
|
|
Err(e) => {
|
|
|
|
|
|
panic!("Error erasing database.\nError: {0}", e)
|
|
|
|
|
|
}
|
2025-07-03 20:57:37 -04:00
|
|
|
|
}
|
|
|
|
|
|
conn.close().unwrap();
|
|
|
|
|
|
initialize();
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-01-10 07:32:12 -05:00
|
|
|
|
pub fn initialize() {
|
2025-07-02 20:11:43 -04:00
|
|
|
|
let path = get_db_path();
|
2025-07-19 21:38:41 -04:00
|
|
|
|
println!("Database at {} initialized", path.to_string_lossy());
|
2025-07-03 20:57:37 -04:00
|
|
|
|
let conn = get_db();
|
2025-07-03 20:57:10 -04:00
|
|
|
|
conn.execute(FEEDS_TABLE_CREATE, []).unwrap();
|
|
|
|
|
|
conn.execute(FEEDS_INDEX_CREATE, []).unwrap();
|
|
|
|
|
|
conn.execute(ITEMS_TABLE_CREATE, []).unwrap();
|
|
|
|
|
|
conn.execute(ITEMS_INDEX_CREATE, []).unwrap();
|
2025-07-25 19:03:50 -04:00
|
|
|
|
conn.execute(ITEMS_FEED_INDEX_CREATE, []).unwrap();
|
2025-07-03 20:57:10 -04:00
|
|
|
|
conn.close().unwrap();
|
2025-01-10 07:32:12 -05:00
|
|
|
|
println!("Database Initialized.")
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-09-07 04:03:59 -04:00
|
|
|
|
pub fn get_feed_id_by_url(url: &str) -> Option<usize> {
|
2025-07-25 19:03:50 -04:00
|
|
|
|
let conn = get_db();
|
2025-09-07 04:03:59 -04:00
|
|
|
|
let mut stmt = conn
|
|
|
|
|
|
.prepare("select feedID from feeds where url=?1")
|
|
|
|
|
|
.unwrap();
|
|
|
|
|
|
match stmt.query_row([url], |row| row.get(0)) {
|
|
|
|
|
|
Ok(i) => Some(i),
|
|
|
|
|
|
Err(_) => None,
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
pub fn add_feed(url: &str) -> Option<usize> {
|
2025-01-10 07:32:12 -05:00
|
|
|
|
let feed = load_rss(url).unwrap();
|
2025-07-02 20:11:43 -04:00
|
|
|
|
let time = Utc::now().to_rfc2822();
|
2025-09-07 04:03:59 -04:00
|
|
|
|
let image = if let Some(i) = feed.image() {
|
|
|
|
|
|
i.url().to_owned()
|
|
|
|
|
|
} else {
|
|
|
|
|
|
"".to_owned()
|
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
let mut id = get_feed_id_by_url(url);
|
|
|
|
|
|
let conn = get_db();
|
|
|
|
|
|
match id {
|
|
|
|
|
|
Some(i) => {
|
|
|
|
|
|
match conn.execute(
|
|
|
|
|
|
"update feeds set last_updated=?1,icon=?3,title=?4,description=?5 where feedID = ?2;",
|
|
|
|
|
|
[time,i.to_string(),image,feed.title.to_owned(),feed.description.to_owned()]
|
|
|
|
|
|
){
|
|
|
|
|
|
Ok(_) => {println!("Updated feed.");}
|
|
|
|
|
|
Err(e) => {println!("Error updating feed.\n{}",e);}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
None => {
|
|
|
|
|
|
match conn.execute(
|
|
|
|
|
|
"insert into feeds(title,url,icon,description,last_updated) values(?1,?2,?3,?4,?5)",
|
|
|
|
|
|
[
|
|
|
|
|
|
feed.title.to_owned(),
|
|
|
|
|
|
url.to_string(),
|
|
|
|
|
|
image,
|
|
|
|
|
|
feed.description.to_owned(),
|
|
|
|
|
|
time,
|
|
|
|
|
|
],
|
|
|
|
|
|
) {
|
|
|
|
|
|
Ok(_) => {
|
|
|
|
|
|
id = get_feed_id_by_url(url)
|
|
|
|
|
|
}
|
|
|
|
|
|
Err(e) => {
|
|
|
|
|
|
println!("Couldn't add feed:{}\nError:{}", url, e);
|
|
|
|
|
|
return None;
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
;
|
2025-07-25 18:40:50 -04:00
|
|
|
|
}
|
|
|
|
|
|
}
|
2025-07-25 19:03:50 -04:00
|
|
|
|
|
2025-09-07 04:03:59 -04:00
|
|
|
|
match id {
|
|
|
|
|
|
Some(i) => {
|
|
|
|
|
|
store_items(feed, i);
|
|
|
|
|
|
Some(i)
|
|
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
None => {None}
|
|
|
|
|
|
}
|
2025-01-10 07:32:12 -05:00
|
|
|
|
}
|
|
|
|
|
|
|
2025-09-07 04:03:59 -04:00
|
|
|
|
fn remove_items(feed_id: usize) -> Result<usize> {
|
2025-07-25 19:03:50 -04:00
|
|
|
|
let conn = get_db();
|
|
|
|
|
|
conn.execute("delete from items where feedID = ?1", [feed_id])
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn remove_feed(feed_id: usize) {
|
|
|
|
|
|
let _ = remove_items(feed_id);
|
|
|
|
|
|
let conn = get_db();
|
2025-09-07 04:03:59 -04:00
|
|
|
|
match conn.execute("delete from feeds where feedID = ?1", [feed_id]) {
|
2025-07-25 19:03:50 -04:00
|
|
|
|
Ok(_) => {}
|
|
|
|
|
|
Err(e) => {
|
2025-09-07 04:03:59 -04:00
|
|
|
|
println!("Failed to delete feed by id: {}\nError:{}", feed_id, e);
|
2025-07-25 19:03:50 -04:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-12-05 12:45:45 -05:00
|
|
|
|
fn modify_date(s: &str) -> Option<String> {
|
|
|
|
|
|
match DateTime::parse_from_rfc2822(s){
|
|
|
|
|
|
Ok(t) => {Some(t.to_rfc3339())}
|
|
|
|
|
|
Err(_) => None
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
}
|
2025-07-25 19:03:50 -04:00
|
|
|
|
pub fn store_items(feed: rss::Channel, feed_id: usize) {
|
2025-05-29 12:07:36 -04:00
|
|
|
|
let conn = Connection::open(get_db_path()).unwrap();
|
2025-07-19 20:42:07 -04:00
|
|
|
|
feed.items.into_iter().for_each(|i: rss::Item| {
|
2025-07-23 12:38:05 -04:00
|
|
|
|
let t = i.clone();
|
2025-09-07 04:03:59 -04:00
|
|
|
|
let image = match i.itunes_ext() {
|
|
|
|
|
|
Some(ext) => match ext.image() {
|
|
|
|
|
|
Some(img) => img.to_owned(),
|
|
|
|
|
|
None => "".to_owned(),
|
|
|
|
|
|
},
|
|
|
|
|
|
None => "".to_owned(),
|
|
|
|
|
|
};
|
|
|
|
|
|
match conn.execute(
|
|
|
|
|
|
"insert into items(url,title,description,content,feedID,date,media,icon)
|
|
|
|
|
|
values(?1,?2,?3,?4,?5,?6,?7,?8)",
|
2025-07-02 20:11:43 -04:00
|
|
|
|
[
|
2025-07-19 20:42:07 -04:00
|
|
|
|
i.link,
|
2025-12-05 12:45:45 -05:00
|
|
|
|
match i.title {
|
|
|
|
|
|
Some(t) => Some(t.clone()),
|
|
|
|
|
|
None => None
|
|
|
|
|
|
},
|
|
|
|
|
|
match i.description {
|
|
|
|
|
|
Some(d) => Some(d.clone()),
|
|
|
|
|
|
None => None
|
|
|
|
|
|
},
|
|
|
|
|
|
match i.content {
|
|
|
|
|
|
Some(c) => {Some(c.clone())},
|
|
|
|
|
|
None => {None}
|
|
|
|
|
|
},
|
2025-07-23 12:38:05 -04:00
|
|
|
|
Some(feed_id.to_string()),
|
2025-12-05 12:45:45 -05:00
|
|
|
|
match i.pub_date {
|
|
|
|
|
|
Some(s) => {modify_date(&s)},
|
|
|
|
|
|
None => {None}
|
|
|
|
|
|
},
|
2025-07-23 12:38:05 -04:00
|
|
|
|
{
|
|
|
|
|
|
match t.enclosure() {
|
2025-07-25 19:03:50 -04:00
|
|
|
|
Some(e) => Some(e.url().to_owned()),
|
|
|
|
|
|
None => Some("".to_owned()),
|
2025-07-23 12:38:05 -04:00
|
|
|
|
}
|
2025-07-25 19:03:50 -04:00
|
|
|
|
},
|
2025-09-07 04:03:59 -04:00
|
|
|
|
Some(image),
|
2025-07-02 20:11:43 -04:00
|
|
|
|
],
|
2025-09-07 04:03:59 -04:00
|
|
|
|
) {
|
|
|
|
|
|
Ok(_) => {}
|
|
|
|
|
|
Err(e) => {
|
|
|
|
|
|
println!("Failed to add item.\n{}", e)
|
|
|
|
|
|
}
|
|
|
|
|
|
};
|
2025-07-02 20:11:43 -04:00
|
|
|
|
});
|
2025-07-03 20:57:10 -04:00
|
|
|
|
conn.close().unwrap();
|
2025-12-04 15:20:12 -05:00
|
|
|
|
println!("Finished storing items")
|
2025-01-10 07:32:12 -05:00
|
|
|
|
}
|
|
|
|
|
|
|
2025-07-25 19:03:50 -04:00
|
|
|
|
#[derive(Debug, Clone)]
|
2025-07-03 20:57:37 -04:00
|
|
|
|
pub struct FeedItem {
|
|
|
|
|
|
pub item_id: usize,
|
|
|
|
|
|
pub title: String,
|
|
|
|
|
|
pub url: String,
|
|
|
|
|
|
pub icon: Option<String>,
|
|
|
|
|
|
pub description: Option<String>,
|
2025-07-23 12:38:05 -04:00
|
|
|
|
pub content: Option<String>,
|
|
|
|
|
|
pub date: Option<String>,
|
2025-07-25 19:03:50 -04:00
|
|
|
|
pub media: Option<String>, //date missing! needed for ordering!!!
|
2025-07-03 20:57:37 -04:00
|
|
|
|
}
|
|
|
|
|
|
|
2025-12-04 15:20:12 -05:00
|
|
|
|
|
|
|
|
|
|
fn row2feed_item(row: &Row) -> FeedItem{
|
|
|
|
|
|
FeedItem {
|
|
|
|
|
|
item_id: row.get(0).unwrap(),
|
|
|
|
|
|
title: row.get(1).unwrap_or("NO TITLE!".to_owned()),
|
|
|
|
|
|
url: row.get(2).unwrap(),
|
|
|
|
|
|
date: row.get(3).unwrap(),
|
|
|
|
|
|
media: row.get(4).unwrap(),
|
|
|
|
|
|
description: row.get(5).unwrap(),
|
|
|
|
|
|
content: row.get(6).unwrap(),
|
|
|
|
|
|
icon: None,
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn get_all_items() -> Vec<FeedItem> {
|
|
|
|
|
|
let conn = get_db();
|
2025-12-05 12:45:45 -05:00
|
|
|
|
let mut stmt = conn.prepare("select itemID,title,url,date,media,description,content from items order by date desc limit 50 ").unwrap();
|
2025-12-04 15:20:12 -05:00
|
|
|
|
let items: Result<Vec<FeedItem>> = stmt.query_map([],|row| {
|
|
|
|
|
|
Ok(row2feed_item(row))
|
|
|
|
|
|
}).unwrap().collect();
|
|
|
|
|
|
match items {
|
|
|
|
|
|
Ok(i) => i,
|
|
|
|
|
|
Err(_) => {
|
|
|
|
|
|
panic!("No Items found!")
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-07-25 19:03:50 -04:00
|
|
|
|
pub fn get_feed_items(feed_id: usize) -> Vec<FeedItem> {
|
2025-07-03 20:57:37 -04:00
|
|
|
|
let conn = get_db();
|
2025-07-23 12:38:05 -04:00
|
|
|
|
let mut stmt = conn.prepare("select itemID,title,url,date,media,description,content from items where feedID = ?1 order by date(date)").unwrap();
|
2025-07-25 19:03:50 -04:00
|
|
|
|
let items: Result<Vec<FeedItem>> = stmt
|
|
|
|
|
|
.query_map([feed_id], |row| {
|
2025-12-04 15:20:12 -05:00
|
|
|
|
Ok(row2feed_item(row))
|
2025-07-03 20:57:37 -04:00
|
|
|
|
})
|
2025-07-25 19:03:50 -04:00
|
|
|
|
.unwrap()
|
|
|
|
|
|
.collect();
|
2025-07-03 20:57:37 -04:00
|
|
|
|
match items {
|
2025-07-25 19:03:50 -04:00
|
|
|
|
Ok(i) => i,
|
|
|
|
|
|
Err(_) => {
|
|
|
|
|
|
panic!("No items for this feed\nFeedID:{}", feed_id)
|
|
|
|
|
|
}
|
2025-07-03 20:57:37 -04:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-07-19 20:42:07 -04:00
|
|
|
|
pub fn get_item(item_id: usize) -> FeedItem {
|
|
|
|
|
|
let conn = get_db();
|
2025-07-23 12:38:05 -04:00
|
|
|
|
let mut stmt = conn.prepare("select itemID,title,url,icon,date,media,description,content from items where itemID = ?1").unwrap();
|
2025-07-25 19:03:50 -04:00
|
|
|
|
let item: FeedItem = stmt
|
|
|
|
|
|
.query_one([item_id], |row| {
|
|
|
|
|
|
Ok(FeedItem {
|
|
|
|
|
|
item_id: row.get(0).unwrap(),
|
|
|
|
|
|
title: row.get(1).unwrap(),
|
|
|
|
|
|
url: row.get(2).unwrap(),
|
|
|
|
|
|
icon: row.get(3).unwrap(),
|
|
|
|
|
|
date: row.get(4).unwrap(),
|
|
|
|
|
|
media: row.get(5).unwrap(),
|
|
|
|
|
|
description: row.get(6).unwrap(),
|
|
|
|
|
|
content: row.get(7).unwrap(),
|
|
|
|
|
|
})
|
2025-07-19 20:42:07 -04:00
|
|
|
|
})
|
2025-07-25 19:03:50 -04:00
|
|
|
|
.unwrap();
|
2025-07-19 20:42:07 -04:00
|
|
|
|
item
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-07-02 20:11:43 -04:00
|
|
|
|
pub struct Feed {
|
2025-07-03 20:57:37 -04:00
|
|
|
|
pub feed_id: usize,
|
2025-07-03 11:25:23 -04:00
|
|
|
|
pub title: String,
|
|
|
|
|
|
pub description: Option<String>,
|
|
|
|
|
|
pub icon: Option<String>,
|
|
|
|
|
|
pub url: String,
|
|
|
|
|
|
pub subscribed: bool,
|
2025-07-03 20:57:10 -04:00
|
|
|
|
pub last_updated: Option<DateTime<Utc>>,
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-07-25 19:03:50 -04:00
|
|
|
|
fn time_string_conversion(str: String) -> Option<DateTime<Utc>> {
|
2025-07-03 20:57:10 -04:00
|
|
|
|
match DateTime::parse_from_rfc2822(&str) {
|
2025-07-25 19:03:50 -04:00
|
|
|
|
Ok(dt) => Some(dt.to_utc()),
|
|
|
|
|
|
Err(_) => None,
|
|
|
|
|
|
}
|
2025-07-02 20:11:43 -04:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn get_feeds() -> Vec<Feed> {
|
2025-07-03 20:57:37 -04:00
|
|
|
|
let conn = get_db();
|
|
|
|
|
|
let mut stmt = conn.prepare("select feedID,title,description,icon,url,subscribed,last_updated from feeds order by last_updated desc").unwrap();
|
2025-07-02 20:11:43 -04:00
|
|
|
|
let rows: Result<Vec<Feed>> = stmt
|
|
|
|
|
|
.query_map([], |row| {
|
|
|
|
|
|
Ok(Feed {
|
2025-07-03 20:57:10 -04:00
|
|
|
|
feed_id: row.get(0).unwrap(),
|
2025-07-02 20:11:43 -04:00
|
|
|
|
title: row.get(1).unwrap(),
|
|
|
|
|
|
description: row.get(2).unwrap(),
|
|
|
|
|
|
icon: row.get(3).unwrap(),
|
|
|
|
|
|
url: row.get(4).unwrap(),
|
2025-07-25 19:03:50 -04:00
|
|
|
|
subscribed: row.get::<_, bool>(5).unwrap(),
|
|
|
|
|
|
last_updated: time_string_conversion(row.get(6).unwrap()),
|
2025-07-02 20:11:43 -04:00
|
|
|
|
})
|
2025-07-25 19:03:50 -04:00
|
|
|
|
})
|
|
|
|
|
|
.unwrap()
|
|
|
|
|
|
.collect();
|
2025-07-02 20:11:43 -04:00
|
|
|
|
match rows {
|
2025-07-25 19:03:50 -04:00
|
|
|
|
Ok(r) => r,
|
|
|
|
|
|
Err(_) => {
|
|
|
|
|
|
panic!("No idea what causes this")
|
2025-07-02 20:11:43 -04:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
2025-01-10 07:32:12 -05:00
|
|
|
|
pub fn update_feeds() {
|
2025-07-04 11:38:49 -04:00
|
|
|
|
todo!()
|
2025-07-02 20:11:43 -04:00
|
|
|
|
}
|