// rss-tool/src/db.rs
use super::files::*;
use super::net::*;
use chrono::{DateTime, Utc};
use rusqlite::Row;
// Maybe use a different time representation?
use rusqlite::{Connection, Result};
use std::path::PathBuf;
/*
The cache is the in-memory database; any changes or updates are
written through to the on-disk database.
*/
const DB_LOCATION: &str = "rsscar.db";
fn get_db_path() -> PathBuf {
    get_data_directory().join(DB_LOCATION)
}

fn get_db() -> Connection {
    Connection::open(get_db_path()).unwrap()
}
// `url` must be the URL the feed was fetched from, NOT the link inside the channel itself!
const FEEDS_TABLE_CREATE: &str = "CREATE TABLE IF NOT EXISTS feeds (
    feedID INTEGER NOT NULL,
    title TEXT NOT NULL,
    description TEXT,
    icon TEXT,
    url TEXT NOT NULL UNIQUE,
    subscribed INTEGER NOT NULL DEFAULT 0,
    last_updated TEXT,
    PRIMARY KEY(feedID)
);";

// Note: the WHERE clause must use an unquoted identifier; a single-quoted
// 'subscribed' would be a string literal here and never match.
const FEEDS_INDEX_CREATE: &str = "CREATE INDEX IF NOT EXISTS subscribed_feeds_idx ON feeds (
    feedID ASC
) WHERE subscribed = 1;";
const ITEMS_TABLE_CREATE: &str = "CREATE TABLE IF NOT EXISTS items (
    itemID INTEGER NOT NULL,
    feedID INTEGER NOT NULL,
    title TEXT NOT NULL,
    icon TEXT,
    url TEXT NOT NULL UNIQUE ON CONFLICT REPLACE,
    description TEXT,
    content TEXT,
    read INTEGER DEFAULT 0,
    date TEXT DEFAULT '',
    media TEXT DEFAULT '',
    PRIMARY KEY(itemID),
    FOREIGN KEY(feedID) REFERENCES feeds(feedID)
);";

const ITEMS_INDEX_CREATE: &str =
    "CREATE INDEX IF NOT EXISTS items_idx ON items(itemID ASC);";
const ITEMS_FEED_INDEX_CREATE: &str =
    "CREATE INDEX IF NOT EXISTS item_feed_idx ON items(feedID ASC);";
const DB_RESET: &str = "
DROP TABLE IF EXISTS items;
DROP TABLE IF EXISTS feeds;
";
pub fn reset() {
    println!("WARNING⚠\nResetting Database");
    let conn = get_db();
    match conn.execute_batch(DB_RESET) {
        Ok(_) => {
            println!("Database successfully wiped.")
        }
        Err(e) => {
            panic!("Error erasing database.\nError: {0}", e)
        }
    }
    conn.close().unwrap();
    initialize();
}
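
/// Creates the tables and indexes if they do not already exist.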
pub fn initialize() {
    let path = get_db_path();
    println!("Initializing database at {}", path.to_string_lossy());
    let conn = get_db();
    conn.execute(FEEDS_TABLE_CREATE, []).unwrap();
    conn.execute(FEEDS_INDEX_CREATE, []).unwrap();
    conn.execute(ITEMS_TABLE_CREATE, []).unwrap();
    conn.execute(ITEMS_INDEX_CREATE, []).unwrap();
    conn.execute(ITEMS_FEED_INDEX_CREATE, []).unwrap();
    conn.close().unwrap();
    println!("Database initialized.")
}
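
/// Looks up a feed's ID by the URL it was added with; returns None if the URL is unknown.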
pub fn get_feed_id_by_url(url: &str) -> Option<usize> {
    let conn = get_db();
    let mut stmt = conn
        .prepare("select feedID from feeds where url = ?1")
        .unwrap();
    stmt.query_row([url], |row| row.get(0)).ok()
}
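
/// Fetches the feed at `url`, inserts it (or refreshes the existing row's metadata),
/// stores its items, and returns the feed's ID on success.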
pub fn add_feed(url: &str) -> Option<usize> {
    let feed = load_rss(url).unwrap();
    let time = Utc::now().to_rfc2822();
    let image = if let Some(i) = feed.image() {
        i.url().to_owned()
    } else {
        "".to_owned()
    };
    let mut id = get_feed_id_by_url(url);
    let conn = get_db();
    match id {
        Some(i) => {
            match conn.execute(
                "update feeds set last_updated=?1,icon=?3,title=?4,description=?5 where feedID = ?2;",
                [
                    time,
                    i.to_string(),
                    image,
                    feed.title.to_owned(),
                    feed.description.to_owned(),
                ],
            ) {
                Ok(_) => {
                    println!("Updated feed.");
                }
                Err(e) => {
                    println!("Error updating feed.\n{}", e);
                }
            }
        }
        None => {
            match conn.execute(
                "insert into feeds(title,url,icon,description,last_updated) values(?1,?2,?3,?4,?5)",
                [
                    feed.title.to_owned(),
                    url.to_string(),
                    image,
                    feed.description.to_owned(),
                    time,
                ],
            ) {
                Ok(_) => id = get_feed_id_by_url(url),
                Err(e) => {
                    println!("Couldn't add feed:{}\nError:{}", url, e);
                    return None;
                }
            }
        }
    }
    match id {
        Some(i) => {
            store_items(feed, i);
            Some(i)
        }
        None => None,
    }
}
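
/// Deletes every stored item belonging to the given feed.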
fn remove_items(feed_id: usize) -> Result<usize> {
    let conn = get_db();
    conn.execute("delete from items where feedID = ?1", [feed_id])
}
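
/// Deletes a feed and all of its items.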
pub fn remove_feed(feed_id: usize) {
    let _ = remove_items(feed_id);
    let conn = get_db();
    match conn.execute("delete from feeds where feedID = ?1", [feed_id]) {
        Ok(_) => {}
        Err(e) => {
            println!("Failed to delete feed by id: {}\nError:{}", feed_id, e);
        }
    }
}
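
/// Converts an item's RFC 2822 `pubDate` into RFC 3339 for the `date` column,
/// e.g. "Tue, 01 Jul 2003 10:52:37 +0200" becomes a string like "2003-07-01T10:52:37+02:00".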
fn modify_date(s: &str) -> Option<String> {
    match DateTime::parse_from_rfc2822(s) {
        Ok(t) => Some(t.to_rfc3339()),
        Err(_) => None,
    }
}
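
/// Inserts every item of `feed` for the feed row `feed_id`; since `items.url` is
/// declared UNIQUE ON CONFLICT REPLACE, re-storing an item replaces the old row.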
pub fn store_items(feed: rss::Channel, feed_id: usize) {
    let conn = get_db();
    feed.items.into_iter().for_each(|i: rss::Item| {
        let image = match i.itunes_ext() {
            Some(ext) => match ext.image() {
                Some(img) => img.to_owned(),
                None => "".to_owned(),
            },
            None => "".to_owned(),
        };
        let media = match i.enclosure() {
            Some(e) => Some(e.url().to_owned()),
            None => Some("".to_owned()),
        };
        let date = match i.pub_date {
            Some(ref s) => modify_date(s),
            None => None,
        };
        match conn.execute(
            "insert into items(url,title,description,content,feedID,date,media,icon)
             values(?1,?2,?3,?4,?5,?6,?7,?8)",
            [
                i.link,
                i.title,
                i.description,
                i.content,
                Some(feed_id.to_string()),
                date,
                media,
                Some(image),
            ],
        ) {
            Ok(_) => {}
            Err(e) => {
                println!("Failed to add item.\n{}", e)
            }
        };
    });
    conn.close().unwrap();
    println!("Finished storing items")
}
#[derive(Debug, Clone)]
pub struct FeedItem {
    pub item_id: usize,
    pub title: String,
    pub url: String,
    pub icon: Option<String>,
    pub description: Option<String>,
    pub content: Option<String>,
    pub date: Option<String>, // needed for ordering
    pub media: Option<String>,
}
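
/// Maps a row selected as (itemID, title, url, date, media, description, content)
/// into a `FeedItem`; `icon` is not part of that projection and is left as `None`.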
fn row2feed_item(row: &Row) -> FeedItem {
    FeedItem {
        item_id: row.get(0).unwrap(),
        title: row.get(1).unwrap_or("NO TITLE!".to_owned()),
        url: row.get(2).unwrap(),
        date: row.get(3).unwrap(),
        media: row.get(4).unwrap(),
        description: row.get(5).unwrap(),
        content: row.get(6).unwrap(),
        icon: None,
    }
}
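
/// Returns the 50 most recent items across all feeds, newest first.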
pub fn get_all_items() -> Vec<FeedItem> {
    let conn = get_db();
    let mut stmt = conn
        .prepare("select itemID,title,url,date,media,description,content from items order by date desc limit 50")
        .unwrap();
    let items: Result<Vec<FeedItem>> = stmt
        .query_map([], |row| Ok(row2feed_item(row)))
        .unwrap()
        .collect();
    match items {
        Ok(i) => i,
        Err(e) => {
            panic!("Failed to read items from the database.\nError:{}", e)
        }
    }
}
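
/// Returns every stored item for a single feed, ordered by date (oldest first).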
pub fn get_feed_items(feed_id: usize) -> Vec<FeedItem> {
    let conn = get_db();
    let mut stmt = conn
        .prepare("select itemID,title,url,date,media,description,content from items where feedID = ?1 order by date(date)")
        .unwrap();
    let items: Result<Vec<FeedItem>> = stmt
        .query_map([feed_id], |row| Ok(row2feed_item(row)))
        .unwrap()
        .collect();
    match items {
        Ok(i) => i,
        Err(e) => {
            panic!("Failed to read items for feedID {}.\nError:{}", feed_id, e)
        }
    }
}
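
/// Fetches a single item by its ID; panics if no such item exists.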
pub fn get_item(item_id: usize) -> FeedItem {
    let conn = get_db();
    let mut stmt = conn
        .prepare("select itemID,title,url,icon,date,media,description,content from items where itemID = ?1")
        .unwrap();
    stmt.query_one([item_id], |row| {
        Ok(FeedItem {
            item_id: row.get(0).unwrap(),
            title: row.get(1).unwrap(),
            url: row.get(2).unwrap(),
            icon: row.get(3).unwrap(),
            date: row.get(4).unwrap(),
            media: row.get(5).unwrap(),
            description: row.get(6).unwrap(),
            content: row.get(7).unwrap(),
        })
    })
    .unwrap()
}
pub struct Feed {
    pub feed_id: usize,
    pub title: String,
    pub description: Option<String>,
    pub icon: Option<String>,
    pub url: String,
    pub subscribed: bool,
    pub last_updated: Option<DateTime<Utc>>,
}
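
/// Parses the RFC 2822 `last_updated` string back into a UTC timestamp,
/// or None if it cannot be parsed.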
fn time_string_conversion(s: String) -> Option<DateTime<Utc>> {
    match DateTime::parse_from_rfc2822(&s) {
        Ok(dt) => Some(dt.to_utc()),
        Err(_) => None,
    }
}
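
/// Returns all feeds, most recently updated first.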
pub fn get_feeds() -> Vec<Feed> {
    let conn = get_db();
    let mut stmt = conn
        .prepare("select feedID,title,description,icon,url,subscribed,last_updated from feeds order by last_updated desc")
        .unwrap();
    let rows: Result<Vec<Feed>> = stmt
        .query_map([], |row| {
            Ok(Feed {
                feed_id: row.get(0).unwrap(),
                title: row.get(1).unwrap(),
                description: row.get(2).unwrap(),
                icon: row.get(3).unwrap(),
                url: row.get(4).unwrap(),
                subscribed: row.get::<_, bool>(5).unwrap(),
                last_updated: time_string_conversion(row.get(6).unwrap()),
            })
        })
        .unwrap()
        .collect();
    match rows {
        Ok(r) => r,
        Err(e) => {
            panic!("Failed to read feeds from the database.\nError:{}", e)
        }
    }
}
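
/// Not implemented yet; presumably intended to re-fetch feeds and store any new items.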
pub fn update_feeds() {
    todo!()
}
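
// A minimal test sketch (not part of the original file) covering the pure
// date-parsing helpers above; it assumes only chrono and the functions defined
// in this module, and touches no network or database state.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn modify_date_converts_rfc2822_to_rfc3339() {
        let converted = modify_date("Tue, 01 Jul 2003 10:52:37 +0200")
            .expect("valid RFC 2822 date should convert");
        // The stored value is RFC 3339 with the original offset preserved.
        assert!(converted.starts_with("2003-07-01T10:52:37"));
        assert!(converted.ends_with("+02:00"));
        // Unparseable dates are dropped rather than stored.
        assert_eq!(modify_date("not a date"), None);
    }

    #[test]
    fn time_string_conversion_handles_good_and_bad_input() {
        let parsed = time_string_conversion("Tue, 01 Jul 2003 10:52:37 +0200".to_owned());
        assert!(parsed.is_some());
        assert!(time_string_conversion("garbage".to_owned()).is_none());
    }
}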