2025-05-29 12:07:36 -04:00
|
|
|
use std::path::PathBuf;
|
|
|
|
|
2025-01-10 07:32:12 -05:00
|
|
|
use rusqlite::{params, Connection, Result};
|
2025-05-29 12:07:36 -04:00
|
|
|
use crate::files;
|
|
|
|
|
2025-01-10 07:32:12 -05:00
|
|
|
use super::net::{*};
|
2025-05-29 12:07:36 -04:00
|
|
|
use super::files::{*};
|
2025-01-10 07:32:12 -05:00
|
|
|
|
|
|
|
|
|
|
|
/*
The cache is the in-memory database.

Any changes/updates are written through to the file database.
*/
|
|
|
|
|
2025-05-29 12:07:36 -04:00
|
|
|
|
2025-01-10 07:32:12 -05:00
|
|
|
/// Row shape for `select title,content from items` (see `return_item`).
struct Item{
    // Item headline; the `items.title` column is declared NOT NULL.
    title: String,
    // Full article body; a NULL `content` column maps to None.
    content: Option<String>
}
|
|
|
|
|
|
|
|
// File name of the SQLite database; resolved against the application's
// data directory by `get_db_path`.
const DB_LOCATION: &str = "rsscar.db";
|
|
|
|
|
2025-05-29 12:07:36 -04:00
|
|
|
fn get_db_path() -> PathBuf{
|
|
|
|
get_data_directory().join(DB_LOCATION)
|
|
|
|
}
|
2025-01-10 07:32:12 -05:00
|
|
|
|
|
|
|
const feeds_table_create: &str = "CREATE TABLE IF NOT EXISTS 'feeds' (
|
|
|
|
'feedID' INTEGER NOT NULL,
|
|
|
|
'title' TEXT NOT NULL,
|
|
|
|
'description' TEXT,
|
|
|
|
'icon' BLOB,
|
2025-05-29 12:07:36 -04:00
|
|
|
'url' text not null unique on conflict replace,
|
2025-01-10 07:32:12 -05:00
|
|
|
'subscribed' INTEGER,
|
|
|
|
'last_updated' TEXT ,
|
|
|
|
PRIMARY KEY('feedID')
|
|
|
|
);";
|
2025-05-14 23:01:06 -04:00
|
|
|
|
2025-01-10 07:32:12 -05:00
|
|
|
// Partial index over subscribed feeds.
// BUG FIX: the predicate used to read `WHERE 'subscribed' = 1`. In a
// SQLite *expression* context a single-quoted token is a string
// literal, so the predicate compared the string 'subscribed' to 1 —
// always false — and the partial index never covered a single row.
// Double quotes make it the column reference that was intended.
// NOTE(review): lowercase const name kept because it is referenced by
// `initialize`.
const feeds_index_create: &str = "CREATE INDEX IF NOT EXISTS 'subscribed_feeds_idx' ON 'feeds' (
    'feedID' ASC
) WHERE \"subscribed\" = 1;";
|
2025-05-28 14:53:05 -04:00
|
|
|
|
|
|
|
/* */
|
2025-01-10 07:32:12 -05:00
|
|
|
const items_table_create: &str = "CREATE TABLE IF NOT EXISTS 'items' (
|
|
|
|
'itemID' INTEGER NOT NULL,
|
|
|
|
'title' TEXT NOT NULL,
|
|
|
|
'icon' BLOB,
|
2025-05-29 12:07:36 -04:00
|
|
|
'url' text not null unique on conflict replace,
|
2025-01-10 07:32:12 -05:00
|
|
|
'description' TEXT,
|
|
|
|
'content' TEXT,
|
|
|
|
'read' INTEGER DEFAULT 0,
|
|
|
|
PRIMARY KEY('itemID')
|
|
|
|
);";
|
|
|
|
// Index on itemID. NOTE(review): `itemID` is the INTEGER PRIMARY KEY
// (rowid alias), which SQLite already keys the table on — this index
// looks redundant; confirm before relying on it for performance.
const items_index_create: &str = "CREATE INDEX IF NOT EXISTS 'items_idx' on 'items'('itemID' ASC);";
|
|
|
|
|
|
|
|
pub fn initialize() {
|
2025-05-29 12:07:36 -04:00
|
|
|
|
|
|
|
let conn = Connection::open(get_db_path()).unwrap();
|
2025-01-10 07:32:12 -05:00
|
|
|
conn.execute(feeds_table_create,[]).unwrap();
|
|
|
|
conn.execute(feeds_index_create,[]).unwrap();
|
|
|
|
conn.execute(items_table_create,[]).unwrap();
|
|
|
|
conn.execute(items_index_create,[]).unwrap();
|
|
|
|
conn.close();
|
|
|
|
println!("Database Initialized.")
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2025-05-29 12:07:36 -04:00
|
|
|
pub fn add_feed(url: &str) {
|
|
|
|
let conn = Connection::open(get_db_path()).unwrap();
|
2025-01-10 07:32:12 -05:00
|
|
|
let feed = load_rss(url).unwrap();
|
|
|
|
let new_feed = feed.clone();
|
|
|
|
conn.execute("insert into feeds(title,url,description) values(?1,?2,?3)",
|
|
|
|
[feed.title,url.to_owned(),feed.description]).unwrap();
|
|
|
|
conn.close();
|
|
|
|
store_items(new_feed);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn store_items(feed: rss::Channel) {
|
2025-05-29 12:07:36 -04:00
|
|
|
let conn = Connection::open(get_db_path()).unwrap();
|
2025-01-10 07:32:12 -05:00
|
|
|
feed.items.iter().for_each(|i: &rss::Item|{
|
|
|
|
conn.execute("insert into items(url,title,description,content) values(?1,?2,?3,?4)",[
|
|
|
|
i.link.clone(),
|
|
|
|
i.title.clone(),
|
|
|
|
i.description.clone(),
|
|
|
|
i.content.clone()]
|
|
|
|
).unwrap();
|
|
|
|
});
|
|
|
|
conn.close();
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn return_item() -> String{
|
2025-05-29 12:07:36 -04:00
|
|
|
let conn = Connection::open(get_db_path()).unwrap();
|
2025-05-14 23:01:06 -04:00
|
|
|
let item = conn.query_row("select title,content from items where rowid=?1",[488],|row|{
|
2025-01-10 07:32:12 -05:00
|
|
|
Ok(
|
|
|
|
Item { title: row.get(0).unwrap(), content: row.get(1).unwrap() }
|
|
|
|
)
|
|
|
|
}).unwrap();
|
|
|
|
match item.content {
|
|
|
|
Some(content) => content,
|
|
|
|
None => panic!()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Row shape for `select url from feeds` (see `update_feeds`).
struct ReturnedFeedURLs{
    // Stored feed URL to re-fetch.
    url: String
}
|
|
|
|
pub fn update_feeds() {
|
|
|
|
//get feeds
|
2025-05-29 12:07:36 -04:00
|
|
|
let conn = Connection::open(get_db_path()).unwrap();
|
2025-01-10 07:32:12 -05:00
|
|
|
let mut stmt = conn.prepare("select url from feeds").unwrap();
|
|
|
|
let rows = stmt.query_map([],|row| {
|
|
|
|
Ok(ReturnedFeedURLs{
|
|
|
|
url:row.get(0).unwrap()
|
|
|
|
})
|
|
|
|
}).unwrap();
|
|
|
|
let mut urls: Vec<String> = Vec::new();
|
|
|
|
for feed in rows{
|
|
|
|
let url = feed.unwrap().url.clone();
|
|
|
|
urls.push(url);
|
|
|
|
}
|
|
|
|
stmt.finalize();
|
|
|
|
conn.close();
|
|
|
|
|
|
|
|
for u in urls {
|
|
|
|
store_items(load_rss(&u).unwrap());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
//for each feed
|
|
|
|
// insert items into database
|
|
|
|
|
|
|
|
//close out
|
|
|
|
}
|