// rss-tool/src/db.rs

use std::path::PathBuf;
use std::str::FromStr;
use crate::files;
use rusqlite::{params, Connection, Result};
use super::files::*;
use super::net::*;
use chrono::Utc; //Maybe use a different time?
/*
Design note: the intent is to keep an in-memory cache of the database and
write any changes/updates through to the on-disk database. (No in-memory
cache is implemented in this file yet — everything below goes straight to
the SQLite file.)
*/
/// Two-column projection of an `items` row, used by `return_item`.
struct Item {
title: String,
// `content` is nullable in the schema, hence Option.
content: Option<String>,
}
// File name of the SQLite database inside the app's data directory.
const DB_LOCATION: &str = "rsscar.db";

/// Builds the absolute path to the SQLite database file by appending
/// `DB_LOCATION` to the platform data directory.
fn get_db_path() -> PathBuf {
    let mut path = get_data_directory();
    path.push(DB_LOCATION);
    path
}
// Schema for the `feeds` table. `url` is UNIQUE ON CONFLICT REPLACE, so
// re-adding a feed URL overwrites the existing row instead of failing.
// SQLite accepts the single-quoted names here because they appear in
// identifier position.
// NOTE(review): const names should be SCREAMING_SNAKE_CASE per Rust
// convention; renaming would touch every call site, so left as-is here.
const feeds_table_create: &str = "CREATE TABLE IF NOT EXISTS 'feeds' (
'feedID' INTEGER NOT NULL,
'title' TEXT NOT NULL,
'description' TEXT,
'icon' BLOB,
'url' text not null unique on conflict replace,
'subscribed' INTEGER,
'last_updated' TEXT ,
PRIMARY KEY('feedID')
);";
// Partial index over subscribed feeds.
//
// BUG FIX: the WHERE clause previously read `'subscribed' = 1` with single
// quotes. In SQLite, a single-quoted token in an *expression* is a string
// literal, not a column reference, so the predicate compared the constant
// string 'subscribed' to 1 — always false — and the partial index covered
// zero rows. Using the bare identifier makes the index actually index
// subscribed feeds.
const feeds_index_create: &str = "CREATE INDEX IF NOT EXISTS 'subscribed_feeds_idx' ON 'feeds' (
'feedID' ASC
) WHERE subscribed = 1;";
// 2025-05-28 14:53:05 -04:00 (stray blame timestamp from web code view)
/* */
// Schema for the `items` table. `url` is UNIQUE ON CONFLICT REPLACE, so
// re-inserting an item that was already fetched replaces its row instead
// of failing or duplicating. `read` defaults to 0 (unread).
const items_table_create: &str = "CREATE TABLE IF NOT EXISTS 'items' (
'itemID' INTEGER NOT NULL,
'title' TEXT NOT NULL,
'icon' BLOB,
'url' text not null unique on conflict replace,
'description' TEXT,
'content' TEXT,
'read' INTEGER DEFAULT 0,
PRIMARY KEY('itemID')
);";
const items_index_create: &str = "CREATE INDEX IF NOT EXISTS 'items_idx' on 'items'('itemID' ASC);";
/// Creates the SQLite database file (if missing) and ensures the `feeds`
/// and `items` tables and their indexes exist.
///
/// All DDL uses `CREATE ... IF NOT EXISTS`, so calling this repeatedly is
/// safe. Panics if the database cannot be opened or any statement fails.
pub fn initialize() {
    let path = get_db_path();
    let conn = Connection::open(&path).unwrap();
    conn.execute(feeds_table_create, []).unwrap();
    conn.execute(feeds_index_create, []).unwrap();
    conn.execute(items_table_create, []).unwrap();
    conn.execute(items_index_create, []).unwrap();
    // close() returns a must-use Result; the original silently dropped it
    // (and logged "initialized" before any table was created).
    conn.close().expect("failed to close database connection");
    println!("Database at {} initialized", path.display());
}
pub fn add_feed(url: &str) {
let conn = Connection::open(get_db_path()).unwrap();
let feed = load_rss(url).unwrap();
let new_feed = feed.clone();
let time = Utc::now().to_rfc2822();
conn.execute(
"insert into feeds(title,url,description,last_updated) values(?1,?2,?3,?4)",
[feed.title, url.to_owned(), feed.description, time],
)
.unwrap();
conn.close();
store_items(new_feed);
}
pub fn store_items(feed: rss::Channel) {
let conn = Connection::open(get_db_path()).unwrap();
feed.items.iter().for_each(|i: &rss::Item| {
conn.execute(
"insert into items(url,title,description,content) values(?1,?2,?3,?4)",
[
i.link.clone(),
i.title.clone(),
i.description.clone(),
i.content.clone(),
],
)
.unwrap();
});
conn.close();
}
/// Returns the `content` of the first item (lowest itemID) in the
/// `items` table.
///
/// BUG FIX: the previous query contained a `?1` placeholder but passed an
/// empty parameter list, so `query_row` failed with a parameter-count
/// error and the `unwrap` panicked on every call. Until callers can
/// supply an item id, fetch the first item deterministically.
/// TODO(review): take an item id parameter (requires updating callers).
///
/// Panics if the table is empty or the selected item has no content.
pub fn return_item() -> String {
    let conn = Connection::open(get_db_path()).unwrap();
    let item = conn
        .query_row(
            "select title,content from items order by itemID limit 1",
            [],
            |row| {
                // Propagate column errors with `?` instead of unwrapping
                // inside the row-mapping closure.
                Ok(Item {
                    title: row.get(0)?,
                    content: row.get(1)?,
                })
            },
        )
        .unwrap();
    // Same panic-on-None behavior as before, but with a message.
    item.content.expect("item has no content")
}
/// One row of the `feeds` table (see `feeds_table_create`).
pub struct Feed {
// Primary key. NOTE(review): u8 caps this at 255 feeds — confirm a wider
// integer type isn't needed.
feedID: u8,
title: String,
description: Option<String>,
// Declared BLOB in the schema but read here as TEXT — TODO confirm.
icon: Option<String>,
url: String,
subscribed: Option<String>, //needs to be bool
last_updated: Option<String>,
}
/// Loads every row of the `feeds` table.
///
/// Returns an empty Vec when no feeds exist — the previous version
/// indexed `r[0]` unconditionally and panicked on an empty table.
/// Panics (with the underlying error) on database failures.
pub fn get_feeds() -> Vec<Feed> {
    let conn = Connection::open(get_db_path()).unwrap();
    let mut stmt = conn
        .prepare("select feedID,title,description,icon,url,subscribed,last_updated from feeds")
        .unwrap();
    let rows: Result<Vec<Feed>> = stmt
        .query_map([], |row| {
            // Propagate per-column errors with `?` instead of unwrapping
            // inside the row-mapping closure.
            Ok(Feed {
                feedID: row.get(0)?,
                title: row.get(1)?,
                description: row.get(2)?,
                icon: row.get(3)?,
                url: row.get(4)?,
                subscribed: row.get(5)?,
                last_updated: row.get(6)?,
            })
        })
        .unwrap()
        .collect();
    match rows {
        Ok(feeds) => {
            // Only log a title when at least one feed exists.
            if let Some(first) = feeds.first() {
                println!("Feed found\n{}", first.title);
            }
            feeds
        }
        // Include the error in the panic message instead of discarding it.
        Err(e) => panic!("failed to read feeds: {e}"),
    }
}
/// Single-column projection of `feeds` used by `update_feeds` to pull
/// just the url of each stored feed.
struct ReturnedFeedURLs {
url: String,
}
/// Re-fetches every feed URL stored in the database and stores the
/// resulting items.
///
/// Panics on database or network errors.
pub fn update_feeds() {
    let conn = Connection::open(get_db_path()).unwrap();
    // Collect the URLs first so the connection can be closed before the
    // (potentially slow) network fetches begin. The statement borrow ends
    // with this scope, which also finalizes it on drop.
    let urls: Vec<String> = {
        let mut stmt = conn.prepare("select url from feeds").unwrap();
        stmt.query_map([], |row| {
            Ok(ReturnedFeedURLs { url: row.get(0)? })
        })
        .unwrap()
        // `r.unwrap().url` moves the owned String out — the original added
        // a redundant .clone() and pushed into a mutable Vec by hand.
        .map(|r| r.unwrap().url)
        .collect()
    };
    // Surface close failures instead of ignoring the must-use Result.
    conn.close().expect("failed to close database connection");
    for url in urls {
        // store_items opens its own connection per feed.
        store_items(load_rss(&url).unwrap());
    }
}