Absolute basic feed management: Removing feeds!

This commit is contained in:
Gabriel 2025-07-25 19:03:50 -04:00
parent 8183feb229
commit afd34b416a
3 changed files with 107 additions and 71 deletions

124
src/db.rs
View file

@ -1,22 +1,22 @@
use std::path::PathBuf;
use rusqlite::{ Connection, Result};
use super::files::*; use super::files::*;
use super::net::*; use super::net::*;
use chrono::Utc; //Maybe use a different time?
use chrono::DateTime; use chrono::DateTime;
use chrono::Utc;
//Maybe use a different time?
use rusqlite::{Connection, Result};
use std::path::PathBuf;
/* /*
Cache is the in-memory database Cache is the in-memory database
Any changes/updates are written to file database Any changes/updates are written to file database
*/ */
const DB_LOCATION: &str = "rsscar.db"; const DB_LOCATION: &str = "rsscar.db";
fn get_db_path() -> PathBuf { fn get_db_path() -> PathBuf {
get_data_directory().join(DB_LOCATION) get_data_directory().join(DB_LOCATION)
} }
fn get_db() -> Connection{ fn get_db() -> Connection {
Connection::open(get_db_path()).unwrap() Connection::open(get_db_path()).unwrap()
} }
//url needs to be from the feed URL NOT the url in the channel itself!! //url needs to be from the feed URL NOT the url in the channel itself!!
@ -52,26 +52,29 @@ const ITEMS_TABLE_CREATE: &str = "CREATE TABLE IF NOT EXISTS 'items' (
);"; );";
const ITEMS_INDEX_CREATE: &str = "CREATE INDEX IF NOT EXISTS 'items_idx' on 'items'('itemID' ASC);"; const ITEMS_INDEX_CREATE: &str = "CREATE INDEX IF NOT EXISTS 'items_idx' on 'items'('itemID' ASC);";
const ITEMS_FEED_INDEX_CREATE: &str = "CREATE INDEX IF NOT EXISTS 'item_feed_idx' on 'items'('feedID' ASC);"; const ITEMS_FEED_INDEX_CREATE: &str =
"CREATE INDEX IF NOT EXISTS 'item_feed_idx' on 'items'('feedID' ASC);";
const DB_RESET: &str = " const DB_RESET: &str = "
drop table items; drop table items;
drop table feeds; drop table feeds;
"; ";
pub fn reset(){ pub fn reset() {
println!("WARNING⚠\nResetting Database"); println!("WARNING⚠\nResetting Database");
let conn = get_db(); let conn = get_db();
match conn.execute_batch(DB_RESET) { match conn.execute_batch(DB_RESET) {
Ok(_) => {println!("Database successfully wiped.")} Ok(_) => {
Err(e) => {panic!("Error erasing database.\nError: {0}",e)} println!("Database successfully wiped.")
}
Err(e) => {
panic!("Error erasing database.\nError: {0}", e)
}
} }
conn.close().unwrap(); conn.close().unwrap();
initialize(); initialize();
} }
pub fn initialize() { pub fn initialize() {
let path = get_db_path(); let path = get_db_path();
println!("Database at {} initialized", path.to_string_lossy()); println!("Database at {} initialized", path.to_string_lossy());
@ -80,14 +83,13 @@ pub fn initialize() {
conn.execute(FEEDS_INDEX_CREATE, []).unwrap(); conn.execute(FEEDS_INDEX_CREATE, []).unwrap();
conn.execute(ITEMS_TABLE_CREATE, []).unwrap(); conn.execute(ITEMS_TABLE_CREATE, []).unwrap();
conn.execute(ITEMS_INDEX_CREATE, []).unwrap(); conn.execute(ITEMS_INDEX_CREATE, []).unwrap();
conn.execute(ITEMS_FEED_INDEX_CREATE,[]).unwrap(); conn.execute(ITEMS_FEED_INDEX_CREATE, []).unwrap();
conn.close().unwrap(); conn.close().unwrap();
println!("Database Initialized.") println!("Database Initialized.")
} }
pub fn add_feed(url: &str) -> Option<usize> { pub fn add_feed(url: &str) -> Option<usize> {
let conn = Connection::open(get_db_path()).unwrap(); let conn = get_db();
let feed = load_rss(url).unwrap(); let feed = load_rss(url).unwrap();
let new_feed = feed.clone(); let new_feed = feed.clone();
let time = Utc::now().to_rfc2822(); let time = Utc::now().to_rfc2822();
@ -95,25 +97,40 @@ pub fn add_feed(url: &str) -> Option<usize> {
"insert into feeds(title,url,description,last_updated) values(?1,?2,?3,?4)", "insert into feeds(title,url,description,last_updated) values(?1,?2,?3,?4)",
[feed.title, url.to_string(), feed.description, time], [feed.title, url.to_string(), feed.description, time],
) { ) {
Ok(_) => { Ok(_) => {}
}
Err(e) => { Err(e) => {
println!("Couldn't add feed:{}\nError:{}",url,e); println!("Couldn't add feed:{}\nError:{}", url, e);
return None; return None;
} }
} }
let mut stmt = conn.prepare("select feedID from feeds where url=?1").unwrap(); let mut stmt = conn
let id: usize = stmt.query_row([url],|row| { .prepare("select feedID from feeds where url=?1")
row.get(0) .unwrap();
}).unwrap(); let id: usize = stmt.query_row([url], |row| row.get(0)).unwrap();
//need to get the feed_id from the DB and then make sure items are mapped to feed //need to get the feed_id from the DB and then make sure items are mapped to feed
store_items(new_feed,id); store_items(new_feed, id);
Some(id) Some(id)
} }
pub fn store_items(feed: rss::Channel,feed_id: usize) { fn remove_items(feed_id: usize) -> Result<usize>{
let conn = get_db();
conn.execute("delete from items where feedID = ?1", [feed_id])
}
//Removes a feed and all of its items from the database.
//Items are deleted first; if that fails we keep the feed row and return,
//so the items stay reachable for a retry instead of being orphaned
//(their feedID would otherwise point at a deleted feed).
pub fn remove_feed(feed_id: usize) {
    if let Err(e) = remove_items(feed_id) {
        println!("Failed to delete items for feed id: {}\nError:{}", feed_id, e);
        return;
    }
    let conn = get_db();
    match conn.execute("delete from feeds where feedID = ?1", [feed_id]) {
        Ok(_) => {}
        Err(e) => {
            println!("Failed to delete feed by id: {}\nError:{}", feed_id, e);
        }
    }
}
pub fn store_items(feed: rss::Channel, feed_id: usize) {
let conn = Connection::open(get_db_path()).unwrap(); let conn = Connection::open(get_db_path()).unwrap();
feed.items.into_iter().for_each(|i: rss::Item| { feed.items.into_iter().for_each(|i: rss::Item| {
let t = i.clone(); let t = i.clone();
@ -129,10 +146,10 @@ pub fn store_items(feed: rss::Channel,feed_id: usize) {
i.pub_date.clone(), i.pub_date.clone(),
{ {
match t.enclosure() { match t.enclosure() {
Some(e) => {Some(e.url().to_owned())} Some(e) => Some(e.url().to_owned()),
None => {Some("".to_owned())} None => Some("".to_owned()),
}
} }
},
], ],
) )
.ok(); .ok();
@ -140,8 +157,7 @@ pub fn store_items(feed: rss::Channel,feed_id: usize) {
conn.close().unwrap(); conn.close().unwrap();
} }
#[derive(Debug, Clone)]
#[derive(Debug,Clone)]
pub struct FeedItem { pub struct FeedItem {
pub item_id: usize, pub item_id: usize,
pub title: String, pub title: String,
@ -150,16 +166,15 @@ pub struct FeedItem {
pub description: Option<String>, pub description: Option<String>,
pub content: Option<String>, pub content: Option<String>,
pub date: Option<String>, pub date: Option<String>,
pub media: Option<String> pub media: Option<String>, //date missing! needed for ordering!!!
//date missing! needed for ordering!!!
} }
pub fn get_feed_items(feed_id: usize) -> Vec<FeedItem>{ pub fn get_feed_items(feed_id: usize) -> Vec<FeedItem> {
let conn = get_db(); let conn = get_db();
let mut stmt = conn.prepare("select itemID,title,url,date,media,description,content from items where feedID = ?1 order by date(date)").unwrap(); let mut stmt = conn.prepare("select itemID,title,url,date,media,description,content from items where feedID = ?1 order by date(date)").unwrap();
let items:Result<Vec<FeedItem>> = stmt.query_map([feed_id], |row| { let items: Result<Vec<FeedItem>> = stmt
Ok(FeedItem{ .query_map([feed_id], |row| {
Ok(FeedItem {
item_id: row.get(0).unwrap(), item_id: row.get(0).unwrap(),
title: row.get(1).unwrap(), title: row.get(1).unwrap(),
url: row.get(2).unwrap(), url: row.get(2).unwrap(),
@ -169,19 +184,23 @@ pub fn get_feed_items(feed_id: usize) -> Vec<FeedItem>{
content: row.get(6).unwrap(), content: row.get(6).unwrap(),
icon: None, icon: None,
}) })
}).unwrap().collect(); })
.unwrap()
.collect();
match items { match items {
Ok(i) => {i}, Ok(i) => i,
Err(_) => {panic!("No items for this feed\nFeedID:{}",feed_id)} Err(_) => {
panic!("No items for this feed\nFeedID:{}", feed_id)
}
} }
} }
pub fn get_item(item_id: usize) -> FeedItem { pub fn get_item(item_id: usize) -> FeedItem {
let conn = get_db(); let conn = get_db();
let mut stmt = conn.prepare("select itemID,title,url,icon,date,media,description,content from items where itemID = ?1").unwrap(); let mut stmt = conn.prepare("select itemID,title,url,icon,date,media,description,content from items where itemID = ?1").unwrap();
let item:FeedItem = stmt.query_one([item_id],|row| { let item: FeedItem = stmt
Ok( .query_one([item_id], |row| {
FeedItem{ Ok(FeedItem {
item_id: row.get(0).unwrap(), item_id: row.get(0).unwrap(),
title: row.get(1).unwrap(), title: row.get(1).unwrap(),
url: row.get(2).unwrap(), url: row.get(2).unwrap(),
@ -191,7 +210,8 @@ pub fn get_item(item_id: usize) -> FeedItem {
description: row.get(6).unwrap(), description: row.get(6).unwrap(),
content: row.get(7).unwrap(), content: row.get(7).unwrap(),
}) })
}).unwrap(); })
.unwrap();
item item
} }
@ -205,10 +225,10 @@ pub struct Feed {
pub last_updated: Option<DateTime<Utc>>, pub last_updated: Option<DateTime<Utc>>,
} }
fn time_string_conversion(str: String) -> Option<DateTime<Utc>>{ fn time_string_conversion(str: String) -> Option<DateTime<Utc>> {
match DateTime::parse_from_rfc2822(&str) { match DateTime::parse_from_rfc2822(&str) {
Ok(dt) => {Some(dt.to_utc())}, Ok(dt) => Some(dt.to_utc()),
Err(_) => {None} Err(_) => None,
} }
} }
@ -223,15 +243,17 @@ pub fn get_feeds() -> Vec<Feed> {
description: row.get(2).unwrap(), description: row.get(2).unwrap(),
icon: row.get(3).unwrap(), icon: row.get(3).unwrap(),
url: row.get(4).unwrap(), url: row.get(4).unwrap(),
subscribed: row.get::<_,bool>(5).unwrap(), subscribed: row.get::<_, bool>(5).unwrap(),
last_updated:time_string_conversion(row.get(6).unwrap()), last_updated: time_string_conversion(row.get(6).unwrap()),
}) })
}).unwrap().collect(); })
.unwrap()
.collect();
match rows { match rows {
Ok(r) => { Ok(r) => r,
r Err(_) => {
panic!("No idea what causes this")
} }
Err(_) => {panic!("No idea what causes this")}
} }
} }
pub fn update_feeds() { pub fn update_feeds() {

View file

@ -56,6 +56,7 @@ pub enum Message {
ChangePage(Page), ChangePage(Page),
LoadFeed(usize), LoadFeed(usize),
AddFeed(String), AddFeed(String),
RemoveFeed(usize),
LoadItem(usize), LoadItem(usize),
FieldUpdated(AppField, String), FieldUpdated(AppField, String),
LinkClicked(Url), LinkClicked(Url),
@ -73,6 +74,11 @@ async fn add_feed_background(url: String) -> String {
db::add_feed(&url); db::add_feed(&url);
"Done adding feed".to_string() "Done adding feed".to_string()
} }
//Runs the blocking database feed removal off the UI update loop.
//The returned status string is forwarded to the app through Message::Done.
async fn remove_feed_background(id: usize) -> String {
    println!("Removing feed");
    db::remove_feed(id);
    String::from("Done removing feed")
}
fn update(state: &mut State, mes: Message) -> Task<Message> { fn update(state: &mut State, mes: Message) -> Task<Message> {
match mes { match mes {
@ -85,10 +91,14 @@ fn update(state: &mut State, mes: Message) -> Task<Message> {
state.page = Page::FeedView; state.page = Page::FeedView;
Task::none() Task::none()
} }
Message::AddFeed(f) => { Message::AddFeed(f) => {
state.feed_input = "".to_string(); state.feed_input = "".to_string();
Task::perform(add_feed_background(f.to_string()), Message::Done) Task::perform(add_feed_background(f.to_string()), Message::Done)
} }
Message::RemoveFeed(id) => {
Task::perform(remove_feed_background(id), Message::Done)
}
Message::LinkClicked(l) => { Message::LinkClicked(l) => {
println!("Link clicked: {}", l); println!("Link clicked: {}", l);

View file

@ -1,6 +1,7 @@
use super::db; use super::db;
use super::ui; use super::ui;
use iced::widget::markdown; use iced::widget::markdown;
use iced::widget::row;
use iced::widget::scrollable; use iced::widget::scrollable;
use iced::widget::Column; use iced::widget::Column;
use iced::Theme; use iced::Theme;
@ -18,7 +19,10 @@ pub fn list_feeds() -> iced::widget::Column<'static, Message> {
feeds feeds
.iter() .iter()
.map(|f| { .map(|f| {
button(text(f.title.clone())).on_press(Message::LoadFeed(f.feed_id)) row!(
button(text(f.title.clone())).on_press(Message::LoadFeed(f.feed_id)),
button(text("Remove feed")).on_press(Message::RemoveFeed(f.feed_id))
)
}) })
.map(Element::from), .map(Element::from),
) )