Update cargo, and begin the fundamentals of multi-network media!

This commit is contained in:
Gabriel 2026-02-16 11:15:27 -05:00
parent 8dbda35b18
commit ba0dde7ec8
7 changed files with 113 additions and 36 deletions

View file

@ -5,7 +5,7 @@ edition = "2021"
[dependencies] [dependencies]
iced = { git = "https://github.com/iced-rs/iced", branch = "master", features = ["image", "markdown", "svg"] } iced = { git = "https://github.com/iced-rs/iced", branch = "master", features = ["image", "markdown", "svg"] }
reqwest = { version = "0.12", features= ["blocking","socks"]} reqwest = { version = "0.12", features= ["blocking","socks","gzip"]}
rss = "2.0" rss = "2.0"
rusqlite = {version=">=0.34",features=['bundled']} rusqlite = {version=">=0.34",features=['bundled']}
scraper = "0.23.1" scraper = "0.23.1"
@ -14,6 +14,8 @@ chrono = "0.4.41"
rss_content = { git = "https://code.gabe.rocks/gabriel/rss_content", version = "0.1.1" } rss_content = { git = "https://code.gabe.rocks/gabriel/rss_content", version = "0.1.1" }
url = "2.5.4" url = "2.5.4"
opml = "1.1.6" opml = "1.1.6"
sha1 = "0.10.6"
bytes = "1.11.1"
#rfd = "0.15.4" (for importing files) #rfd = "0.15.4" (for importing files)
[profile.dev] [profile.dev]
debug=true debug=true

View file

@ -86,11 +86,13 @@ pub fn initialize() {
conn.execute(ITEMS_TABLE_CREATE, []).unwrap(); conn.execute(ITEMS_TABLE_CREATE, []).unwrap();
conn.execute(ITEMS_INDEX_CREATE, []).unwrap(); conn.execute(ITEMS_INDEX_CREATE, []).unwrap();
conn.execute(ITEMS_FEED_INDEX_CREATE, []).unwrap(); conn.execute(ITEMS_FEED_INDEX_CREATE, []).unwrap();
conn.pragma_update(None, "journal_mode", &"WAL").unwrap();
conn.pragma_update(None, "busy_timeout", &5000).unwrap();
conn.close().unwrap(); conn.close().unwrap();
println!("Database Initialized.") println!("Database Initialized.")
} }
pub fn get_feed_id_by_url(url: &str) -> Option<usize> { pub fn get_feed_id_by_url(url: &str) -> Option<i64> {
let conn = get_db(); let conn = get_db();
let mut stmt = conn let mut stmt = conn
.prepare("select feedID from feeds where url=?1") .prepare("select feedID from feeds where url=?1")
@ -100,7 +102,7 @@ pub fn get_feed_id_by_url(url: &str) -> Option<usize> {
Err(_) => None, Err(_) => None,
} }
} }
pub fn add_feed(url: &str) -> Option<usize> { pub fn add_feed(url: &str) -> Option<i64> {
let mut feed: Channel; let mut feed: Channel;
match load_rss(url) { match load_rss(url) {
Some(f) => { Some(f) => {
@ -164,12 +166,14 @@ pub fn add_feed(url: &str) -> Option<usize> {
} }
} }
fn remove_items(feed_id: usize) -> Result<usize> { fn remove_items(feed_id: i64) -> Result<i64> {
let conn = get_db(); let conn = get_db();
conn.execute("delete from items where feedID = ?1", [feed_id]) let c =conn.execute("delete from items where feedID = ?1", [feed_id])?;
Ok(c as i64)
} }
pub fn remove_feed(feed_id: usize) { pub fn remove_feed(feed_id: i64) {
let _ = remove_items(feed_id); let _ = remove_items(feed_id);
let conn = get_db(); let conn = get_db();
match conn.execute("delete from feeds where feedID = ?1", [feed_id]) { match conn.execute("delete from feeds where feedID = ?1", [feed_id]) {
@ -187,7 +191,7 @@ fn modify_date(s: &str) -> Option<String> {
} }
} }
pub fn store_items(feed: rss::Channel, feed_id: usize) { pub fn store_items(feed: rss::Channel, feed_id: i64) {
let conn = Connection::open(get_db_path()).unwrap(); let conn = Connection::open(get_db_path()).unwrap();
feed.items.into_iter().for_each(|i: rss::Item| { feed.items.into_iter().for_each(|i: rss::Item| {
let t = i.clone(); let t = i.clone();
@ -241,7 +245,7 @@ pub fn store_items(feed: rss::Channel, feed_id: usize) {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct FeedItem { pub struct FeedItem {
pub item_id: usize, pub item_id: i64,
pub title: String, pub title: String,
pub url: String, pub url: String,
pub icon: Option<String>, pub icon: Option<String>,
@ -279,7 +283,7 @@ pub fn get_all_items() -> Vec<FeedItem> {
} }
} }
pub fn get_feed_items(feed_id: usize) -> Vec<FeedItem> { pub fn get_feed_items(feed_id: i64) -> Vec<FeedItem> {
let conn = get_db(); let conn = get_db();
let mut stmt = conn.prepare("select itemID,title,url,date,media,description,content from items where feedID = ?1 order by date(date)").unwrap(); let mut stmt = conn.prepare("select itemID,title,url,date,media,description,content from items where feedID = ?1 order by date(date)").unwrap();
let items: Result<Vec<FeedItem>> = stmt let items: Result<Vec<FeedItem>> = stmt
@ -296,7 +300,7 @@ pub fn get_feed_items(feed_id: usize) -> Vec<FeedItem> {
} }
} }
pub fn get_item(item_id: usize) -> FeedItem { pub fn get_item(item_id: i64) -> FeedItem {
let conn = get_db(); let conn = get_db();
let mut stmt = conn.prepare("select itemID,title,url,icon,date,media,description,content from items where itemID = ?1").unwrap(); let mut stmt = conn.prepare("select itemID,title,url,icon,date,media,description,content from items where itemID = ?1").unwrap();
let item: FeedItem = stmt let item: FeedItem = stmt
@ -317,7 +321,7 @@ pub fn get_item(item_id: usize) -> FeedItem {
} }
pub struct Feed { pub struct Feed {
pub feed_id: usize, pub feed_id: i64,
pub title: String, pub title: String,
pub description: Option<String>, pub description: Option<String>,
pub icon: Option<String>, pub icon: Option<String>,

View file

@ -1,7 +1,11 @@
use std::{fs,io}; use std::{fs,io::{self, Write}, path::PathBuf};
use directories::ProjectDirs; use directories::ProjectDirs;
use iced::widget::image::Handle;
use super::net;
use sha1::{Digest,Sha1};
// OS neutral path for application data
pub fn get_data_directory() -> std::path::PathBuf { pub fn get_data_directory() -> std::path::PathBuf {
let dirs = ProjectDirs::from("rocks","gabe","RSSCar").expect("Failed to get paths"); let dirs = ProjectDirs::from("rocks","gabe","RSSCar").expect("Failed to get paths");
match fs::create_dir(dirs.data_dir()){ match fs::create_dir(dirs.data_dir()){
@ -19,6 +23,34 @@ pub fn get_cache_directory() -> std::path::PathBuf {
Err(e) if e.kind() == io::ErrorKind::AlreadyExists => {} Err(e) if e.kind() == io::ErrorKind::AlreadyExists => {}
Err(_) => {println!("Error creating cache directory")} Err(_) => {println!("Error creating cache directory")}
}; };
dirs.config_dir().to_owned() dirs.cache_dir().to_owned()
} }
/// Return an image handle for `url`, caching the downloaded bytes on disk.
///
/// The cache filename is the SHA-1 hex digest of the URL, so any URL maps to
/// a safe, fixed-length filename and repeated calls hit the on-disk cache
/// instead of the network.
///
/// Returns `None` when the download fails. A cache-write failure is
/// non-fatal: the freshly downloaded bytes are still returned as a handle.
pub fn get_image(url: &str) -> Option<Handle> {
    let cache: PathBuf = get_cache_directory();
    let mut hasher = Sha1::new();
    hasher.update(url.as_bytes());
    let filename = format!("{:x}.img", hasher.finalize());
    let path = cache.join(filename);
    if path.exists() {
        return Some(Handle::from_path(path));
    }
    match net::get_bytes(url) {
        Some(b) => {
            // Best-effort cache write: a full disk or unwritable cache dir
            // must not panic (the original unwrap()s did) or prevent
            // displaying the image we already downloaded.
            let cached = fs::File::create(&path)
                .and_then(|mut file| file.write_all(&b))
                .is_ok();
            if cached {
                Some(Handle::from_path(path))
            } else {
                println!("Failed to cache image.");
                // Fall back to serving the bytes straight from memory.
                Some(Handle::from_bytes(b.to_vec()))
            }
        }
        None => {
            println!("Failed to download image.");
            None
        }
    }
}

View file

@ -1,10 +1,13 @@
use core::panic; use core::panic;
use iced::widget::image::Handle; use iced::{font::load, widget::image::Handle};
use reqwest::{self, blocking::Client, header::USER_AGENT, Error, Proxy}; use reqwest::{self, blocking::Client, header::USER_AGENT, Error, Proxy};
use rss::Channel; use rss::Channel;
use scraper::{Html, Selector}; use scraper::{Html, Selector};
use std::{path::PathBuf, str::FromStr};
use url::Url; use url::Url;
use crate::files;
const DEFAULT_TOR_PROXY: &str = "socks5h://127.0.0.1:9050"; const DEFAULT_TOR_PROXY: &str = "socks5h://127.0.0.1:9050";
const DEFAULT_I2P_PROXY: &str = "socks5h://127.0.0.1:4447"; const DEFAULT_I2P_PROXY: &str = "socks5h://127.0.0.1:4447";
@ -86,7 +89,7 @@ fn get_client(network: Network) -> Result<Client, Error> {
} }
} }
fn get_content(url: &str) -> Option<String> { pub fn get_content(url: &str) -> Option<String> {
let client = get_client(url_network(url)).unwrap(); let client = get_client(url_network(url)).unwrap();
let res = client.get(url).header(USER_AGENT, "RSS Reader").send(); let res = client.get(url).header(USER_AGENT, "RSS Reader").send();
match res { match res {
@ -98,6 +101,18 @@ fn get_content(url: &str) -> Option<String> {
} }
} }
/// Fetch the raw response body for `url`, routed through the proxy client
/// appropriate for the URL's network (clearnet / Tor / I2P).
///
/// Returns `None` if the request or the body read fails. Panics if the
/// proxy client cannot be constructed (same contract as `get_content`).
pub fn get_bytes(url: &str) -> Option<bytes::Bytes> {
    let client = get_client(url_network(url)).unwrap();
    client
        .get(url)
        .header(USER_AGENT, "RSS Reader")
        .send()
        .ok()
        .and_then(|resp| resp.bytes().ok())
}
pub fn retrieve_opml(url: &str) -> Vec<Url> { pub fn retrieve_opml(url: &str) -> Vec<Url> {
match get_content(url) { match get_content(url) {
Some(c) => match opml::OPML::from_str(&c) { Some(c) => match opml::OPML::from_str(&c) {
@ -166,18 +181,28 @@ pub fn is_feed(url: &str) -> bool {
pub fn load_rss(url: &str) -> Option<Channel> { pub fn load_rss(url: &str) -> Option<Channel> {
let client = get_client(url_network(url)).unwrap(); let client = get_client(url_network(url)).unwrap();
let res = client.get(url).header(USER_AGENT, "RSS Reader").send(); let res = client.get(url).header(USER_AGENT, "RSS Reader").send();
match res { match res {
Ok(resp) => match resp.bytes() { Ok(resp) => {
Ok(body) => match Channel::read_from(&*body) { match resp.bytes() {
Ok(channel) => Some(channel), Ok(body) => {
match Channel::read_from(&*body) {
Ok(channel) => Some(channel),
Err(e) => {
panic!("Error parsing feed:\n{}", e);
}
}
},
Err(e) => { Err(e) => {
panic!("Error parsing feed:\n{}", e); println!("Failed to load feed: {}", e);
if url_network(url) == Network::I2P {
println!("Retrying I2P URL:{}", url);
return load_rss(url); //horrible
}
return None;
} }
},
Err(_) => {
panic!("Empty response")
} }
}, }
Err(err) => { Err(err) => {
println!("Error loading feed.:{}", err); println!("Error loading feed.:{}", err);
return None; return None;
@ -208,6 +233,7 @@ pub fn download_image(url: &str) -> Option<iced::widget::image::Handle> {
match reqwest::blocking::get(url) { match reqwest::blocking::get(url) {
Ok(r) => { Ok(r) => {
let img: Handle = Handle::from_bytes(r.bytes().unwrap()); let img: Handle = Handle::from_bytes(r.bytes().unwrap());
Some(img) Some(img)
} }
Err(_) => { Err(_) => {

View file

@ -1,3 +1,5 @@
use crate::files;
use super::net; use super::net;
#[test] #[test]
@ -15,4 +17,13 @@ fn load_feeds() {
for f in feeds { for f in feeds {
println!("Feed found:{}",f.title()); println!("Feed found:{}",f.title());
} }
}
// NOTE(review): network integration test — it fetches an .onion URL, so it
// requires a reachable Tor SOCKS proxy (default 127.0.0.1:9050) and will
// fail offline. It also has no assertion: it only verifies that
// files::get_image does not panic. Consider asserting on the returned
// Option and gating behind an `--ignored`/feature flag.
#[test]
fn display_image() {
let i = "http://gabriel262me3lgv3w7xohtesg3laoojmtye644pwirhdm73qmedmsqd.onion/gabriel.avif";
files::get_image(i);
} }

View file

@ -45,11 +45,12 @@ pub enum Page {
pub struct State { pub struct State {
pub page: Page, pub page: Page,
pub current_feed: usize, pub current_feed: i64,
pub current_item: Option<FeedItem>, pub current_item: Option<FeedItem>,
pub item_description: Vec<Content>, pub item_description: Vec<Content>,
pub item_content: Vec<Content>, pub item_content: Vec<Content>,
pub feed_input: String, pub feed_input: String,
pub opml_input: String
} }
impl Default for State { impl Default for State {
fn default() -> Self { fn default() -> Self {
@ -60,6 +61,7 @@ impl Default for State {
item_description: Vec::new(), item_description: Vec::new(),
item_content: Vec::new(), item_content: Vec::new(),
feed_input: String::from(""), feed_input: String::from(""),
opml_input: String::from("")
} }
} }
} }
@ -67,10 +69,10 @@ impl Default for State {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum Message { pub enum Message {
ChangePage(Page), ChangePage(Page),
LoadFeed(usize), LoadFeed(i64),
AddFeed(String), AddFeed(String),
RemoveFeed(usize), RemoveFeed(i64),
LoadItem(usize), LoadItem(i64),
ProcessOPML(String), ProcessOPML(String),
FieldUpdated(AppField, String), FieldUpdated(AppField, String),
LinkClicked(String), LinkClicked(String),
@ -100,7 +102,7 @@ async fn add_feed_background(url: String) -> String {
db::add_feed(&url); db::add_feed(&url);
"Done adding feed".to_string() "Done adding feed".to_string()
} }
async fn remove_feed_background(id:usize) -> String { async fn remove_feed_background(id:i64) -> String {
println!("Removing feed"); println!("Removing feed");
db::remove_feed(id); db::remove_feed(id);
"Done removing feed".to_owned() "Done removing feed".to_owned()
@ -119,7 +121,7 @@ fn update(state: &mut State, mes: Message) -> Task<Message> {
} }
Message::ProcessOPML(url) => { Message::ProcessOPML(url) => {
state.feed_input = "".to_string(); state.opml_input = "".to_string();
Task::perform(add_multiple_feeds_background(url), Message::Done) Task::perform(add_multiple_feeds_background(url), Message::Done)
} }
@ -160,7 +162,7 @@ fn update(state: &mut State, mes: Message) -> Task<Message> {
state.feed_input = value; state.feed_input = value;
}, },
AppField::OPMLInput => { AppField::OPMLInput => {
state.feed_input = value; state.opml_input = value;
} }
} }
Task::none() Task::none()
@ -274,10 +276,10 @@ fn testing(state: &State) -> Element<'_, Message> {
.spacing(5) .spacing(5)
.padding(10), .padding(10),
row!( row!(
text_input("OPML Url",&state.feed_input) text_input("OPML Url",&state.opml_input)
.on_input(|val| Message::FieldUpdated(AppField::OPMLInput,val)) .on_input(|val| Message::FieldUpdated(AppField::OPMLInput,val))
.width(300), .width(300),
button("Add feeds from .OPML").on_press(Message::ProcessOPML(state.feed_input.clone())) button("Add feeds from .OPML").on_press(Message::ProcessOPML(state.opml_input.clone()))
), ),
button("Wipe DB").on_press(Message::ResetDB), button("Wipe DB").on_press(Message::ResetDB),
button("go back!").on_press(Message::ChangePage(Page::Home)) button("go back!").on_press(Message::ChangePage(Page::Home))

View file

@ -43,7 +43,7 @@ pub fn list_feeds() -> iced::widget::Column<'static, Message> {
.padding(15) .padding(15)
} }
pub fn list_items(feed_id: usize) -> iced::widget::Column<'static, Message> { pub fn list_items(feed_id: i64) -> iced::widget::Column<'static, Message> {
let items: Vec<db::FeedItem> = db::get_feed_items(feed_id); let items: Vec<db::FeedItem> = db::get_feed_items(feed_id);
column( column(
items items
@ -146,7 +146,7 @@ pub fn navbar(state: &ui::State) -> Element<Message> {
.into() .into()
} }
pub fn list_item(id: usize, title: String, description: String) -> Column<'static, Message> { pub fn list_item(id: i64, title: String, description: String) -> Column<'static, Message> {
Column::new() Column::new()
.push(button(Text::new(title)).on_press(Message::LoadItem(id))) .push(button(Text::new(title)).on_press(Message::LoadItem(id)))
.push(text(description)) .push(text(description))