Compare commits


No commits in common. "24374a266deb427daea90a32a27f03d047ab888d" and "240cb19589bc9d42fbae4331562e0f24413c108c" have entirely different histories.

3 changed files with 33 additions and 94 deletions


@@ -24,7 +24,7 @@ const FEEDS_TABLE_CREATE: &str = "CREATE TABLE IF NOT EXISTS 'feeds' (
     'feedID' INTEGER NOT NULL,
     'title' TEXT NOT NULL,
     'description' TEXT,
-    'icon' text,
+    'icon' BLOB,
     'url' text not null unique,
     'subscribed' INTEGER NOT NULL default 0,
     'last_updated' TEXT ,
@@ -40,7 +40,7 @@ const ITEMS_TABLE_CREATE: &str = "CREATE TABLE IF NOT EXISTS 'items' (
     'itemID' INTEGER NOT NULL,
     'feedID' INTEGER NOT NULL,
     'title' TEXT NOT NULL,
-    'icon' text,
+    'icon' BLOB,
     'url' text not null unique on conflict replace,
     'description' TEXT,
     'content' TEXT,
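
Note (editorial, not part of the diff): both schema hunks change the icon column from text, which the removed code further down fills with an image URL, to BLOB, i.e. raw image bytes. For reference, rusqlite binds a byte slice parameter as a BLOB; a minimal sketch with a hypothetical helper name, assuming the new feeds schema:

    use rusqlite::{params, Connection};

    // Hypothetical helper, not from this repository: store already-downloaded
    // icon bytes in the new BLOB column for one feed.
    fn store_feed_icon(conn: &Connection, feed_id: i64, icon: &[u8]) -> rusqlite::Result<usize> {
        conn.execute(
            "update feeds set icon = ?1 where feedID = ?2",
            params![icon, feed_id],
        )
    }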
@@ -88,68 +88,29 @@ pub fn initialize() {
     println!("Database Initialized.")
 }
-pub fn get_feed_id_by_url(url: &str) -> Option<usize> {
-    let conn = get_db();
-    let mut stmt = conn
-        .prepare("select feedID from feeds where url=?1")
-        .unwrap();
-    match stmt.query_row([url], |row| row.get(0)) {
-        Ok(i) => Some(i),
-        Err(_) => None,
-    }
-}
 pub fn add_feed(url: &str) -> Option<usize> {
-    let feed = load_rss(url).unwrap();
-    let time = Utc::now().to_rfc2822();
-    let image = if let Some(i) = feed.image() {
-        i.url().to_owned()
-    } else {
-        "".to_owned()
-    };
-    let mut id = get_feed_id_by_url(url);
     let conn = get_db();
-    match id {
-        Some(i) => {
+    let feed = load_rss(url).unwrap();
+    let new_feed = feed.clone();
+    let time = Utc::now().to_rfc2822();
     match conn.execute(
-        "update feeds set last_updated=?1,icon=?3,title=?4,description=?5 where feedID = ?2;",
-        [time,i.to_string(),image,feed.title.to_owned(),feed.description.to_owned()]
+        "insert into feeds(title,url,description,last_updated) values(?1,?2,?3,?4)",
+        [feed.title, url.to_string(), feed.description, time],
     ) {
-        Ok(_) => {println!("Updated feed.");}
-        Err(e) => {println!("Error updating feed.\n{}",e);}
-    }
-        }
-        None => {
-            match conn.execute(
-                "insert into feeds(title,url,icon,description,last_updated) values(?1,?2,?3,?4,?5)",
-                [
-                    feed.title.to_owned(),
-                    url.to_string(),
-                    image,
-                    feed.description.to_owned(),
-                    time,
-                ],
-            ) {
-                Ok(_) => {
-                    id = get_feed_id_by_url(url)
-                }
+        Ok(_) => {}
         Err(e) => {
             println!("Couldn't add feed:{}\nError:{}", url, e);
             return None;
         }
     }
-            ;
-        }
-    }
-    match id {
-        Some(i) => {
-            store_items(feed, i);
-            Some(i)
-            //need to get the feed_id from the DB and then make sure items are mapped to feed
-        }
-        None => {None}
-    }
+    let mut stmt = conn
+        .prepare("select feedID from feeds where url=?1")
+        .unwrap();
+    let id: usize = stmt.query_row([url], |row| row.get(0)).unwrap();
+    store_items(new_feed, id);
+    Some(id)
 }
 fn remove_items(feed_id: usize) -> Result<usize>{
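
Note (editorial, not part of the diff): the rewritten add_feed unwraps the feedID lookup, so a URL that failed to insert and is also absent from the table would panic rather than return None. The behaviour of the removed get_feed_id_by_url helper can be kept with rusqlite's OptionalExtension; a minimal sketch, assuming the same feeds schema:

    use rusqlite::{Connection, OptionalExtension};

    // Hypothetical lookup mirroring the removed helper: a matching row becomes
    // Some(id); a missing row (or any query error) becomes None instead of a panic.
    fn feed_id_for_url(conn: &Connection, url: &str) -> Option<usize> {
        conn.query_row("select feedID from feeds where url=?1", [url], |row| row.get(0))
            .optional()
            .ok()
            .flatten()
    }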
@@ -166,22 +127,16 @@ pub fn remove_feed(feed_id: usize) {
             println!("Failed to delete feed by id: {}\nError:{}",feed_id,e);
         }
     }
 }
 pub fn store_items(feed: rss::Channel, feed_id: usize) {
     let conn = Connection::open(get_db_path()).unwrap();
     feed.items.into_iter().for_each(|i: rss::Item| {
         let t = i.clone();
-        let image = match i.itunes_ext() {
-            Some(ext) => match ext.image() {
-                Some(img) => img.to_owned(),
-                None => "".to_owned(),
-            },
-            None => "".to_owned(),
-        };
-        match conn.execute(
-            "insert into items(url,title,description,content,feedID,date,media,icon)
-            values(?1,?2,?3,?4,?5,?6,?7,?8)",
+        conn.execute(
+            "insert into items(url,title,description,content,feedID,date,media)
+            values(?1,?2,?3,?4,?5,?6,?7)",
             [
                 i.link,
                 i.title,
@@ -195,14 +150,9 @@ pub fn store_items(feed: rss::Channel, feed_id: usize) {
                 None => Some("".to_owned()),
                 }
             },
-                Some(image),
             ],
-        ) {
-            Ok(_) => {}
-            Err(e) => {
-                println!("Failed to add item.\n{}", e)
-            }
-        };
+        )
+        .ok();
     });
     conn.close().unwrap();
 }


@@ -11,14 +11,3 @@ pub fn get_data_directory() -> std::path::PathBuf {
     };
     dirs.data_dir().to_owned()
 }
-pub fn get_cache_directory() -> std::path::PathBuf {
-    let dirs = ProjectDirs::from("rocks","gabe","RSSCar").expect("Failed to get paths");
-    match fs::create_dir(dirs.cache_dir()){
-        Ok(_) => {}
-        Err(e) if e.kind() == io::ErrorKind::AlreadyExists => {}
-        Err(_) => {println!("Error creating cache directory")}
-    };
-    dirs.config_dir().to_owned()
-}
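
Note (editorial, not part of the diff): the removed helper created the cache directory but then returned config_dir() rather than the cache path. Should the helper ever be reinstated, a corrected sketch using the same directories API could look like this:

    use std::{fs, io};
    use directories::ProjectDirs;

    // Sketch only: create the cache directory and return that same path.
    pub fn get_cache_directory() -> std::path::PathBuf {
        let dirs = ProjectDirs::from("rocks", "gabe", "RSSCar").expect("Failed to get paths");
        match fs::create_dir(dirs.cache_dir()) {
            Ok(_) => {}
            Err(e) if e.kind() == io::ErrorKind::AlreadyExists => {}
            Err(_) => println!("Error creating cache directory"),
        };
        dirs.cache_dir().to_owned()
    }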


@@ -54,7 +54,7 @@ pub fn list_items(feed_id: usize) -> iced::widget::Column<'static,Message> {
         .padding(15)
 }
-pub fn content_area(content: &'_ Vec<Content>) -> iced::widget::Container<'_, Message> {
+pub fn content_area(content: &Vec<Content>) -> iced::widget::Container<Message> {
     let mut children: Vec<Element<Message>> = Vec::new();
     for c in content {
         match c {
@@ -81,7 +81,7 @@ pub fn content_area(content: &'_ Vec<Content>) -> iced::widget::Container<'_, Message> {
 }
-pub fn media_view(state: &'_ ui::State) -> Element<'_, Message> {
+pub fn media_view(state: &ui::State) -> Element<Message> {
     match state.current_item.clone().unwrap().media {
         Some(m) => {
             if m.len() > 0{
@@ -98,7 +98,7 @@ pub fn media_view(state: &'_ ui::State) -> Element<'_, Message> {
     }
 }
-pub fn content_view(state: &'_ ui::State) -> iced::widget::Scrollable<'_, Message> {
+pub fn content_view(state: &ui::State) -> iced::widget::Scrollable<Message> {
     let item = state.current_item.clone().unwrap();
     scrollable(