From 070c55a95bdc91c3ed7cdaab00968ba7d8e55d47 Mon Sep 17 00:00:00 2001 From: Julia Lange Date: Fri, 6 Feb 2026 13:03:31 -0800 Subject: [PATCH] db, add item and functions to get items Creates item. It's pretty barebones until I implement fetching. I also added get functions for feed and channel to get items with. --- koucha/src/db.rs | 3 + koucha/src/db/channel.rs | 33 ++++++++++- koucha/src/db/feed.rs | 18 ++++++ koucha/src/db/item.rs | 122 +++++++++++++++++++++++++++++++++++++++ koucha/src/test_utils.rs | 5 ++ 5 files changed, 179 insertions(+), 2 deletions(-) create mode 100644 koucha/src/db/item.rs diff --git a/koucha/src/db.rs b/koucha/src/db.rs index df11ecc..4964fd9 100644 --- a/koucha/src/db.rs +++ b/koucha/src/db.rs @@ -4,6 +4,8 @@ mod feed; pub use feed::Feed; mod channel; pub use channel::Channel; +mod item; +pub use item::Item; macro_rules! define_key { ($name:ident) => { @@ -15,3 +17,4 @@ macro_rules! define_key { define_key!(UserKey); define_key!(FeedKey); define_key!(ChannelKey); +define_key!(ItemKey); diff --git a/koucha/src/db/channel.rs b/koucha/src/db/channel.rs index f8642f5..0f38f3e 100644 --- a/koucha/src/db/channel.rs +++ b/koucha/src/db/channel.rs @@ -3,7 +3,11 @@ use chrono::{DateTime, Utc}; use crate::{ Result, AdapterPool, - db::ChannelKey, + db::{ + ChannelKey, + Item, + item::UnparsedItem, + }, }; pub struct UnparsedChannel { @@ -74,6 +78,17 @@ impl Channel { link_str, link_str // We use the url as a placeholder title ).fetch_one(&pool.0).await?.parse() } + + pub async fn get_items(&self, pool: &AdapterPool) -> Result> { + sqlx::query_as!( + UnparsedItem, + "SELECT id as `id!`, channel_id, fetched_at, title, description, + content + FROM items + WHERE channel_id = ?", + self.key.0 + ).fetch_all(&pool.0).await?.into_iter().map(UnparsedItem::parse).collect() + } } #[cfg(test)] @@ -81,7 +96,7 @@ mod tests { use super::*; use crate::{ test_utils::{ - FEED1, FEED2, CHANNEL_TITLE, CHANNEL_DESC, + FEED1, FEED2, CHANNEL_TITLE, CHANNEL_DESC, 
ITEM_GUID, ITEM_GUID2, setup_adapter, setup_channel, }, @@ -176,4 +191,18 @@ mod tests { assert_eq!(channels.len(), 2); } + + #[tokio::test] + async fn get_items() { + let adapter = setup_adapter().await; + let pool = adapter.get_pool(); + let channel = setup_channel(pool).await; + + Item::get_or_create(pool, channel.key(), ITEM_GUID).await.unwrap(); + Item::get_or_create(pool, channel.key(), ITEM_GUID2).await.unwrap(); + + let items = channel.get_items(pool).await.unwrap(); + + assert_eq!(items.len(), 2); + } } diff --git a/koucha/src/db/feed.rs b/koucha/src/db/feed.rs index da645e6..a162190 100644 --- a/koucha/src/db/feed.rs +++ b/koucha/src/db/feed.rs @@ -5,8 +5,10 @@ use crate::{ Channel, ChannelKey, FeedKey, + Item, UserKey, channel::UnparsedChannel, + item::UnparsedItem, }, }; @@ -77,6 +79,22 @@ impl Feed { Ok(()) } + pub async fn get_items( + &self, pool: &AdapterPool, limit: u8, offset: u32 + ) -> Result> { + sqlx::query_as!( + UnparsedItem, + "SELECT i.id as `id!`, i.channel_id, i.fetched_at, i.title, i.description, + i.content + FROM items i + JOIN feed_items fi on i.id = fi.item_id + WHERE feed_id = ? AND archived = FALSE + ORDER BY score DESC + LIMIT ? 
OFFSET ?", + self.key.0, limit, offset + ).fetch_all(&pool.0).await?.into_iter().map(UnparsedItem::parse).collect() + } + pub async fn get_channels( &self, pool: &AdapterPool ) -> Result> { diff --git a/koucha/src/db/item.rs b/koucha/src/db/item.rs new file mode 100644 index 0000000..31ee857 --- /dev/null +++ b/koucha/src/db/item.rs @@ -0,0 +1,122 @@ +use crate::{ + Result, + AdapterPool, + db::{ + ChannelKey, + ItemKey, + }, +}; +use chrono::{DateTime, Utc}; + +pub struct UnparsedItem { + pub id: i64, + pub channel_id: i64, + pub fetched_at: Option, + + pub title: Option, + pub description: Option, + pub content: Option, +} + +impl UnparsedItem { + pub fn parse(self) -> Result { + Ok(Item { + key: ItemKey(self.id), + channel_id: ChannelKey(self.channel_id), + fetched_at: match self.fetched_at { + Some(dt_str) => Some(DateTime::parse_from_rfc2822(&dt_str)? + .with_timezone(&Utc)), + None => None, + }, + + title: self.title, + description: self.description, + content: self.content, + }) + } +} + +pub struct Item { + key: ItemKey, + channel_id: ChannelKey, + + fetched_at: Option>, + title: Option, + description: Option, + content: Option, +} + +impl Item { + pub fn key(&self) -> ItemKey { self.key } + pub fn channel(&self) -> ChannelKey { self.channel_id } + pub fn title(&self) -> Option<&str> { self.title.as_deref() } + pub fn description(&self) -> Option<&str> { self.description.as_deref() } + pub fn content(&self) -> Option<&str> { self.content.as_deref() } + + pub async fn get_or_create( + pool: &AdapterPool, from_channel: ChannelKey, guid: &str + ) -> Result { + + let item = sqlx::query_as!( + UnparsedItem, + "INSERT INTO items (channel_id, guid) + VALUES (?, ?) 
+ ON CONFLICT(channel_id, guid) DO UPDATE SET channel_id = channel_id + RETURNING id as `id!`, channel_id, fetched_at, title, description, + content", + from_channel.0, guid + ).fetch_one(&pool.0).await?.parse(); + + item + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::test_utils::{ + ITEM_GUID, ITEM_TITLE, ITEM_DESC, ITEM_CONT, + setup_adapter, + setup_channel, + }; + use chrono::TimeZone; + + // UnparsedItem tests + #[test] + fn parse_unparsed_item() { + const ITEM_ID: i64 = 1; + const CHANNEL_ID: i64 = 1; + + let date: DateTime = Utc.with_ymd_and_hms(2020,1,1,0,0,0).unwrap(); + let raw_item = UnparsedItem { + id: ITEM_ID, + channel_id: CHANNEL_ID, + fetched_at: Some(date.to_rfc2822()), + title: Some(ITEM_TITLE.to_string()), + description: Some(ITEM_DESC.to_string()), + content: Some(ITEM_CONT.to_string()), + }; + let item = raw_item.parse().unwrap(); + + assert_eq!(item.key.0, ITEM_ID); + assert_eq!(item.channel_id.0, CHANNEL_ID); + assert_eq!(item.fetched_at, Some(date)); + assert_eq!(item.title, Some(ITEM_TITLE.to_string())); + assert_eq!(item.description, Some(ITEM_DESC.to_string())); + assert_eq!(item.content, Some(ITEM_CONT.to_string())); + + } + + // Item Tests + #[tokio::test] + async fn get_or_create_duplicate() { + let adapter = setup_adapter().await; + let pool = adapter.get_pool(); + let channel = setup_channel(pool).await; + + let item1 = Item::get_or_create(pool, channel.key(), ITEM_GUID).await.unwrap(); + let item2 = Item::get_or_create(pool, channel.key(), ITEM_GUID).await.unwrap(); + + assert_eq!(item1.key(), item2.key()); + } +} diff --git a/koucha/src/test_utils.rs b/koucha/src/test_utils.rs index d02643c..6a21ca9 100644 --- a/koucha/src/test_utils.rs +++ b/koucha/src/test_utils.rs @@ -25,6 +25,11 @@ pub const FEED_TITLE: &str = "My Feed!"; pub const FEED_TITLE2: &str = "My Second Feed!"; pub const CHANNEL_TITLE: &str = "My Channel!"; pub const CHANNEL_DESC: &str = "My Channel's description"; +pub const ITEM_GUID: &str = 
"item-guid"; +pub const ITEM_GUID2: &str = "item-guid2"; +pub const ITEM_TITLE: &str = "My Item!"; +pub const ITEM_DESC: &str = "My Item's description"; +pub const ITEM_CONT: &str = "The content of my Item"; pub fn get_datetime() -> DateTime { Utc.with_ymd_and_hms(2020,1,1,0,0,0).unwrap()