//! Feed database model (`koucha/src/db/feed.rs`): the [`Feed`] aggregate
//! and its sqlx-backed SQLite queries.
use crate::{
    AdapterPool,
    Result,
    db::{
        Channel,
        ChannelId,
        FeedId,
        Item,
        UserId,
        channel::UnparsedChannel,
        item::UnparsedItem,
    },
};
2026-01-20 16:36:14 -08:00
pub struct UnparsedFeed {
pub id: i64,
pub title: String,
}
impl UnparsedFeed {
pub fn parse(self) -> Result<Feed> {
Ok(Feed {
id: FeedId(self.id),
title: self.title,
})
}
}
2026-01-20 16:08:36 -08:00
pub struct Feed {
2026-01-21 12:47:47 -08:00
id: FeedId,
title: String,
2026-01-20 16:08:36 -08:00
}
impl Feed {
2026-01-21 12:47:47 -08:00
pub fn id(&self) -> FeedId { self.id }
pub fn title(&self) -> &str { &self.title }
pub async fn get(
pool: &AdapterPool, id: FeedId
) -> Result<Self> {
let feed = sqlx::query_as!(
UnparsedFeed,
"SELECT id, title FROM feeds WHERE id = ?",
id.0
).fetch_one(&pool.0).await?.parse();
feed
}
2026-01-20 16:36:14 -08:00
pub async fn create(
pool: &AdapterPool, user_id: UserId, title: &str
2026-01-20 16:36:14 -08:00
) -> Result<Self> {
let int_id = i64::from(user_id);
2026-01-20 16:36:14 -08:00
let new_feed = sqlx::query_as!(
UnparsedFeed,
"INSERT INTO feeds (user_id, title)
VALUES (?, ?)
2026-01-21 12:47:47 -08:00
RETURNING id as `id!`, title",
int_id, title
).fetch_one(&pool.0).await?.parse();
2026-01-20 16:36:14 -08:00
new_feed
}
pub async fn update_title(
pool: &AdapterPool, id: FeedId, new_title: &str
) -> Result<()> {
sqlx::query!(
"UPDATE feeds SET title = ? WHERE id = ?",
new_title, id.0
).execute(&pool.0).await?;
Ok(())
}
2026-01-20 16:36:14 -08:00
pub async fn add_channel(
&self, pool: &AdapterPool, channel_id: ChannelId
2026-01-20 16:36:14 -08:00
) -> Result<()> {
let int_channel_id = i64::from(channel_id);
2026-01-20 16:36:14 -08:00
sqlx::query!(
"INSERT INTO feed_channels (feed_id, channel_id)
VALUES (?, ?)",
self.id.0, int_channel_id
).execute(&pool.0).await?;
2026-01-20 16:36:14 -08:00
Ok(())
}
2026-01-20 16:08:36 -08:00
pub async fn get_items(
&self, pool: &AdapterPool, limit: u8, offset: u32
2026-01-20 16:08:36 -08:00
) -> Result<Vec<Item>> {
let items: Result<Vec<Item>> = sqlx::query_as!(
UnparsedItem,
"SELECT i.id as `id!`, i.channel_id, i.guid, i.fetched_at, i.title,
i.description, i.content
FROM items i
JOIN feed_items fi on i.id = fi.item_id
2026-01-20 16:08:36 -08:00
WHERE feed_id = ? AND archived = FALSE
ORDER BY score DESC
LIMIT ? OFFSET ?",
self.id.0, limit, offset
).fetch_all(&pool.0).await?.into_iter().map(UnparsedItem::parse).collect();
2026-01-20 16:08:36 -08:00
items
2026-01-20 16:08:36 -08:00
}
pub async fn get_channels(
&self, pool: &AdapterPool
2026-01-20 16:08:36 -08:00
) -> Result<Vec<Channel>> {
let channels: Result<Vec<Channel>> = sqlx::query_as!(
UnparsedChannel,
"SELECT c.id as `id!`, c.title, c.link, c.description, c.last_fetched
FROM channels c
JOIN feed_channels fc on c.id = fc.channel_id
WHERE fc.feed_id = ?",
self.id.0
).fetch_all(&pool.0).await?.into_iter()
2026-01-20 16:08:36 -08:00
.map(UnparsedChannel::parse).collect();
channels
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{
        Adapter,
        AdapterBuilder,
        db::User,
    };

    /// Build an adapter backed by a throwaway in-memory SQLite database.
    async fn setup_adapter() -> Adapter {
        AdapterBuilder::new()
            .database_url("sqlite::memory:")
            .create().await.unwrap()
    }

    #[tokio::test]
    async fn create_feed() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let user = User::create(pool, "Alice").await.unwrap();
        let feed = Feed::create(pool, user.id(), "Tech News").await.unwrap();
        // The returned feed echoes the title and carries a positive
        // database-assigned row id.
        assert_eq!(feed.title(), "Tech News");
        assert!(feed.id().0 > 0);
    }

    // Renamed from `test_update_title` for consistency with
    // `create_feed` — the other test in this module carries no
    // redundant `test_` prefix.
    #[tokio::test]
    async fn update_title() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let user = User::create(pool, "Alice").await.unwrap();
        let feed = Feed::create(pool, user.id(), "Tech News").await.unwrap();
        Feed::update_title(pool, feed.id(), "Technology").await.unwrap();
        // Re-fetch to confirm the rename was persisted, not just
        // applied to the in-memory value.
        let updated = Feed::get(pool, feed.id()).await.unwrap();
        assert_eq!(updated.title(), "Technology");
    }
}