Compare commits

..

5 commits

Author SHA1 Message Date
22871f5789
Fetching data with channels 2026-01-20 11:37:00 -08:00
f42e558db9
Added content fields to initial schema 2026-01-20 11:36:26 -08:00
41badb6c9f
Adapter and User 2026-01-16 13:13:20 -08:00
e2153c2eed
Sql initial schema 2026-01-16 13:13:02 -08:00
985f724267
Add sqlx 2026-01-15 15:45:08 -08:00
6 changed files with 1227 additions and 21 deletions

View file

@@ -24,6 +24,8 @@
rustc rustc
cargo cargo
rust-analyzer rust-analyzer
sqlx-cli
]; ];
RUST_SRC_PATH = "${pkgs.rust.packages.stable.rustPlatform.rustLibSrc}"; RUST_SRC_PATH = "${pkgs.rust.packages.stable.rustPlatform.rustLibSrc}";

1
koucha/.gitignore vendored
View file

@@ -1 +1,2 @@
*.db
/target /target

952
koucha/Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@@ -8,3 +8,5 @@ axum = "0.8.8"
reqwest = "0.13.1" reqwest = "0.13.1"
rss = "2.0.12" rss = "2.0.12"
tokio = { version = "1.49.0", features = ["full"] } tokio = { version = "1.49.0", features = ["full"] }
sqlx = { version = "0.8.6", features = [ "runtime-tokio", "sqlite" ] }
chrono = "0.4.43"

View file

@@ -0,0 +1,57 @@
-- Add migration script here
-- Initial schema: users own feeds, feeds aggregate channels, channels
-- hold fetched items, and feed_items ranks each item per feed.
-- Timestamp columns (last_fetched, fetched_at) are TEXT; the Rust layer
-- writes RFC 2822 strings -- NOTE(review): confirm all writers agree.
-- SQLite enforces foreign keys only when enabled per connection.
PRAGMA foreign_keys = ON;
-- Application accounts; display names are unique.
CREATE TABLE users (
id INTEGER PRIMARY KEY,
name TEXT UNIQUE NOT NULL
);
-- One row per RSS source, keyed by its unique link URL.
CREATE TABLE channels (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL,
link TEXT UNIQUE NOT NULL,
description TEXT,
last_fetched TEXT
);
-- Entries fetched from a channel; UNIQUE(channel_id, guid) deduplicates
-- re-fetches of the same item (see INSERT OR IGNORE in the fetch code).
CREATE TABLE items (
id INTEGER PRIMARY KEY,
channel_id INTEGER NOT NULL,
guid TEXT NOT NULL,
fetched_at TEXT NOT NULL,
title TEXT,
description TEXT,
content TEXT,
UNIQUE(channel_id, guid),
FOREIGN KEY (channel_id) REFERENCES channels(id)
);
-- A user's aggregation of channels.
CREATE TABLE feeds (
id INTEGER PRIMARY KEY,
user_id INTEGER NOT NULL,
FOREIGN KEY (user_id) REFERENCES users(id)
);
-- Many-to-many: which channels a feed subscribes to.
CREATE TABLE feed_channels (
feed_id INTEGER NOT NULL,
channel_id INTEGER NOT NULL,
-- Decay settings will go here
PRIMARY KEY (feed_id, channel_id),
FOREIGN KEY (feed_id) REFERENCES feeds(id),
FOREIGN KEY (channel_id) REFERENCES channels(id)
);
-- Per-feed ranking and archive state for each item.
CREATE TABLE feed_items (
item_id INTEGER NOT NULL,
feed_id INTEGER NOT NULL,
score INTEGER NOT NULL,
archived BOOLEAN DEFAULT FALSE,
PRIMARY KEY (item_id, feed_id),
FOREIGN KEY (feed_id) REFERENCES feeds(id),
FOREIGN KEY (item_id) REFERENCES items(id)
);
-- Covers the hot query: unarchived items of one feed, ordered by score.
CREATE INDEX idx_feed_items_score
ON feed_items(feed_id, archived, score DESC);

View file

@@ -1,21 +1,227 @@
use std::error::Error; use std::{
use reqwest::{ error::Error,
IntoUrl, hash::{Hash, Hasher},
Client, };
use reqwest::Url;
use chrono::{
Utc,
DateTime,
}; };
use rss::Channel as RawChannel;
pub struct Channel { type Result<T> = std::result::Result<T, Box<dyn Error>>;
pub channel: rss::Channel,
pub struct AdapterOptions {
database_url: String,
} }
pub async fn fetch_channel<T: IntoUrl>( impl AdapterOptions {
client: &Client, url: T) -> Result<Channel, Box<dyn Error>> { pub fn new() -> Self {
let content = client.get(url) Self {
database_url: "sqlite:test.db".to_string(),
}
}
pub fn database_url(mut self, url: &str) -> Self {
self.database_url = url.to_string();
self
}
pub async fn create(self) -> Result<Adapter> {
let db = sqlx::sqlite::SqlitePoolOptions::new()
.connect(&self.database_url).await?;
let client = reqwest::Client::new();
Ok(Adapter { db, client })
}
}
/// Handle bundling the SQLite pool and the shared HTTP client.
/// Construct via [`AdapterOptions::create`].
pub struct Adapter {
    db: sqlx::SqlitePool,
    client: reqwest::Client,
}

impl Adapter {
    /// Loads every row from the `users` table.
    ///
    /// # Errors
    /// Propagates any sqlx query failure.
    pub async fn get_all_users(&self) -> Result<Vec<User>> {
        let users = sqlx::query_as!(
            User,
            "SELECT id, name FROM users"
        ).fetch_all(&self.db).await?;

        Ok(users)
    }

    // Crate-internal accessors used by the entity types below.
    fn get_pool(&self) -> &sqlx::SqlitePool { &self.db }
    // BUG FIX: was `&reqwest::client` (lowercase), which names no type
    // and does not compile; the client type is `reqwest::Client`.
    fn get_client(&self) -> &reqwest::Client { &self.client }
}
/// A row in the `users` table.
pub struct User {
    id: i64,
    name: String,
}

impl User {
    /// Inserts a new user and returns the created row.
    ///
    /// # Errors
    /// Fails if the insert violates the UNIQUE constraint on `name`,
    /// or on any other database error.
    pub async fn create(adapter: &Adapter, name: &str) -> Result<Self> {
        let result = sqlx::query!("INSERT INTO users (name) VALUES (?)", name)
            .execute(adapter.get_pool()).await?;

        Ok(Self {
            id: result.last_insert_rowid(),
            name: name.to_string(),
        })
    }

    /// Renames the user in the database, then updates the in-memory copy.
    pub async fn change_name(
        &mut self, adapter: &Adapter, new_name: &str) -> Result<()> {
        sqlx::query!(
            "UPDATE users SET name = ? WHERE id = ?",
            new_name, self.id
        ).execute(adapter.get_pool()).await?;

        // Mutate `self` only after the write succeeded.
        self.name = new_name.to_string();
        Ok(())
    }

    /// Lists the feeds owned by this user.
    pub async fn get_feeds(&self, adapter: &Adapter) -> Result<Vec<Feed>> {
        let feeds = sqlx::query_as!(
            Feed,
            "SELECT id FROM feeds WHERE user_id = ?",
            self.id
        ).fetch_all(adapter.get_pool()).await?;

        Ok(feeds)
    }

    /// Database id of this user. Added so callers of `get_all_users`
    /// can actually identify rows; the field itself stays private.
    pub fn id(&self) -> i64 { self.id }

    /// The user's display name.
    pub fn name(&self) -> &str { &self.name }
}
/// A user-owned feed: a scored, pageable view over channel items.
pub struct Feed {
    id: i64,
}

impl Feed {
    /// Returns one page of unarchived items, highest score first.
    pub async fn get_items(
        &self, adapter: &Adapter, limit: u8, offset: u32) -> Result<Vec<Item>> {
        let page = sqlx::query_as!(
            Item,
            "SELECT item_id as id FROM feed_items
            WHERE feed_id = ? AND archived = FALSE
            ORDER BY score DESC
            LIMIT ? OFFSET ?",
            self.id, limit, offset
        ).fetch_all(adapter.get_pool()).await?;

        Ok(page)
    }

    /// Loads every channel this feed subscribes to, parsing stored
    /// links and RFC 2822 timestamps back into typed values.
    pub async fn get_channels(&self, adapter: &Adapter) -> Result<Vec<Channel>> {
        let rows = sqlx::query!(
            "SELECT c.id as `id!`, c.title, c.link, c.description, c.last_fetched
            FROM channels c
            JOIN feed_channels fc on c.id = fc.channel_id
            WHERE fc.feed_id = ?",
            self.id
        ).fetch_all(adapter.get_pool()).await?;

        // Fallible conversion of each row; the first parse error aborts
        // the whole collect, matching `?`-in-a-loop semantics.
        rows.into_iter()
            .map(|row| -> Result<Channel> {
                let last_fetched = row.last_fetched.as_deref()
                    .map(DateTime::parse_from_rfc2822)
                    .transpose()?
                    .map(|dt| dt.with_timezone(&Utc));

                Ok(Channel {
                    id: row.id,
                    title: row.title,
                    link: Url::parse(&row.link)?,
                    description: row.description,
                    last_fetched,
                })
            })
            .collect()
    }
}
pub struct Channel {
id: i64,
title: String,
link: Url,
description: Option<String>,
last_fetched: Option<DateTime<Utc>>,
}
impl Channel {
pub async fn fetch(mut self, adapter: &Adapter) -> Result<Self> {
let bytestream = adapter.get_client().get(self.link.clone())
.send().await? .send().await?
.bytes().await?; .bytes().await?;
let raw_channel = RawChannel::read_from(&content[..])?; let rss_channel = rss::Channel::read_from(&bytestream[..])?;
println!("{}", raw_channel.title); self.title = rss_channel.title;
Ok(Channel { channel: raw_channel }) self.link = Url::parse(&rss_channel.link)?;
self.description = Some(rss_channel.description);
let now = Utc::now();
self.last_fetched = Some(now);
sqlx::query!(
"UPDATE channels
SET title = ?, link = ?, description = ?,
last_fetched = ?
WHERE id = ?",
self.title, self.link.as_str(), self.description, now.to_rfc2822(),
self.id
).execute(adapter.get_pool()).await?;
fn get_or_create_guid(item: &rss::Item) -> String {
if let Some(guid) = item.guid() {
return guid.value().to_string();
}
let mut hasher = std::collections::hash_map::DefaultHasher::new();
item.link().unwrap_or("").hash(&mut hasher);
item.title().unwrap_or("").hash(&mut hasher);
item.description().unwrap_or("").hash(&mut hasher);
format!("gen-{:x}", hasher.finish())
}
for item in rss_channel.items {
sqlx::query!(
"INSERT OR IGNORE INTO items
(channel_id, guid, fetched_at, title, description, content)
VALUES (?, ?, ?, ?, ?, ?)",
self.id, get_or_create_guid(&item), now.to_rfc2822(),
item.title().unwrap_or(""), item.description().unwrap_or(""),
item.content().unwrap_or("")
)
.execute(adapter.get_pool())
.await?;
}
Ok(self)
}
}
/// A single feed entry row; currently only carries the database id
/// (populated by `query_as!` from `feed_items.item_id`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Item {
    id: i64,
}

impl Item {
    /// Database id of this item. Without this accessor the private
    /// field made `Feed::get_items` results unusable to callers.
    pub fn id(&self) -> i64 {
        self.id
    }
}