Compare commits

...

8 commits

Author SHA1 Message Date
bab52687c7
db&score, feed_channel impl & score from db
Adds a feed_channel implementation which reflects a feed's specific
settings per channel.

Also added an UnparsedTimedScore to score which allows parsing to score
from db in a controlled fashion.
2026-02-06 15:01:21 -08:00
fcb03ead1f
fetch, create fetch mod and AdapterClient
Creates a fetch mod for fetching remote rss content.

This implementation is barebones and not great since it doesn't pass any
compression or timestamp information.
2026-02-06 15:01:21 -08:00
070c55a95b
db, add item and functions to get items
Creates item. It's pretty barebones until I implement fetching.

I also added get functions for feed and channel to get items with.
2026-02-06 15:01:21 -08:00
2dc4c7bc99
db, add channel and feed; user.get_feeds
Adds the primary functionality for channel and feed

Also adds a function to user to get the feeds associated with a user.
2026-02-06 15:01:21 -08:00
99321e9a5d
db, add core user functionality and tests; db mod
Adds all the basic user functions, and tests for them.

This also initializes the DB module, which will have more things in it
2026-02-06 15:01:21 -08:00
b30eefd516
score&test, Tests for a42853a & test_utils mod
Tests for all the core functionality implemented to score in a42853a.

Optimistically adding a test_utils mod for shared test code
2026-02-06 15:01:21 -08:00
dac085756a
db&adapter, add sqlx + schema & add adapter
Adds SQLX for database management, and an Adapter interface for
interacting with it through the type "AdapterPool".

Creates an initial_schema with everything I think I'll need.
2026-02-06 15:01:17 -08:00
a42853ac5a
score, initialize core functionality and types
Adds Chrono package

Adds a "Result" type to lib.rs

creates helpful type aliases for Score, Boost, and Gravity.

Creates a "TimedScore" which is a value and time data for calculating
score over time. There are two forms of this, Boosted and Decaying.
Since a Score can either be decaying over time or have been boosted
recently and be kept from decaying for some time.

Also implements a BoostedScore and a DecayingScore, which are both
TimedScores which represent the different states. These are important
because you can't boost a BoostedScore for instance.
2026-02-05 12:13:49 -08:00
16 changed files with 4746 additions and 5 deletions

8
flake.lock generated
View file

@ -2,16 +2,16 @@
"nodes": { "nodes": {
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1768127708, "lastModified": 1769933782,
"narHash": "sha256-1Sm77VfZh3mU0F5OqKABNLWxOuDeHIlcFjsXeeiPazs=", "narHash": "sha256-GlZemJ2dxhXMMq6TNyt588OFv4/jIt3J1QVBO9MspBE=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "ffbc9f8cbaacfb331b6017d5a5abb21a492c9a38", "rev": "64728753f1a42c81c5688a136a6bee173665acc9",
"type": "github" "type": "github"
}, },
"original": { "original": {
"owner": "NixOS", "owner": "NixOS",
"ref": "nixos-unstable", "ref": "nixos-25.11-small",
"repo": "nixpkgs", "repo": "nixpkgs",
"type": "github" "type": "github"
} }

View file

@ -2,7 +2,7 @@
description = "Koucha rust flake"; description = "Koucha rust flake";
inputs = { inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.11-small";
}; };
outputs = { self, nixpkgs }: outputs = { self, nixpkgs }:
@ -24,6 +24,8 @@
rustc rustc
cargo cargo
rust-analyzer rust-analyzer
sqlx-cli
]; ];
RUST_SRC_PATH = "${pkgs.rust.packages.stable.rustPlatform.rustLibSrc}"; RUST_SRC_PATH = "${pkgs.rust.packages.stable.rustPlatform.rustLibSrc}";

1
koucha/.gitignore vendored
View file

@ -1 +1,2 @@
*.db
/target /target

2980
koucha/Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -4,3 +4,8 @@ version = "0.1.0"
edition = "2024" edition = "2024"
[dependencies] [dependencies]
reqwest = "0.13.1"
rss = "2.0.12"
tokio = { version = "1.49.0", features = ["full"] }
sqlx = { version = "0.8.6", features = [ "runtime-tokio", "sqlite" ] }
chrono = "0.4.43"

View file

@ -0,0 +1,62 @@
-- Add migration script here
-- Initial schema.
-- Ownership chain: users -> feeds -> (feed_channels) -> channels -> items,
-- with per-feed item state kept in feed_items.
PRAGMA foreign_keys = ON;
CREATE TABLE users (
    id INTEGER PRIMARY KEY,
    name TEXT UNIQUE NOT NULL
);
-- A remote RSS channel, shared between feeds. Timestamp columns are TEXT
-- (RFC 2822 strings, parsed on the Rust side).
CREATE TABLE channels (
    id INTEGER PRIMARY KEY,
    title TEXT NOT NULL,
    link TEXT UNIQUE NOT NULL,
    description TEXT,
    last_fetched TEXT
);
-- An entry in a channel; the guid is unique per channel, not globally.
CREATE TABLE items (
    id INTEGER PRIMARY KEY,
    channel_id INTEGER NOT NULL,
    guid TEXT NOT NULL,
    fetched_at TEXT,
    title TEXT,
    description TEXT,
    content TEXT,
    UNIQUE(channel_id, guid),
    FOREIGN KEY (channel_id) REFERENCES channels(id)
);
-- A user's named collection of channels.
CREATE TABLE feeds (
    id INTEGER PRIMARY KEY,
    user_id INTEGER NOT NULL,
    title TEXT NOT NULL,
    FOREIGN KEY (user_id) REFERENCES users(id)
);
-- Per-feed scoring settings for a subscribed channel. NULL columns
-- presumably fall back to crate-level defaults — confirm against score.rs.
CREATE TABLE feed_channels (
    feed_id INTEGER NOT NULL,
    channel_id INTEGER NOT NULL,
    initial_score INTEGER,
    gravity INTEGER,
    boost INTEGER,
    PRIMARY KEY (feed_id, channel_id),
    FOREIGN KEY (feed_id) REFERENCES feeds(id),
    FOREIGN KEY (channel_id) REFERENCES channels(id)
);
-- Per-feed state of an item (score, boost timestamp, archive flag).
CREATE TABLE feed_items (
    item_id INTEGER NOT NULL,
    feed_id INTEGER NOT NULL,
    score INTEGER NOT NULL,
    last_updated TEXT NOT NULL,
    boosted_at TEXT,
    archived BOOLEAN DEFAULT FALSE,
    PRIMARY KEY (item_id, feed_id),
    FOREIGN KEY (feed_id) REFERENCES feeds(id),
    FOREIGN KEY (item_id) REFERENCES items(id)
);
-- Supports the "top unarchived items for a feed, by score" query.
CREATE INDEX idx_feed_items_score
ON feed_items(feed_id, archived, score DESC);

30
koucha/src/db.rs Normal file
View file

@ -0,0 +1,30 @@
// Entity modules; each re-exports its primary public type.
mod user;
pub use user::User;
mod feed;
pub use feed::Feed;
mod feed_channel;
pub use feed_channel::FeedChannel;
mod channel;
pub use channel::Channel;
mod item;
pub use item::Item;
// Generates a strongly-typed wrapper around database primary keys so keys
// for different tables cannot be accidentally interchanged.
// `define_key!(Name)` wraps a single i64 rowid; the second arm builds a
// composite key struct from named fields.
macro_rules! define_key {
    ($name:ident) => {
        #[derive(PartialEq, Debug, Copy, Clone)]
        pub struct $name(i64);
    };
    ($name:ident, $($field:ident : $type:ty),* $(,)?) => {
        #[derive(PartialEq, Debug, Copy, Clone)]
        pub struct $name {
            $($field: $type),*
        }
    };
}
define_key!(UserKey);
define_key!(FeedKey);
// Composite key: a feed_channels row is identified by (feed, channel).
define_key!(FeedChannelKey, feed_key: FeedKey, channel_key: ChannelKey);
define_key!(ChannelKey);
define_key!(ItemKey);

243
koucha/src/db/channel.rs Normal file
View file

@ -0,0 +1,243 @@
use reqwest::Url;
use chrono::{DateTime, Utc};
use crate::{
Result,
AdapterPool,
db::{
ChannelKey,
Item,
FeedChannel,
feed_channel::UnparsedFeedChannel,
item::UnparsedItem,
},
};
/// A raw `channels` row as it comes out of SQLite, prior to validation.
pub struct UnparsedChannel {
    pub id: i64,
    pub title: String,
    pub link: String,
    pub description: Option<String>,
    pub last_fetched: Option<String>,
}
impl UnparsedChannel {
    /// Validates this raw row into a [`Channel`]: the link must be a valid
    /// URL and `last_fetched`, when present, a valid RFC 2822 timestamp.
    ///
    /// # Errors
    /// Fails if the link or the stored timestamp cannot be parsed.
    pub fn parse(self) -> Result<Channel> {
        let UnparsedChannel { id, title, link, description, last_fetched } = self;
        let link = Url::parse(&link)?;
        let last_fetched = last_fetched
            .as_deref()
            .map(DateTime::parse_from_rfc2822)
            .transpose()?
            .map(|parsed| parsed.with_timezone(&Utc));
        Ok(Channel {
            key: ChannelKey(id),
            title,
            link,
            description,
            last_fetched,
        })
    }
}
/// A validated RSS channel as stored in the `channels` table.
pub struct Channel {
    key: ChannelKey,
    title: String,
    link: Url,
    description: Option<String>,
    last_fetched: Option<DateTime<Utc>>,
}
impl Channel {
    pub fn key(&self) -> ChannelKey { self.key }
    pub fn title(&self) -> &str { &self.title }
    pub fn link(&self) -> &Url { &self.link }
    pub fn description(&self) -> Option<&str> { self.description.as_deref() }
    pub fn last_fetched(&self) -> Option<DateTime<Utc>> { self.last_fetched }
    /// Loads every channel in the database.
    pub async fn get_all(pool: &AdapterPool) -> Result<Vec<Self>> {
        sqlx::query_as!(
            UnparsedChannel,
            "SELECT id, title, link, description, last_fetched FROM channels"
        ).fetch_all(&pool.0).await?.into_iter().map(UnparsedChannel::parse).collect()
    }
    /// Loads a single channel by key; errors if the row does not exist.
    pub async fn get(pool: &AdapterPool, key: ChannelKey) -> Result<Self> {
        sqlx::query_as!(
            UnparsedChannel,
            "SELECT id, title, link, description, last_fetched
            FROM channels
            WHERE id = ?",
            key.0
        ).fetch_one(&pool.0).await?.parse()
    }
    /// Inserts a channel for `link`, or returns the existing one.
    ///
    /// The no-op `DO UPDATE SET link = link` makes the upsert RETURN the
    /// existing row on conflict instead of returning nothing.
    pub async fn get_or_create(
        pool: &AdapterPool, link: Url
    ) -> Result<Self> {
        let link_str = link.as_str();
        sqlx::query_as!(
            UnparsedChannel,
            "INSERT INTO channels (title, link)
            VALUES(?, ?)
            ON CONFLICT(link) DO UPDATE SET link = link
            RETURNING id, title, link, description, last_fetched",
            link_str, link_str // We use the url as a placeholder title
        ).fetch_one(&pool.0).await?.parse()
    }
    /// All per-feed settings rows that reference this channel.
    async fn get_feed_channels(
        &self, pool: &AdapterPool
    ) -> Result<Vec<FeedChannel>> {
        sqlx::query_as!(
            UnparsedFeedChannel,
            "SELECT channel_id, feed_id, initial_score, gravity, boost
            FROM feed_channels
            WHERE channel_id = ?",
            self.key.0
        ).fetch_all(&pool.0).await?.into_iter()
            .map(UnparsedFeedChannel::parse).collect()
    }
    /// All items stored for this channel.
    // `id as `id!`` tells sqlx the RETURNED primary key is non-null.
    pub async fn get_items(&self, pool: &AdapterPool) -> Result<Vec<Item>> {
        sqlx::query_as!(
            UnparsedItem,
            "SELECT id as `id!`, channel_id, fetched_at, title, description,
            content
            FROM items
            WHERE channel_id = ?",
            self.key.0
        ).fetch_all(&pool.0).await?.into_iter().map(UnparsedItem::parse).collect()
    }
}
#[cfg(test)]
mod tests {
    // Unit test for row parsing plus integration-style tests run against a
    // fresh adapter/database provided by `test_utils`.
    use super::*;
    use crate::{
        db::{Feed, User},
        test_utils::{
            FEED1, FEED2, CHANNEL_TITLE, CHANNEL_DESC, USERNAME, FEED_TITLE,
            FEED_TITLE2, ITEM_GUID, ITEM_GUID2,
            setup_adapter,
            setup_channel,
        },
    };
    use chrono::TimeZone;
    // NOTE(review): misnamed — this parses an UnparsedChannel, not an item.
    #[test]
    fn parse_unparsed_item() {
        const CHANNEL_ID: i64 = 1;
        let date: DateTime<Utc> = Utc.with_ymd_and_hms(2020,1,1,0,0,0).unwrap();
        let raw_channel = UnparsedChannel {
            id: CHANNEL_ID,
            title: CHANNEL_TITLE.to_string(),
            link: FEED1.to_string(),
            description: Some(CHANNEL_DESC.to_string()),
            last_fetched: Some(date.to_rfc2822()),
        };
        let channel = raw_channel.parse().unwrap();
        assert_eq!(channel.key.0, CHANNEL_ID);
        assert_eq!(channel.title, CHANNEL_TITLE);
        assert_eq!(channel.link.as_str(), FEED1);
        assert_eq!(channel.description, Some(CHANNEL_DESC.to_string()));
        assert_eq!(channel.last_fetched, Some(date));
    }
    #[tokio::test]
    async fn get_all() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let url1 = Url::parse(FEED1).unwrap();
        let url2 = Url::parse(FEED2).unwrap();
        Channel::get_or_create(pool, url1).await.unwrap();
        Channel::get_or_create(pool, url2).await.unwrap();
        let channels = Channel::get_all(pool).await.unwrap();
        assert_eq!(channels.len(), 2);
    }
    #[tokio::test]
    async fn get() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let channel_a = setup_channel(pool).await;
        let channel_b = Channel::get(pool, channel_a.key()).await.unwrap();
        assert_eq!(channel_a.key, channel_b.key);
        assert_eq!(channel_a.title, channel_b.title);
        assert_eq!(channel_a.link, channel_b.link);
        assert_eq!(channel_a.last_fetched, channel_b.last_fetched);
        assert_eq!(channel_a.description, channel_b.description);
    }
    #[tokio::test]
    async fn create() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let url_feed = Url::parse(FEED1).unwrap();
        let channel = Channel::get_or_create(pool, url_feed).await.unwrap();
        assert!(channel.key().0 > 0);
        assert_eq!(channel.link().as_str(), FEED1);
        assert!(channel.title().len() > 0);
    }
    #[tokio::test]
    async fn create_duplicate_returns_existing() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let url_feed = Url::parse(FEED1).unwrap();
        let channel1 = Channel::get_or_create(pool, url_feed.clone()).await.unwrap();
        let channel2 = Channel::get_or_create(pool, url_feed).await.unwrap();
        assert_eq!(channel1.key(), channel2.key());
    }
    // NOTE(review): duplicates `get_all` above — consider removing one.
    #[tokio::test]
    async fn get_all_channels() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let url_feed1 = Url::parse(FEED1).unwrap();
        let url_feed2 = Url::parse(FEED2).unwrap();
        Channel::get_or_create(pool, url_feed1).await.unwrap();
        Channel::get_or_create(pool, url_feed2).await.unwrap();
        let channels = Channel::get_all(pool).await.unwrap();
        assert_eq!(channels.len(), 2);
    }
    #[tokio::test]
    async fn get_feed_channels() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let channel = setup_channel(pool).await;
        let user = User::create(pool, USERNAME).await.unwrap();
        let feed1 = Feed::create(pool, user.key(), FEED_TITLE).await.unwrap();
        let feed2 = Feed::create(pool, user.key(), FEED_TITLE2).await.unwrap();
        feed1.add_channel(pool, channel.key).await.unwrap();
        feed2.add_channel(pool, channel.key).await.unwrap();
        let fc_list = channel.get_feed_channels(pool).await.unwrap();
        assert_eq!(fc_list.len(), 2);
    }
    #[tokio::test]
    async fn get_items() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let channel = setup_channel(pool).await;
        Item::get_or_create(pool, channel.key(), ITEM_GUID).await.unwrap();
        Item::get_or_create(pool, channel.key(), ITEM_GUID2).await.unwrap();
        let items = channel.get_items(pool).await.unwrap();
        assert_eq!(items.len(), 2);
    }
}

209
koucha/src/db/feed.rs Normal file
View file

@ -0,0 +1,209 @@
use crate::{
AdapterPool,
Result,
db::{
Channel,
ChannelKey,
FeedKey,
Item,
UserKey,
channel::UnparsedChannel,
item::UnparsedItem,
},
};
/// A raw `feeds` row straight from the database.
pub struct UnparsedFeed {
    pub id: i64,
    pub title: String,
}
impl UnparsedFeed {
    /// Converts this raw row into a typed [`Feed`].
    pub fn parse(self) -> Result<Feed> {
        let UnparsedFeed { id, title } = self;
        let feed = Feed {
            key: FeedKey(id),
            title,
        };
        Ok(feed)
    }
}
/// A user's feed: a named aggregation of channels with per-feed item scores.
pub struct Feed {
    key: FeedKey,
    title: String,
}
impl Feed {
    pub fn key(&self) -> FeedKey { self.key }
    pub fn title(&self) -> &str { &self.title }
    /// Loads a feed by key; errors if the row does not exist.
    pub async fn get(
        pool: &AdapterPool, key: FeedKey
    ) -> Result<Self> {
        sqlx::query_as!(
            UnparsedFeed,
            "SELECT id, title FROM feeds WHERE id = ?",
            key.0
        ).fetch_one(&pool.0).await?.parse()
    }
    /// Creates a feed owned by `user_key`.
    pub async fn create(
        pool: &AdapterPool, user_key: UserKey, title: &str
    ) -> Result<Self> {
        sqlx::query_as!(
            UnparsedFeed,
            "INSERT INTO feeds (user_id, title)
            VALUES (?, ?)
            RETURNING id as `id!`, title",
            user_key.0, title
        ).fetch_one(&pool.0).await?.parse()
    }
    /// Renames the feed identified by `key`.
    pub async fn update_title(
        pool: &AdapterPool, key: FeedKey, new_title: &str
    ) -> Result<()> {
        sqlx::query!(
            "UPDATE feeds SET title = ? WHERE id = ?",
            new_title, key.0
        ).execute(&pool.0).await?;
        Ok(())
    }
    /// Subscribes this feed to a channel with all-default (NULL) settings.
    pub async fn add_channel(
        &self, pool: &AdapterPool, channel_key: ChannelKey
    ) -> Result<()> {
        sqlx::query!(
            "INSERT INTO feed_channels (feed_id, channel_id)
            VALUES (?, ?)",
            self.key.0, channel_key.0
        ).execute(&pool.0).await?;
        Ok(())
    }
    /// A page of this feed's unarchived items, highest score first.
    pub async fn get_items(
        &self, pool: &AdapterPool, limit: u8, offset: u32
    ) -> Result<Vec<Item>> {
        sqlx::query_as!(
            UnparsedItem,
            "SELECT i.id as `id!`, i.channel_id, i.fetched_at, i.title, i.description,
            i.content
            FROM items i
            JOIN feed_items fi on i.id = fi.item_id
            WHERE feed_id = ? AND archived = FALSE
            ORDER BY score DESC
            LIMIT ? OFFSET ?",
            self.key.0, limit, offset
        ).fetch_all(&pool.0).await?.into_iter().map(UnparsedItem::parse).collect()
    }
    /// All channels this feed is subscribed to.
    pub async fn get_channels(
        &self, pool: &AdapterPool
    ) -> Result<Vec<Channel>> {
        sqlx::query_as!(
            UnparsedChannel,
            "SELECT c.id as `id!`, c.title, c.link, c.description, c.last_fetched
            FROM channels c
            JOIN feed_channels fc on c.id = fc.channel_id
            WHERE fc.feed_id = ?",
            self.key.0
        ).fetch_all(&pool.0).await?.into_iter()
            .map(UnparsedChannel::parse).collect()
    }
}
#[cfg(test)]
mod tests {
    // Unit test for row parsing plus integration tests against a fresh
    // database from `test_utils`.
    use super::*;
    use crate::{
        db::User,
        test_utils::{
            FEED_TITLE, USERNAME, FEED1, FEED2,
            setup_adapter,
            setup_feed,
            setup_channel,
        }
    };
    use reqwest::Url;
    #[test]
    fn parse() {
        const FID: i64 = 1;
        let uf = UnparsedFeed {
            id: FID,
            title: FEED_TITLE.to_string(),
        };
        let f = uf.parse().unwrap();
        assert_eq!(f.key.0, FID);
        assert_eq!(f.title, FEED_TITLE);
    }
    #[tokio::test]
    async fn get() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let feed = setup_feed(pool).await;
        let gotten_feed = Feed::get(pool, feed.key).await.unwrap();
        assert_eq!(feed.key, gotten_feed.key);
        assert_eq!(feed.title, gotten_feed.title);
    }
    #[tokio::test]
    async fn create() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let user = User::create(pool, USERNAME).await.unwrap();
        let feed = Feed::create(pool, user.key(), FEED_TITLE).await.unwrap();
        assert!(feed.key().0 > 0);
        assert_eq!(feed.title(), FEED_TITLE);
    }
    #[tokio::test]
    async fn update_title() {
        const NEW_FEED_TITLE: &str = "My NEW feed!";
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let feed = setup_feed(pool).await;
        Feed::update_title(pool, feed.key(), NEW_FEED_TITLE).await.unwrap();
        let updated = Feed::get(pool, feed.key()).await.unwrap();
        assert_eq!(updated.title(), NEW_FEED_TITLE);
    }
    #[tokio::test]
    async fn add_channel() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let feed = setup_feed(pool).await;
        let channel = setup_channel(pool).await;
        feed.add_channel(pool, channel.key()).await.unwrap();
        let channels = feed.get_channels(pool).await.unwrap();
        let gotten_channel = &channels[0];
        assert_eq!(gotten_channel.key().0, channel.key().0);
    }
    #[tokio::test]
    async fn get_channels() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let feed = setup_feed(pool).await;
        let url1 = Url::parse(FEED1).unwrap();
        let channel1 = Channel::get_or_create(pool, url1).await.unwrap();
        let url2 = Url::parse(FEED2).unwrap();
        let channel2 = Channel::get_or_create(pool, url2).await.unwrap();
        feed.add_channel(pool, channel1.key()).await.unwrap();
        feed.add_channel(pool, channel2.key()).await.unwrap();
        let channels = feed.get_channels(pool).await.unwrap();
        assert_eq!(channels.len(), 2);
    }
}

View file

@ -0,0 +1,188 @@
use crate::{
Result,
AdapterPool,
db::{
Channel,
ChannelKey,
Feed,
FeedKey,
FeedChannelKey,
Item,
},
score::{
Score,
Gravity,
Boost,
},
};
use chrono::{Utc, DateTime};
/// A raw `feed_channels` row straight from the database; NULL settings
/// columns arrive as `None`.
pub struct UnparsedFeedChannel {
    pub channel_id: i64,
    pub feed_id: i64,
    pub initial_score: Option<i64>,
    pub gravity: Option<i64>,
    pub boost: Option<i64>,
}
impl UnparsedFeedChannel {
    /// Converts this raw row into a typed [`FeedChannel`].
    pub fn parse(self) -> Result<FeedChannel> {
        let key = FeedChannelKey {
            feed_key: FeedKey(self.feed_id),
            channel_key: ChannelKey(self.channel_id),
        };
        let initial_score = Score::new(self.initial_score);
        let gravity = Gravity::new(self.gravity);
        let boost = Boost::new(self.boost);
        Ok(FeedChannel { key, initial_score, gravity, boost })
    }
}
/// Per-feed settings for a subscribed channel, plus helpers operating on the
/// (feed, channel) pair.
pub struct FeedChannel {
    key: FeedChannelKey,
    initial_score: Score,
    gravity: Gravity,
    boost: Boost,
}
impl FeedChannel {
    /// Loads the channel side of this pair.
    pub async fn get_channel(&self, pool: &AdapterPool) -> Result<Channel> {
        Channel::get(pool, self.key.channel_key).await
    }
    /// Loads the feed side of this pair.
    pub async fn get_feed(&self, pool: &AdapterPool) -> Result<Feed> {
        Feed::get(pool, self.key.feed_key).await
    }
    /// Adds `item` to this pair's feed at `initial_score`, timestamped now.
    pub async fn add_item(
        &self, pool: &AdapterPool, item: &Item
    ) -> Result<()> {
        self.add_item_at(pool, item, Utc::now()).await
    }
    // Inner implementation with an injectable timestamp, used by tests.
    async fn add_item_at(
        &self, pool: &AdapterPool, item: &Item, add_at: DateTime<Utc>
    ) -> Result<()> {
        let int_item_id = item.key().0;
        let int_initial_score = i64::from(self.initial_score);
        let string_last_updated = add_at.to_rfc2822();
        // INSERT OR IGNORE: adding the same item to a feed twice is a no-op.
        sqlx::query!(
            "INSERT OR IGNORE INTO feed_items (feed_id, item_id, score, last_updated)
            VALUES (?, ?, ?, ?)",
            self.key.feed_key.0, int_item_id, int_initial_score, string_last_updated
        ).execute(&pool.0).await?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    // Unit test for row parsing plus integration tests that build FeedChannel
    // values by hand (with partially fake keys) against a fresh database.
    use super::*;
    use reqwest::Url;
    use crate::{
        db::{
            Channel,
            FeedKey,
            User
        },
        test_utils::{
            FEED1, setup_adapter, get_datetime
        },
    };
    #[test]
    fn parse() {
        const CID: i64 = 1;
        const FID: i64 = 2;
        const IS: i64 = 3;
        const G: i64 = 4;
        const B: i64 = 5;
        let ufc = UnparsedFeedChannel {
            channel_id: CID,
            feed_id: FID,
            initial_score: Some(IS),
            gravity: Some(G),
            boost: Some(B),
        };
        let fc = ufc.parse().unwrap();
        assert_eq!(fc.key.channel_key.0, CID);
        assert_eq!(fc.key.feed_key.0, FID);
        assert_eq!(i64::from(fc.initial_score), IS);
        assert_eq!(i64::from(fc.gravity), G);
        assert_eq!(i64::from(fc.boost), B);
    }
    // FeedChannel Tests
    #[tokio::test]
    async fn get_channel() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let url = Url::parse(FEED1).unwrap();
        let channel = Channel::get_or_create(pool, url).await.unwrap();
        let fc = FeedChannel {
            key: FeedChannelKey {
                feed_key: FeedKey(1), // Fake Feed
                channel_key: channel.key(),
            },
            initial_score: Score::new(None),
            gravity: Gravity::new(None),
            boost: Boost::new(None),
        };
        let channel_from_fc = fc.get_channel(pool).await.unwrap();
        assert_eq!(channel_from_fc.key(), channel.key());
    }
    #[tokio::test]
    async fn get_feed() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let user = User::create(pool, "Alice").await.unwrap();
        let feed = Feed::create(pool, user.key(), "My Feed").await.unwrap();
        let fc = FeedChannel {
            key: FeedChannelKey {
                feed_key: feed.key(),
                channel_key: ChannelKey(1), // Fake Channel
            },
            initial_score: Score::new(None),
            gravity: Gravity::new(None),
            boost: Boost::new(None),
        };
        let feed_from_fc = fc.get_feed(pool).await.unwrap();
        assert_eq!(feed_from_fc.key(), feed.key());
    }
    // NOTE(review): `pub` on a test fn has no effect — drop it for consistency.
    #[tokio::test]
    pub async fn add_item() {
        let dt = get_datetime();
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let user = User::create(pool, "Alice").await.unwrap();
        let feed = Feed::create(pool, user.key(), "My Feed").await.unwrap();
        let url = Url::parse(FEED1).unwrap();
        let channel = Channel::get_or_create(pool, url).await.unwrap();
        let fc = FeedChannel {
            key: FeedChannelKey {
                feed_key: feed.key(),
                channel_key: channel.key(),
            },
            initial_score: Score::new(None),
            gravity: Gravity::new(None),
            boost: Boost::new(None),
        };
        let item = Item::get_or_create(pool, channel.key(), "item-guid").await.unwrap();
        fc.add_item_at(pool, &item, dt).await.unwrap();
        let items = feed.get_items(pool, 1, 0).await.unwrap();
        assert_eq!(items[0].key(), item.key());
    }
}

122
koucha/src/db/item.rs Normal file
View file

@ -0,0 +1,122 @@
use crate::{
Result,
AdapterPool,
db::{
ChannelKey,
ItemKey,
},
};
use chrono::{DateTime, Utc};
/// A raw `items` row as read from SQLite, prior to timestamp parsing.
pub struct UnparsedItem {
    pub id: i64,
    pub channel_id: i64,
    pub fetched_at: Option<String>,
    pub title: Option<String>,
    pub description: Option<String>,
    pub content: Option<String>,
}
impl UnparsedItem {
    /// Parses this raw row into an [`Item`], converting the stored RFC 2822
    /// `fetched_at` (when present) into a UTC `DateTime`.
    ///
    /// # Errors
    /// Fails if `fetched_at` is present but is not valid RFC 2822.
    pub fn parse(self) -> Result<Item> {
        Ok(Item {
            key: ItemKey(self.id),
            channel_id: ChannelKey(self.channel_id),
            // map/transpose chain, matching `UnparsedChannel::parse` for
            // consistency (replaces an equivalent but more verbose `match`).
            fetched_at: self.fetched_at.as_deref()
                .map(DateTime::parse_from_rfc2822)
                .transpose()?
                .map(|dt| dt.with_timezone(&Utc)),
            title: self.title,
            description: self.description,
            content: self.content,
        })
    }
}
/// A single entry belonging to a channel.
pub struct Item {
    key: ItemKey,
    channel_id: ChannelKey,
    fetched_at: Option<DateTime<Utc>>,
    title: Option<String>,
    description: Option<String>,
    content: Option<String>,
}
impl Item {
    pub fn key(&self) -> ItemKey { self.key }
    pub fn channel(&self) -> ChannelKey { self.channel_id }
    pub fn title(&self) -> Option<&str> { self.title.as_deref() }
    pub fn description(&self) -> Option<&str> { self.description.as_deref() }
    pub fn content(&self) -> Option<&str> { self.content.as_deref() }
    /// Inserts an item for (`from_channel`, `guid`), or returns the existing
    /// row when that pair already exists.
    ///
    /// The no-op `DO UPDATE SET channel_id = channel_id` makes the upsert
    /// RETURN the existing row on conflict instead of returning nothing.
    pub async fn get_or_create(
        pool: &AdapterPool, from_channel: ChannelKey, guid: &str
    ) -> Result<Self> {
        // Return the parsed result directly instead of binding it to a
        // temporary first (clippy::let_and_return).
        sqlx::query_as!(
            UnparsedItem,
            "INSERT INTO items (channel_id, guid)
            VALUES (?, ?)
            ON CONFLICT(channel_id, guid) DO UPDATE SET channel_id = channel_id
            RETURNING id as `id!`, channel_id, fetched_at, title, description,
            content",
            from_channel.0, guid
        ).fetch_one(&pool.0).await?.parse()
    }
}
#[cfg(test)]
mod tests {
    // Unit test for row parsing plus an integration test for the upsert path.
    use super::*;
    use crate::test_utils::{
        ITEM_GUID, ITEM_TITLE, ITEM_DESC, ITEM_CONT,
        setup_adapter,
        setup_channel,
    };
    use chrono::TimeZone;
    // UnparsedItem tests
    #[test]
    fn parse_unparsed_item() {
        const ITEM_ID: i64 = 1;
        const CHANNEL_ID: i64 = 1;
        let date: DateTime<Utc> = Utc.with_ymd_and_hms(2020,1,1,0,0,0).unwrap();
        let raw_item = UnparsedItem {
            id: ITEM_ID,
            channel_id: CHANNEL_ID,
            fetched_at: Some(date.to_rfc2822()),
            title: Some(ITEM_TITLE.to_string()),
            description: Some(ITEM_DESC.to_string()),
            content: Some(ITEM_CONT.to_string()),
        };
        let item = raw_item.parse().unwrap();
        assert_eq!(item.key.0, ITEM_ID);
        assert_eq!(item.channel_id.0, CHANNEL_ID);
        assert_eq!(item.fetched_at, Some(date));
        assert_eq!(item.title, Some(ITEM_TITLE.to_string()));
        assert_eq!(item.description, Some(ITEM_DESC.to_string()));
        assert_eq!(item.content, Some(ITEM_CONT.to_string()));
    }
    // Item Tests
    #[tokio::test]
    async fn get_or_create_duplicate() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let channel = setup_channel(pool).await;
        let item1 = Item::get_or_create(pool, channel.key(), ITEM_GUID).await.unwrap();
        let item2 = Item::get_or_create(pool, channel.key(), ITEM_GUID).await.unwrap();
        assert_eq!(item1.key(), item2.key());
    }
}

200
koucha/src/db/user.rs Normal file
View file

@ -0,0 +1,200 @@
use crate::{
Result,
AdapterPool,
db::{
UserKey,
Feed,
feed::UnparsedFeed,
},
};
/// A raw `users` row straight from the database.
pub struct UnparsedUser {
    pub id: i64,
    pub name: String,
}
impl UnparsedUser {
    /// Converts this raw row into a typed [`User`].
    pub fn parse(self) -> Result<User> {
        Ok(User {
            key: UserKey(self.id),
            name: self.name
        })
    }
}
/// An application user, who owns zero or more feeds.
pub struct User {
    key: UserKey,
    name: String,
}
impl User {
    pub fn key(&self) -> UserKey { self.key }
    pub fn name(&self) -> &str { &self.name }
    /// Loads a user by key; errors if the row does not exist.
    pub async fn get(pool: &AdapterPool, key: UserKey) -> Result<Self> {
        sqlx::query_as!(
            UnparsedUser,
            "SELECT id, name FROM users WHERE id = ?",
            key.0
        ).fetch_one(&pool.0).await?.parse()
    }
    /// Loads every user.
    pub async fn get_all(pool: &AdapterPool) -> Result<Vec<Self>> {
        sqlx::query_as!(
            UnparsedUser,
            "SELECT id, name FROM users"
        ).fetch_all(&pool.0).await?.into_iter().map(UnparsedUser::parse).collect()
    }
    /// Creates a new user; fails if `name` is taken (UNIQUE constraint).
    pub async fn create(pool: &AdapterPool, name: &str) -> Result<Self> {
        sqlx::query_as!(
            UnparsedUser,
            "INSERT INTO users (name)
            VALUES (?)
            RETURNING id, name",
            name
        ).fetch_one(&pool.0).await?.parse()
    }
    /// Renames the user identified by `key`; fails on a duplicate name.
    pub async fn update_name(
        pool: &AdapterPool, key: UserKey, new_name: &str
    ) -> Result<()> {
        sqlx::query!(
            "UPDATE users SET name = ? WHERE id = ?",
            new_name, key.0
        ).execute(&pool.0).await?;
        Ok(())
    }
    /// All feeds owned by this user.
    pub async fn get_feeds(&self, pool: &AdapterPool) -> Result<Vec<Feed>> {
        // Return the collected result directly; the explicitly-typed
        // temporary binding was redundant (clippy::let_and_return) — the
        // collect target is inferred from the return type.
        sqlx::query_as!(
            UnparsedFeed,
            "SELECT id, title FROM feeds WHERE user_id = ?",
            self.key.0
        ).fetch_all(&pool.0).await?.into_iter()
            .map(UnparsedFeed::parse).collect()
    }
}
#[cfg(test)]
mod tests {
    // Unit test for row parsing plus integration tests (CRUD, uniqueness,
    // feed listing) against a fresh database from `test_utils`.
    use super::*;
    use crate::{
        db::Feed,
        test_utils::{
            USERNAME, USERNAME2, FEED_TITLE, FEED_TITLE2,
            setup_adapter,
        },
    };
    #[test]
    fn parse() {
        const UID: i64 = 1;
        let unparsed_user = UnparsedUser {
            id: UID,
            name: USERNAME.to_string(),
        };
        let user = unparsed_user.parse().unwrap();
        assert_eq!(user.key.0, UID);
        assert_eq!(user.name, USERNAME);
    }
    #[tokio::test]
    async fn get() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let new_user = User::create(pool, USERNAME).await.unwrap();
        let fetched_user = User::get(pool, new_user.key).await.unwrap();
        assert_eq!(fetched_user.name, USERNAME);
        assert_eq!(fetched_user.key.0, 1);
    }
    #[tokio::test]
    async fn get_all() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        User::create(pool, USERNAME).await.unwrap();
        User::create(pool, USERNAME2).await.unwrap();
        let users = User::get_all(pool).await.unwrap();
        assert_eq!(users.len(), 2);
        assert!(users.iter().any(|u| u.name == USERNAME));
        assert!(users.iter().any(|u| u.name == USERNAME2));
    }
    #[tokio::test]
    async fn create_user() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let user = User::create(pool, USERNAME).await.unwrap();
        assert_eq!(user.name, USERNAME);
        assert_eq!(user.key.0, 1);
    }
    // The UNIQUE constraint on users.name should reject a second insert.
    #[tokio::test]
    async fn create_duplicate_user() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        User::create(pool, USERNAME).await.unwrap();
        let duplicate_user = User::create(pool, USERNAME).await;
        assert!(duplicate_user.is_err());
    }
    #[tokio::test]
    async fn update_name() {
        const NEW_USERNAME: &str = "Alicia";
        assert!(NEW_USERNAME != USERNAME);
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let user = User::create(pool, USERNAME).await.unwrap();
        User::update_name(pool, user.key, NEW_USERNAME).await.unwrap();
        let updated = User::get(pool, user.key).await.unwrap();
        assert_eq!(updated.name, NEW_USERNAME);
    }
    #[tokio::test]
    async fn update_name_to_duplicate() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let user1 = User::create(pool, USERNAME).await.unwrap();
        User::create(pool, USERNAME2).await.unwrap();
        let status = User::update_name(pool, user1.key, USERNAME2).await;
        assert!(status.is_err());
    }
    #[tokio::test]
    async fn get_feeds() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let user = User::create(pool, USERNAME).await.unwrap();
        Feed::create(pool, user.key, FEED_TITLE).await.unwrap();
        Feed::create(pool, user.key, FEED_TITLE2).await.unwrap();
        let feeds = user.get_feeds(pool).await.unwrap();
        assert_eq!(feeds.len(), 2);
        assert!(feeds.iter().any(|f| f.title() == FEED_TITLE));
        assert!(feeds.iter().any(|f| f.title() == FEED_TITLE2));
    }
    #[tokio::test]
    async fn get_feeds_empty() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let user = User::create(pool, USERNAME).await.unwrap();
        let feeds = user.get_feeds(pool).await.unwrap();
        assert_eq!(feeds.len(), 0);
    }
}

151
koucha/src/fetch.rs Normal file
View file

@ -0,0 +1,151 @@
use crate::{
Result,
db::Channel,
AdapterClient,
};
use reqwest::Url;
use chrono::{DateTime, Utc};
use std::hash::{Hash, Hasher};
/// A single item extracted from a fetched RSS document, with all optional
/// fields defaulted to empty strings.
pub struct FetchedRSSItem {
    guid: String,
    title: String,
    description: String,
    content: String,
}
impl FetchedRSSItem {
    pub fn guid(&self) -> &str { &self.guid }
    pub fn title(&self) -> &str { &self.title }
    pub fn description(&self) -> &str { &self.description }
    pub fn content(&self) -> &str { &self.content }
    /// Flattens an `rss::Item` into our owned representation, substituting
    /// empty strings for missing fields.
    fn parse(item: rss::Item) -> Self {
        let guid = Self::get_or_create_guid(&item);
        let title = item.title().unwrap_or("").to_string();
        let description = item.description().unwrap_or("").to_string();
        let content = item.content().unwrap_or("").to_string();
        FetchedRSSItem { guid, title, description, content }
    }
    /// Returns the item's declared guid, or derives a surrogate one by
    /// hashing the link/title/description when the feed omits it.
    fn get_or_create_guid(item: &rss::Item) -> String {
        match item.guid() {
            Some(guid) => guid.value().to_string(),
            None => {
                // NOTE(review): DefaultHasher's algorithm is not guaranteed
                // stable across Rust releases, so generated guids may change
                // between builds — confirm this is acceptable for dedup.
                let mut hasher = std::collections::hash_map::DefaultHasher::new();
                item.link().unwrap_or("").hash(&mut hasher);
                item.title().unwrap_or("").hash(&mut hasher);
                item.description().unwrap_or("").hash(&mut hasher);
                format!("gen-{:x}", hasher.finish())
            }
        }
    }
}
/// A parsed remote RSS channel plus the moment it was fetched.
pub struct FetchedRSSChannel {
    title: String,
    link: Url,
    description: String,
    items: Vec<FetchedRSSItem>,
    fetched_at: DateTime<Utc>,
}
impl FetchedRSSChannel {
    pub fn title(&self) -> &str { &self.title }
    pub fn link(&self) -> &Url { &self.link }
    pub fn description(&self) -> &str { &self.description }
    pub fn items(&self) -> &[FetchedRSSItem] { &self.items }
    pub fn fetched_at(&self) -> &DateTime<Utc> { &self.fetched_at }
    /// Downloads and parses the RSS document at `channel`'s link.
    ///
    /// NOTE(review): on success this currently always returns `Ok(Some(..))`;
    /// the `Option` appears reserved for future conditional fetches (e.g.
    /// HTTP 304 via compression/timestamp headers) — confirm the intent.
    ///
    /// # Errors
    /// Fails on network errors, an unreadable/invalid RSS document, or an
    /// unparsable channel link.
    pub async fn fetch_channel(
        client: &AdapterClient, channel: Channel
    ) -> Result<Option<Self>> {
        let bytestream = client.0.get(channel.link().clone())
            .send().await?
            .bytes().await?;
        let rss_channel = rss::Channel::read_from(&bytestream[..])?;
        let now = Utc::now();
        Ok(Some(FetchedRSSChannel::parse(rss_channel, now)?))
    }
    /// Converts an `rss::Channel` into our fetched representation.
    fn parse(rss: rss::Channel, fetched_at: DateTime<Utc>) -> Result<Self> {
        Ok(FetchedRSSChannel {
            title: rss.title,
            link: Url::parse(&rss.link)?,
            description: rss.description,
            items: rss.items.into_iter().map(FetchedRSSItem::parse).collect(),
            // Field-init shorthand (was the redundant `fetched_at: fetched_at`,
            // clippy::redundant_field_names).
            fetched_at,
        })
    }
}
#[cfg(test)]
mod tests {
    // Pure parsing tests built from in-memory rss:: values — no network.
    use super::*;
    use crate::test_utils::{
        ITEM_TITLE, ITEM_GUID, ITEM_GUID2, ITEM_DESC, ITEM_CONT,
        CHANNEL_TITLE, CHANNEL_DESC, FEED1,
        get_datetime
    };
    // Builders for minimal rss fixtures shared by the tests below.
    fn create_guid(value: String) -> rss::Guid {
        rss::Guid { value, permalink: false }
    }
    fn create_item(guid: rss::Guid) -> rss::Item {
        rss::ItemBuilder::default()
            .title(ITEM_TITLE.to_string())
            .guid(guid)
            .description(ITEM_DESC.to_string())
            .content(ITEM_CONT.to_string())
            .build()
    }
    fn create_channel(items: Vec<rss::Item>) -> rss::Channel {
        rss::ChannelBuilder::default()
            .title(CHANNEL_TITLE.to_string())
            .description(CHANNEL_DESC.to_string())
            .link(FEED1.to_string())
            .items(items)
            .build()
    }
    #[test]
    fn parse_item() {
        let rss_guid = create_guid(ITEM_GUID.to_string());
        let rss_item = create_item(rss_guid);
        let item = FetchedRSSItem::parse(rss_item);
        assert_eq!(item.guid, ITEM_GUID);
        assert_eq!(item.title, ITEM_TITLE);
        assert_eq!(item.description, ITEM_DESC);
        assert_eq!(item.content, ITEM_CONT);
    }
    #[test]
    fn parse_feed() {
        let rss_guid = create_guid(ITEM_GUID.to_string());
        let rss_guid2 = create_guid(ITEM_GUID2.to_string());
        let rss_item = create_item(rss_guid);
        let rss_item2 = create_item(rss_guid2);
        let rss_channel = create_channel([rss_item, rss_item2].to_vec());
        let date: DateTime<Utc> = get_datetime();
        let channel = FetchedRSSChannel::parse(rss_channel, date).unwrap();
        assert_eq!(channel.title, CHANNEL_TITLE);
        assert_eq!(channel.link.as_str(), FEED1);
        assert_eq!(channel.description, CHANNEL_DESC);
        assert_eq!(channel.fetched_at, date);
        assert_eq!(channel.items.len(), 2);
        assert!(channel.items.iter().any(|i| i.guid() == ITEM_GUID));
        assert!(channel.items.iter().any(|i| i.guid() == ITEM_GUID2));
    }
}

View file

@ -0,0 +1,49 @@
use std::error::Error;

/// Crate-wide result type with a boxed dynamic error.
type Result<T> = std::result::Result<T, Box<dyn Error>>;

pub mod db;
pub mod fetch;
pub mod score;
#[cfg(test)]
pub mod test_utils;
/// Newtype wrapper around the shared SQLite connection pool.
pub struct AdapterPool(sqlx::SqlitePool);
/// Newtype wrapper around the HTTP client used for remote fetches.
pub struct AdapterClient(reqwest::Client);
/// Builder for [`Adapter`]: configure with `database_url`, then `create`.
pub struct AdapterBuilder {
    // Connection string handed to sqlx; defaults to "sqlite:test.db".
    database_url: String,
}
impl AdapterBuilder {
    /// Creates a builder with the default database URL (`sqlite:test.db`).
    pub fn new() -> Self {
        Self {
            database_url: "sqlite:test.db".to_string(),
        }
    }

    /// Overrides the database URL used by [`AdapterBuilder::create`].
    ///
    /// Generalized to accept anything convertible into a `String`
    /// (`&str`, `String`, …); existing `&str` callers are unaffected.
    pub fn database_url(mut self, url: impl Into<String>) -> Self {
        self.database_url = url.into();
        self
    }

    /// Connects to the database, runs pending migrations, and constructs
    /// the HTTP client, yielding a ready-to-use [`Adapter`].
    ///
    /// # Errors
    /// Fails when the database connection cannot be established or a
    /// migration does not apply cleanly.
    pub async fn create(self) -> Result<Adapter> {
        let db = sqlx::sqlite::SqlitePoolOptions::new()
            .connect(&self.database_url).await?;
        sqlx::migrate!().run(&db).await?;
        let client = reqwest::Client::new();
        Ok(Adapter { db: AdapterPool(db), client: AdapterClient(client) })
    }
}

/// `new` implies `Default` by convention (clippy `new_without_default`).
impl Default for AdapterBuilder {
    fn default() -> Self {
        Self::new()
    }
}
/// Bundles the shared database pool and HTTP client.
pub struct Adapter {
    db: AdapterPool,
    client: AdapterClient,
}
impl Adapter {
    // NOTE(review): `get_` prefixes are non-idiomatic for Rust getters
    // (`pool()`/`client()` preferred), but renaming would break callers.
    /// Borrows the shared database pool.
    pub fn get_pool(&self) -> &AdapterPool { &self.db }
    /// Borrows the shared HTTP client.
    pub fn get_client(&self) -> &AdapterClient { &self.client }
}

447
koucha/src/score.rs Normal file
View file

@ -0,0 +1,447 @@
use chrono::{DateTime, Utc, TimeDelta};
use crate::{Result};
use std::ops::{Add, Sub};
/// Fallback values for the scoring system's tunable parameters.
mod default {
    use crate::score::SECONDS_IN_A_DAY;
    /// Score a brand-new item starts with.
    pub const INITIAL_SCORE: i64 = 70;
    /// Score delta applied per whole elapsed day (negative = decay).
    pub const GRAVITY: i64 = -10;
    /// Score added when an item is boosted.
    pub const BOOST: i64 = 12;
    /// How long a boost freezes the score before decay resumes.
    pub const BOOST_FREEZE_IN_SECONDS: i64 = SECONDS_IN_A_DAY;
}
/// Seconds in 24 hours; the unit of score-decay granularity.
const SECONDS_IN_A_DAY: i64 = 60 * 60 * 24;
/// Declares a comparable, copyable newtype over `i64` with a lossless
/// conversion back to `i64` via `From`.
macro_rules! rich_i64 {
    ($name:ident) => {
        #[derive(PartialOrd, PartialEq, Debug, Copy, Clone)]
        pub struct $name(i64);
        impl From<$name> for i64 { fn from(id: $name) -> Self { id.0 } }
    };
}
/// A `rich_i64!` newtype plus a `new(Option<i64>)` constructor that falls
/// back to `$default` when given `None`.
macro_rules! defaulting_i64 {
    ($name:ident, $default:expr) => {
        rich_i64!($name);
        impl $name {
            pub fn new(value: Option<i64>) -> Self {
                Self(value.unwrap_or($default))
            }
        }
    };
}
/// Implements `$lhs + $rhs -> $lhs` for two rich-i64 newtypes.
macro_rules! addable_i64s {
    ($lhs:ident, $rhs:ident) => {
        impl Add<$rhs> for $lhs {
            type Output = Self;
            fn add(self, other: $rhs) -> Self::Output { Self(self.0 + other.0) }
        }
    }
}
// Score defaults to INITIAL_SCORE and can absorb Scores and Boosts.
defaulting_i64!(Score, default::INITIAL_SCORE);
addable_i64s!(Score, Score);
addable_i64s!(Score, Boost);
// Removing a boost from a score; inverse of `Score + Boost`.
impl Sub<Boost> for Score {
    type Output = Self;
    fn sub(self, other: Boost) -> Self::Output { Self(self.0 - other.0) }
}
// Decay is applied by adding a (typically negative) gravity-over-duration
// delta to a Score.
addable_i64s!(Score, GravityOverDuration);
defaulting_i64!(Boost, default::BOOST);
defaulting_i64!(Gravity, default::GRAVITY);
rich_i64!(GravityOverDuration);
impl Gravity {
    /// Total score delta from applying this gravity across `start..end`.
    ///
    /// Integer division means only *whole* elapsed days count: spans shorter
    /// than a day contribute zero. Division truncates toward zero, so a
    /// negative span (end before start) flips the sign — gravity then raises
    /// the score instead of lowering it.
    fn over_duration(
        &self, start: DateTime<Utc>, end: DateTime<Utc>
    ) -> GravityOverDuration {
        let elapsed_time = end.signed_duration_since(start);
        GravityOverDuration(
            self.0 * (elapsed_time.num_seconds() / SECONDS_IN_A_DAY)
        )
    }
}
/// Raw score row shape used for controlled parsing from the database;
/// convert with [`UnparsedTimedScore::parse`].
pub struct UnparsedTimedScore {
    // Current score value.
    pub value: i64,
    // When the score was last recomputed.
    pub last_updated: DateTime<Utc>,
    // Set while the score is boosted; `None` once it is decaying.
    pub last_boosted: Option<DateTime<Utc>>,
}
impl UnparsedTimedScore {
    /// Turns the raw row into a typed `TimedScore`.
    ///
    /// A present `last_boosted` timestamp yields a frozen
    /// `TimedScore::Boosted`; otherwise the score is `Decaying` from
    /// `last_updated`.
    pub fn parse(self) -> TimedScore {
        if let Some(boosted_at) = self.last_boosted {
            TimedScore::Boosted(BoostedScore {
                value: Score(self.value),
                boosted_at,
            })
        } else {
            TimedScore::Decaying(DecayingScore {
                value: Score(self.value),
                last_updated: self.last_updated,
            })
        }
    }

    /// Flattens a `TimedScore` back into the raw row shape.
    ///
    /// For boosted scores `last_updated` mirrors the boost timestamp.
    pub fn unparse(ts: TimedScore) -> Self {
        match ts {
            TimedScore::Decaying(decaying) => Self {
                value: decaying.value.into(),
                last_updated: decaying.last_updated,
                last_boosted: None,
            },
            TimedScore::Boosted(boosted) => Self {
                value: boosted.value.into(),
                last_updated: boosted.boosted_at,
                last_boosted: Some(boosted.boosted_at),
            },
        }
    }
}
/// A score paired with the time data needed to evolve it: either decaying
/// under gravity, or temporarily frozen by a boost.
#[derive(Clone)]
pub enum TimedScore {
    Decaying(DecayingScore),
    Boosted(BoostedScore),
}
impl TimedScore {
    /// Fresh decaying score with the default initial value, stamped now.
    pub fn new() -> DecayingScore {
        Self::new_with_initial(Score::new(None))
    }

    /// Decaying score starting at `initial_score`, stamped now.
    pub fn new_with_initial(initial_score: Score) -> DecayingScore {
        Self::new_with_initial_and_time(initial_score, Utc::now())
    }

    /// Decaying score with an explicit starting value and timestamp.
    pub fn new_with_initial_and_time(
        initial: Score, time: DateTime<Utc>
    ) -> DecayingScore {
        DecayingScore { value: initial, last_updated: time }
    }

    /// Current score value, whichever variant holds it.
    pub fn get_score(&self) -> Score {
        match self {
            Self::Decaying(decaying) => decaying.get_score(),
            Self::Boosted(boosted) => boosted.get_score(),
        }
    }

    /// Applies `gravity` up to the present moment.
    pub fn update_score(self, gravity: Gravity) -> Self {
        self.update_score_at_time(gravity, Utc::now())
    }

    /// Applies `gravity` up to `time`. A boosted score first attempts to
    /// thaw; while still frozen it is returned untouched, and once thawed
    /// it decays from the end of its freeze window.
    fn update_score_at_time(self, gravity: Gravity, time: DateTime<Utc>) -> Self {
        // Step 1: give a frozen score the chance to thaw.
        let thawed = match self {
            Self::Boosted(boosted) => boosted.try_unfreeze_at_time(time),
            Self::Decaying(decaying) => Self::Decaying(decaying),
        };
        // Step 2: anything decaying now absorbs gravity; a score that is
        // still frozen passes through unchanged.
        match thawed {
            Self::Decaying(decaying) => Self::Decaying(
                decaying.apply_gravity_to_time(gravity, time)
            ),
            Self::Boosted(boosted) => Self::Boosted(boosted),
        }
    }

    /// Unwraps the `Decaying` variant.
    ///
    /// # Errors
    /// Fails when the score is currently boosted.
    pub fn get_decaying(self) -> Result<DecayingScore> {
        match self {
            Self::Decaying(decaying) => Ok(decaying),
            Self::Boosted(_) => Err("Attempted to get_decaying() of a boosted score".into()),
        }
    }

    /// Unwraps the `Boosted` variant.
    ///
    /// # Errors
    /// Fails when the score is currently decaying.
    pub fn get_boosted(self) -> Result<BoostedScore> {
        match self {
            Self::Decaying(_) => Err("Attempted to get_boosted() of a decaying score".into()),
            Self::Boosted(boosted) => Ok(boosted),
        }
    }
}
/// A score frozen by a boost; decay resumes only after the freeze window.
#[derive(Clone)]
pub struct BoostedScore {
    value: Score,
    boosted_at: DateTime<Utc>,
}
/// A score decaying under gravity since `last_updated`.
#[derive(Clone)]
pub struct DecayingScore {
    value: Score,
    last_updated: DateTime<Utc>,
}
impl DecayingScore {
    /// Current value of the score.
    fn get_score(&self) -> Score {
        self.value
    }

    /// Decays the value by `gravity` over the span from `last_updated` to
    /// `update_time`, stamping the result with `update_time`.
    fn apply_gravity_to_time(
        self, gravity: Gravity, update_time: DateTime<Utc>
    ) -> Self {
        let decayed = self.value + gravity.over_duration(self.last_updated, update_time);
        Self { value: decayed, last_updated: update_time }
    }

    /// Boosts the score now, freezing it as a `BoostedScore`.
    pub fn boost(self, boost: Boost) -> BoostedScore {
        self.boost_at_time(boost, Utc::now())
    }

    /// Boosts the score at an explicit `boost_time`.
    fn boost_at_time(self, boost: Boost, boost_time: DateTime<Utc>) -> BoostedScore {
        BoostedScore { value: self.value + boost, boosted_at: boost_time }
    }
}
impl BoostedScore {
    /// Current value of the score.
    fn get_score(&self) -> Score {
        self.value
    }

    /// Removes `boost` from the value and resumes decay from `boosted_at`.
    pub fn unboost(self, boost: Boost) -> DecayingScore {
        DecayingScore { value: self.value - boost, last_updated: self.boosted_at }
    }

    /// Thaws into a `DecayingScore` once the freeze window has fully
    /// elapsed; decay then resumes from the *end* of that window. At or
    /// before the window's final second the score stays boosted.
    fn try_unfreeze_at_time(self, update_time: DateTime<Utc>) -> TimedScore {
        let boost_end = self.boosted_at + TimeDelta::seconds(default::BOOST_FREEZE_IN_SECONDS);
        // `update_time <= boost_end` is the exact negation of the thaw
        // condition `boost_end < update_time`.
        if update_time <= boost_end {
            return TimedScore::Boosted(self);
        }
        TimedScore::Decaying(DecayingScore { value: self.value, last_updated: boost_end })
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::test_utils::get_datetime;

    #[test]
    fn gravity_default() {
        let gravity = Gravity::new(None);
        assert_eq!(i64::from(gravity), default::GRAVITY);
    }
    #[test]
    fn boost_default() {
        let boost = Boost::new(None);
        assert_eq!(i64::from(boost), default::BOOST);
    }
    // "Score" Tests
    #[test]
    fn parse_decaying() {
        // No last_boosted => row parses to the Decaying variant.
        let ups = UnparsedTimedScore {
            value: 10,
            last_updated: get_datetime(),
            last_boosted: None,
        };
        ups.parse().get_decaying().unwrap();
    }
    #[test]
    fn parse_boosted() {
        // A last_boosted timestamp => row parses to the Boosted variant.
        let dt = get_datetime();
        let ups = UnparsedTimedScore {
            value: 10,
            last_updated: dt,
            last_boosted: Some(dt),
        };
        ups.parse().get_boosted().unwrap();
    }
    #[test]
    fn new() {
        let score = TimedScore::new();
        assert_eq!(score.value, Score(default::INITIAL_SCORE));
    }
    #[test]
    fn new_with_values() {
        let dt = get_datetime();
        let score = TimedScore::new_with_initial_and_time(Score(10), dt);
        assert_eq!(score.value, Score(10));
        assert_eq!(score.last_updated, dt);
    }
    #[test]
    fn update_score_stays_decaying() {
        let dt = get_datetime();
        let score = TimedScore::Decaying(
            TimedScore::new_with_initial_and_time(Score(10), dt)
        );
        let gravity = Gravity::new(None);
        let dt2 = dt + TimeDelta::seconds(SECONDS_IN_A_DAY);
        score.update_score_at_time(gravity, dt2).get_decaying().unwrap();
    }
    #[test]
    fn update_score_stays_frozen() {
        // Exactly at the end of the freeze window the score is still frozen.
        let dt = get_datetime();
        let score = TimedScore::Boosted(
            BoostedScore { value: Score(10), boosted_at: dt }
        );
        let gravity = Gravity::new(None);
        let dt2 = dt + TimeDelta::seconds(default::BOOST_FREEZE_IN_SECONDS);
        score.update_score_at_time(gravity, dt2).get_boosted().unwrap();
    }
    #[test]
    fn update_score_thaws_and_decays() {
        // One full day past the freeze window: thaws, then decays.
        let dt = get_datetime();
        let score = TimedScore::Boosted(
            BoostedScore { value: Score(10), boosted_at: dt }
        );
        let gravity = Gravity::new(None);
        let dt2 = dt + TimeDelta::seconds(
            default::BOOST_FREEZE_IN_SECONDS + SECONDS_IN_A_DAY
        );
        let updated = score.update_score_at_time(gravity, dt2)
            .get_decaying().unwrap();
        assert!(updated.value < Score(10))
    }
    #[test]
    fn get_decaying_success() {
        let dt = get_datetime();
        let score = TimedScore::Decaying(
            TimedScore::new_with_initial_and_time(Score(10), dt)
        );
        score.get_decaying().unwrap();
    }
    #[test]
    #[should_panic = "Attempted to get_boosted() of a decaying score"]
    fn get_boosted_failure() {
        let dt = get_datetime();
        let score = TimedScore::Decaying(
            TimedScore::new_with_initial_and_time(Score(10), dt)
        );
        score.get_boosted().unwrap();
    }
    #[test]
    #[should_panic = "Attempted to get_decaying() of a boosted score"]
    fn get_decaying_failure() {
        let dt = get_datetime();
        let boost = Boost::new(None);
        let score = TimedScore::Boosted(
            TimedScore::new_with_initial_and_time(Score(10), dt)
                .boost_at_time(boost, dt)
        );
        score.get_decaying().unwrap();
    }
    #[test]
    fn get_boosted_success() {
        let dt = get_datetime();
        let boost = Boost::new(None);
        let score = TimedScore::Boosted(
            TimedScore::new_with_initial_and_time(Score(10), dt)
                .boost_at_time(boost, dt)
        );
        score.get_boosted().unwrap();
    }
    // "DecayingScore" Tests
    #[test]
    fn apply_gravity_to_future() {
        let dt = get_datetime();
        let score = DecayingScore { value: Score(10), last_updated: dt };
        let future = dt + TimeDelta::seconds(SECONDS_IN_A_DAY);
        let gravity = Gravity::new(None);
        let updated = score.apply_gravity_to_time(gravity, future);
        assert!(updated.value < Score(10));
        assert_eq!(updated.last_updated, future);
    }
    #[test]
    fn apply_gravity_to_past() {
        // A negative span inverts gravity's sign: the score rises.
        let dt = get_datetime();
        let score = DecayingScore { value: Score(10), last_updated: dt };
        let past = dt - TimeDelta::seconds(SECONDS_IN_A_DAY);
        let gravity = Gravity::new(None);
        let updated = score.apply_gravity_to_time(gravity, past);
        assert!(updated.value > Score(10));
        assert_eq!(updated.last_updated, past);
    }
    #[test]
    fn boost() {
        let dt = get_datetime();
        let score = DecayingScore { value: Score(10), last_updated: dt };
        let boost = Boost::new(None);
        let boosted = score.boost_at_time(boost, dt);
        assert_eq!(boosted.value, Score(10) + Boost(default::BOOST));
        assert_eq!(boosted.boosted_at, dt);
    }
    // "BoostedScore" tests
    #[test]
    fn unboost() {
        let dt = get_datetime();
        let score = DecayingScore { value: Score(10), last_updated: dt };
        let boost = Boost::new(None);
        let boosted = score.boost_at_time(boost, dt);
        let unboosted = boosted.unboost(boost);
        assert_eq!(unboosted.value, Score(10));
        assert_eq!(unboosted.last_updated, dt);
    }
    #[test]
    fn boosted_stays_frozen() {
        let dt = get_datetime();
        let score = BoostedScore { value: Score(10), boosted_at: dt };
        let last_second = dt + TimeDelta::seconds(default::BOOST_FREEZE_IN_SECONDS);
        score.try_unfreeze_at_time(last_second).get_boosted().unwrap();
    }
    #[test]
    fn boosted_thaws() {
        // BUG FIX: was `TimeDelta::days(BOOST_FREEZE_IN_SECONDS + 1)`,
        // i.e. ~86401 *days* in the future. The intent (per `first_second`)
        // is the first second after the freeze window.
        let dt = get_datetime();
        let score = BoostedScore { value: Score(10), boosted_at: dt };
        let first_second = dt + TimeDelta::seconds(default::BOOST_FREEZE_IN_SECONDS + 1);
        score.try_unfreeze_at_time(first_second).get_decaying().unwrap();
    }
}

52
koucha/src/test_utils.rs Normal file
View file

@ -0,0 +1,52 @@
#![cfg(test)]
use crate::{
Adapter,
AdapterBuilder,
AdapterPool,
db::{
Channel,
Feed,
User,
}
};
use reqwest::Url;
use chrono::{
Utc,
TimeZone,
DateTime
};
// Feed URLs.
pub const FEED1: &str = "https://example.com/feed";
pub const FEED2: &str = "https://example2.com/feed";
// Usernames.
pub const USERNAME: &str = "Alice";
pub const USERNAME2: &str = "Bob";
// Feed titles.
pub const FEED_TITLE: &str = "My Feed!";
pub const FEED_TITLE2: &str = "My Second Feed!";
// Channel fixtures.
pub const CHANNEL_TITLE: &str = "My Channel!";
pub const CHANNEL_DESC: &str = "My Channel's description";
// Item fixtures.
pub const ITEM_GUID: &str = "item-guid";
pub const ITEM_GUID2: &str = "item-guid2";
pub const ITEM_TITLE: &str = "My Item!";
pub const ITEM_DESC: &str = "My Item's description";
pub const ITEM_CONT: &str = "The content of my Item";
/// Fixed, deterministic timestamp for tests: 2020-01-01 00:00:00 UTC.
pub fn get_datetime() -> DateTime<Utc> {
    let fixed = Utc.with_ymd_and_hms(2020, 1, 1, 0, 0, 0);
    fixed.unwrap()
}
/// Builds an `Adapter` backed by a fresh in-memory SQLite database.
pub async fn setup_adapter() -> Adapter {
    AdapterBuilder::new()
        .database_url("sqlite::memory:")
        .create().await.unwrap()
}
/// Gets or creates the channel for the `FEED1` URL in the given pool.
pub async fn setup_channel(pool: &AdapterPool) -> Channel {
    let url = Url::parse(FEED1).unwrap();
    Channel::get_or_create(pool, url).await.unwrap()
}
/// Creates user `USERNAME` and a feed titled `FEED_TITLE` owned by them.
pub async fn setup_feed(pool: &AdapterPool) -> Feed {
    let user = User::create(pool, USERNAME).await.unwrap();
    Feed::create(pool, user.key(), FEED_TITLE).await.unwrap()
}