db, add item and functions to get items
Creates the item type. It's pretty barebones until I implement fetching. I also added get functions on Feed and Channel for retrieving items.
This commit is contained in:
parent
ac6e71eb62
commit
1c3f4cc854
5 changed files with 179 additions and 2 deletions
|
|
@ -4,6 +4,8 @@ mod feed;
|
|||
pub use feed::Feed;
|
||||
mod channel;
|
||||
pub use channel::Channel;
|
||||
mod item;
|
||||
pub use item::Item;
|
||||
|
||||
macro_rules! define_key {
|
||||
($name:ident) => {
|
||||
|
|
@ -15,3 +17,4 @@ macro_rules! define_key {
|
|||
define_key!(UserKey);
|
||||
define_key!(FeedKey);
|
||||
define_key!(ChannelKey);
|
||||
define_key!(ItemKey);
|
||||
|
|
|
|||
|
|
@ -3,7 +3,11 @@ use chrono::{DateTime, Utc};
|
|||
use crate::{
|
||||
Result,
|
||||
AdapterPool,
|
||||
db::ChannelKey,
|
||||
db::{
|
||||
ChannelKey,
|
||||
Item,
|
||||
item::UnparsedItem,
|
||||
},
|
||||
};
|
||||
|
||||
pub struct UnparsedChannel {
|
||||
|
|
@ -74,6 +78,17 @@ impl Channel {
|
|||
link_str, link_str // We use the url as a placeholder title
|
||||
).fetch_one(&pool.0).await?.parse()
|
||||
}
|
||||
|
||||
pub async fn get_items(&self, pool: &AdapterPool) -> Result<Vec<Item>> {
|
||||
sqlx::query_as!(
|
||||
UnparsedItem,
|
||||
"SELECT id as `id!`, channel_id, fetched_at, title, description,
|
||||
content
|
||||
FROM items
|
||||
WHERE channel_id = ?",
|
||||
self.key.0
|
||||
).fetch_all(&pool.0).await?.into_iter().map(UnparsedItem::parse).collect()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
@ -81,7 +96,7 @@ mod tests {
|
|||
use super::*;
|
||||
use crate::{
|
||||
test_utils::{
|
||||
FEED1, FEED2, CHANNEL_TITLE, CHANNEL_DESC,
|
||||
FEED1, FEED2, CHANNEL_TITLE, CHANNEL_DESC, ITEM_GUID, ITEM_GUID2,
|
||||
setup_adapter,
|
||||
setup_channel,
|
||||
},
|
||||
|
|
@ -176,4 +191,18 @@ mod tests {
|
|||
|
||||
assert_eq!(channels.len(), 2);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_items() {
|
||||
let adapter = setup_adapter().await;
|
||||
let pool = adapter.get_pool();
|
||||
let channel = setup_channel(pool).await;
|
||||
|
||||
Item::get_or_create(pool, channel.key(), ITEM_GUID).await.unwrap();
|
||||
Item::get_or_create(pool, channel.key(), ITEM_GUID2).await.unwrap();
|
||||
|
||||
let items = channel.get_items(pool).await.unwrap();
|
||||
|
||||
assert_eq!(items.len(), 2);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -5,8 +5,10 @@ use crate::{
|
|||
Channel,
|
||||
ChannelKey,
|
||||
FeedKey,
|
||||
Item,
|
||||
UserKey,
|
||||
channel::UnparsedChannel,
|
||||
item::UnparsedItem,
|
||||
},
|
||||
};
|
||||
|
||||
|
|
@ -77,6 +79,22 @@ impl Feed {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_items(
|
||||
&self, pool: &AdapterPool, limit: u8, offset: u32
|
||||
) -> Result<Vec<Item>> {
|
||||
sqlx::query_as!(
|
||||
UnparsedItem,
|
||||
"SELECT i.id as `id!`, i.channel_id, i.fetched_at, i.title, i.description,
|
||||
i.content
|
||||
FROM items i
|
||||
JOIN feed_items fi on i.id = fi.item_id
|
||||
WHERE feed_id = ? AND archived = FALSE
|
||||
ORDER BY score DESC
|
||||
LIMIT ? OFFSET ?",
|
||||
self.key.0, limit, offset
|
||||
).fetch_all(&pool.0).await?.into_iter().map(UnparsedItem::parse).collect()
|
||||
}
|
||||
|
||||
pub async fn get_channels(
|
||||
&self, pool: &AdapterPool
|
||||
) -> Result<Vec<Channel>> {
|
||||
|
|
|
|||
122
koucha/src/db/item.rs
Normal file
122
koucha/src/db/item.rs
Normal file
|
|
@ -0,0 +1,122 @@
|
|||
use crate::{
|
||||
Result,
|
||||
AdapterPool,
|
||||
db::{
|
||||
ChannelKey,
|
||||
ItemKey,
|
||||
},
|
||||
};
|
||||
use chrono::{DateTime, Utc};
|
||||
|
||||
/// Raw database row for an item, exactly as read from the `items` table.
///
/// `fetched_at` is kept as the stored string so that [`UnparsedItem::parse`]
/// can turn it into a typed `DateTime<Utc>` — parsing can fail, hence the
/// two-step row-then-parse design.
pub struct UnparsedItem {
    // Primary key of the row.
    pub id: i64,
    // Foreign key to the owning channel.
    pub channel_id: i64,
    // Timestamp as stored; `parse` reads it as RFC 2822.
    pub fetched_at: Option<String>,

    // Feed-provided metadata; all optional until fetching is implemented.
    pub title: Option<String>,
    pub description: Option<String>,
    pub content: Option<String>,
}
|
||||
|
||||
impl UnparsedItem {
|
||||
pub fn parse(self) -> Result<Item> {
|
||||
Ok(Item {
|
||||
key: ItemKey(self.id),
|
||||
channel_id: ChannelKey(self.channel_id),
|
||||
fetched_at: match self.fetched_at {
|
||||
Some(dt_str) => Some(DateTime::parse_from_rfc2822(&dt_str)?
|
||||
.with_timezone(&Utc)),
|
||||
None => None,
|
||||
},
|
||||
|
||||
title: self.title,
|
||||
description: self.description,
|
||||
content: self.content,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A parsed feed item belonging to a [`Channel`-keyed] channel.
///
/// Fields are private; read access goes through the getters on `impl Item`.
pub struct Item {
    // Primary key of this item.
    key: ItemKey,
    // Key of the channel this item belongs to.
    channel_id: ChannelKey,

    // When the item was fetched, if it has been fetched at all.
    fetched_at: Option<DateTime<Utc>>,
    // Feed-provided metadata; optional until fetching is implemented.
    title: Option<String>,
    description: Option<String>,
    content: Option<String>,
}
|
||||
|
||||
impl Item {
|
||||
pub fn key(&self) -> ItemKey { self.key }
|
||||
pub fn channel(&self) -> ChannelKey { self.channel_id }
|
||||
pub fn title(&self) -> Option<&str> { self.title.as_deref() }
|
||||
pub fn description(&self) -> Option<&str> { self.description.as_deref() }
|
||||
pub fn content(&self) -> Option<&str> { self.content.as_deref() }
|
||||
|
||||
pub async fn get_or_create(
|
||||
pool: &AdapterPool, from_channel: ChannelKey, guid: &str
|
||||
) -> Result<Self> {
|
||||
|
||||
let item = sqlx::query_as!(
|
||||
UnparsedItem,
|
||||
"INSERT INTO items (channel_id, guid)
|
||||
VALUES (?, ?)
|
||||
ON CONFLICT(channel_id, guid) DO UPDATE SET channel_id = channel_id
|
||||
RETURNING id as `id!`, channel_id, fetched_at, title, description,
|
||||
content",
|
||||
from_channel.0, guid
|
||||
).fetch_one(&pool.0).await?.parse();
|
||||
|
||||
item
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::test_utils::{
        ITEM_GUID, ITEM_TITLE, ITEM_DESC, ITEM_CONT,
        setup_adapter,
        setup_channel,
    };
    use chrono::TimeZone;

    // UnparsedItem tests

    /// A fully-populated raw row round-trips into an `Item` unchanged.
    #[test]
    fn parse_unparsed_item() {
        const ITEM_ID: i64 = 1;
        const CHANNEL_ID: i64 = 1;

        let date: DateTime<Utc> = Utc.with_ymd_and_hms(2020, 1, 1, 0, 0, 0).unwrap();
        let parsed = UnparsedItem {
            id: ITEM_ID,
            channel_id: CHANNEL_ID,
            fetched_at: Some(date.to_rfc2822()),
            title: Some(ITEM_TITLE.to_string()),
            description: Some(ITEM_DESC.to_string()),
            content: Some(ITEM_CONT.to_string()),
        }
        .parse()
        .unwrap();

        assert_eq!(parsed.key.0, ITEM_ID);
        assert_eq!(parsed.channel_id.0, CHANNEL_ID);
        assert_eq!(parsed.fetched_at, Some(date));
        assert_eq!(parsed.title.as_deref(), Some(ITEM_TITLE));
        assert_eq!(parsed.description.as_deref(), Some(ITEM_DESC));
        assert_eq!(parsed.content.as_deref(), Some(ITEM_CONT));
    }

    // Item tests

    /// Inserting the same (channel, guid) twice yields the same row.
    #[tokio::test]
    async fn get_or_create_duplicate() {
        let adapter = setup_adapter().await;
        let pool = adapter.get_pool();
        let channel = setup_channel(pool).await;

        let first = Item::get_or_create(pool, channel.key(), ITEM_GUID).await.unwrap();
        let second = Item::get_or_create(pool, channel.key(), ITEM_GUID).await.unwrap();

        assert_eq!(first.key(), second.key());
    }
}
|
||||
|
|
@ -25,6 +25,11 @@ pub const FEED_TITLE: &str = "My Feed!";
|
|||
pub const FEED_TITLE2: &str = "My Second Feed!";
|
||||
pub const CHANNEL_TITLE: &str = "My Channel!";
|
||||
pub const CHANNEL_DESC: &str = "My Channel's description";
|
||||
// Fixture constants for `Item` tests.
pub const ITEM_GUID: &str = "item-guid";
// Second guid, used where tests need two distinct items.
pub const ITEM_GUID2: &str = "item-guid2";
pub const ITEM_TITLE: &str = "My Item!";
pub const ITEM_DESC: &str = "My Item's description";
pub const ITEM_CONT: &str = "The content of my Item";
|
||||
|
||||
pub fn get_datetime() -> DateTime<Utc> {
|
||||
Utc.with_ymd_and_hms(2020,1,1,0,0,0).unwrap()
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue