Compare commits


No commits in common. "account-creation" and "main" have entirely different histories.

50 changed files with 276 additions and 1200 deletions

.gitignore vendored (5 changed lines)

@ -1,5 +0,0 @@
.env
.pds-data/
target/
tmp/

Cargo.lock generated (83 changed lines)

@ -70,8 +70,8 @@ name = "api"
version = "0.1.0"
dependencies = [
"atproto",
"axum",
"http 1.3.1",
"router",
"serde",
"serde_json",
"tokio",
@ -79,18 +79,6 @@ dependencies = [
"tracing-subscriber",
]
[[package]]
name = "argon2"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072"
dependencies = [
"base64ct",
"blake2",
"cpufeatures",
"password-hash",
]
[[package]]
name = "async-lock"
version = "3.4.0"
@ -135,7 +123,6 @@ dependencies = [
"time",
"tracing",
"tracing-subscriber",
"unicode-segmentation",
]
[[package]]
@ -295,15 +282,6 @@ dependencies = [
"serde",
]
[[package]]
name = "blake2"
version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe"
dependencies = [
"digest",
]
[[package]]
name = "block-buffer"
version = "0.10.4"
@ -315,9 +293,9 @@ dependencies = [
[[package]]
name = "bon"
version = "3.6.4"
version = "3.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f61138465baf186c63e8d9b6b613b508cd832cba4ce93cf37ce5f096f91ac1a6"
checksum = "ced38439e7a86a4761f7f7d5ded5ff009135939ecb464a24452eaa4c1696af7d"
dependencies = [
"bon-macros",
"rustversion",
@ -325,9 +303,9 @@ dependencies = [
[[package]]
name = "bon-macros"
version = "3.6.4"
version = "3.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40d1dad34aa19bf02295382f08d9bc40651585bd497266831d40ee6296fb49ca"
checksum = "0ce61d2d3844c6b8d31b2353d9f66cf5e632b3e9549583fe3cac2f4f6136725e"
dependencies = [
"darling",
"ident_case",
@ -684,25 +662,6 @@ dependencies = [
"serde",
]
[[package]]
name = "entryway"
version = "0.1.0"
dependencies = [
"argon2",
"async-trait",
"atproto",
"http 1.3.1",
"router",
"serde",
"serde_json",
"sqlx",
"thiserror 2.0.12",
"time",
"tokio",
"tracing",
"tracing-subscriber",
]
[[package]]
name = "equivalent"
version = "1.0.2"
@ -1615,17 +1574,6 @@ dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "password-hash"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166"
dependencies = [
"base64ct",
"rand_core",
"subtle",
]
[[package]]
name = "pem-rfc7468"
version = "0.7.0"
@ -1865,21 +1813,6 @@ dependencies = [
"zstd",
]
[[package]]
name = "router"
version = "0.1.0"
dependencies = [
"atproto",
"axum",
"bon",
"http 1.3.1",
"serde",
"serde_json",
"tokio",
"tracing",
"tracing-subscriber",
]
[[package]]
name = "rsa"
version = "0.9.8"
@ -2775,12 +2708,6 @@ version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0"
[[package]]
name = "unicode-segmentation"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
[[package]]
name = "unsigned-varint"
version = "0.8.0"

View file

@ -1,13 +1,11 @@
[workspace]
resolver = "3"
members = [ "api", "atproto", "entryway", "db", "router", "ingestor" ]
members = [ "api", "atproto","db", "ingestor"]
[workspace.dependencies]
async-trait = "0.1.88"
atproto = { path = "./atproto" }
db = { path = "./db" }
router = { path = "./router" }
serde = { version = "1.0.219", features = ["derive"] }
serde = "1.0.219"
serde_json = "1.0.140"
sqlx = { version = "0.8.6", features = ["postgres", "runtime-tokio"] }
thiserror = "2.0.12"

View file

@ -5,7 +5,7 @@ edition = "2024"
[dependencies]
atproto.workspace = true
router.workspace = true
axum = { version = "0.8.3", features = ["json"] }
http = "1.3.1"
serde.workspace = true
serde_json.workspace = true

View file

@ -1,4 +1,4 @@
use router::{
use crate::router::{
Router,
Endpoint,
xrpc::{

api/src/router.rs (new file, 59 lines)

@ -0,0 +1,59 @@
use crate::router::xrpc::{
XrpcEndpoint,
XrpcHandler,
QueryInput,
ProcedureInput,
};
use atproto::Nsid;
use axum::Router as AxumRouter;
use core::net::SocketAddr;
use std::net::{IpAddr, Ipv4Addr};
use tokio::net::TcpListener;
pub struct Router {
addr: SocketAddr,
router: AxumRouter,
}
// In case the server ever needs to support more than just XRPC
pub enum Endpoint {
Xrpc(XrpcEndpoint),
}
impl Endpoint {
pub fn new_xrpc_query<Q>(nsid: Nsid, query: Q) -> Self
where
Q: XrpcHandler<QueryInput> + Clone
{
Endpoint::Xrpc(XrpcEndpoint::new_query(nsid,query))
}
pub fn new_xrpc_procedure<P>(nsid: Nsid, procedure: P) -> Self
where
P: XrpcHandler<ProcedureInput> + Clone
{
Endpoint::Xrpc(XrpcEndpoint::new_procedure(nsid,procedure))
}
}
pub mod xrpc;
impl Router {
pub fn new() -> Self {
let mut router = AxumRouter::new();
router = XrpcEndpoint::not_implemented().add_to_router(router);
let addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127,0,0,1)), 6702);
Router { router, addr }
}
pub fn add_endpoint(mut self, endpoint: Endpoint) -> Self {
match endpoint {
Endpoint::Xrpc(ep) => self.router = ep.add_to_router(self.router),
};
self
}
pub async fn serve(self) {
let listener = TcpListener::bind(self.addr).await.unwrap();
axum::serve(listener, self.router).await.unwrap();
}
}
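For orientation, here is a rough sketch of how the new Router and Endpoint types might be driven from the api crate's main.rs; the NSID, handler, and response text are illustrative placeholders, not taken from this diff.

```rust
use crate::router::{
    Endpoint,
    Router,
    xrpc::{QueryInput, response},
};
use atproto::Nsid;
use http::StatusCode;

#[tokio::main]
async fn main() {
    // Hypothetical NSID and handler, purely for illustration.
    let nsid: Nsid = "com.example.ping".parse().expect("valid nsid");
    let router = Router::new().add_endpoint(Endpoint::new_xrpc_query(
        nsid,
        |_input: QueryInput| async move { response(StatusCode::OK, "pong") },
    ));
    // Router::new() hard-codes 127.0.0.1:6702; serve() blocks until the process exits.
    router.serve().await;
}
```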

View file

@ -1,10 +1,9 @@
use crate::Endpoint;
use std::{
collections::HashMap,
pin::Pin,
future::Future,
};
use atproto::types::Nsid;
use atproto::Nsid;
use axum::{
extract::{
Json,
@ -52,11 +51,8 @@ pub fn response(code: StatusCode, message: &str) -> Response {
error(code, "", message)
}
pub struct QueryInput<S = ()>
where S: Clone + Send + Sync + 'static,
{
pub parameters: HashMap<String, String>,
pub state: S,
pub struct QueryInput {
parameters: HashMap<String, String>,
}
impl<S> FromRequestParts<S> for QueryInput
where
@ -64,28 +60,21 @@ where
{
type Rejection = Response;
async fn from_request_parts(parts: &mut Parts, state: &S)
async fn from_request_parts(parts: &mut Parts, _state: &S)
-> Result<Self, Self::Rejection> {
let query_params: Result<Query<HashMap<String, String>>, QueryRejection> = Query::try_from_uri(&parts.uri);
match query_params {
Ok(p) => Ok(QueryInput { parameters: p.0, state }),
Ok(p) => Ok(QueryInput { parameters: p.0 }),
Err(e) => Err(error(StatusCode::BAD_REQUEST, "Bad Parameters", &e.body_text())),
}
}
}
#[derive(Debug)]
pub struct ProcedureInput<J, S = ()>
where S: Clone + Send + Sync + 'static,
{
pub parameters: HashMap<String, String>,
pub input: J,
pub state: S,
pub struct ProcedureInput {
parameters: HashMap<String, String>,
input: Json<Value>,
}
impl<J, S> FromRequest<S> for ProcedureInput<J, S>
impl<S> FromRequest<S> for ProcedureInput
where
J: for<'de> serde::Deserialize<'de> + Send + 'static,
Bytes: FromRequest<S>,
S: Send + Sync,
{
@ -93,15 +82,19 @@ where
async fn from_request(req: Request, state: &S)
-> Result<Self, Self::Rejection> {
let parameters: HashMap<String, String> = Query::try_from_uri(req.uri())
.map(|p| p.0)
.map_err(|e| error(StatusCode::BAD_REQUEST, "Bad Parameters", &e.body_text()))?;
let query_params: Result<Query<HashMap<String, String>>, QueryRejection> = Query::try_from_uri(req.uri());
let parameters = match query_params {
Ok(p) => p.0,
Err(e) => return Err(error(StatusCode::BAD_REQUEST, "Bad Parameters", &e.body_text())),
};
let input: J = Json::<J>::from_request(req, state).await
.map(|Json(v)| v)
.map_err(|e| error(StatusCode::BAD_REQUEST, "Bad Parameters", &e.body_text()))?;
let json_value = Json::<Value>::from_request(req, state).await;
let input: Json<Value> = match json_value {
Ok(v) => v,
Err(e) => return Err(error(StatusCode::BAD_REQUEST, "Bad Parameters", &e.body_text())),
};
Ok(ProcedureInput { parameters, input, state })
Ok(ProcedureInput { parameters, input })
}
}
@ -119,26 +112,26 @@ where
Box::pin((self)(input))
}
}
impl<J, F, Fut> XrpcHandler<ProcedureInput<J>> for F
impl<F, Fut> XrpcHandler<ProcedureInput> for F
where
F: Fn(ProcedureInput<J>) -> Fut + Send + Sync + 'static,
F: Fn(ProcedureInput) -> Fut + Send + Sync + 'static,
Fut: Future<Output = Response> + Send + 'static,
{
fn call(&self, input: ProcedureInput<J>)
fn call(&self, input: ProcedureInput)
-> Pin<Box<dyn Future<Output = Response>+ Send>> {
Box::pin((self)(input))
}
}
impl XrpcEndpoint {
pub fn new_query<Q, S>(nsid: Nsid, query: Q) -> Self
pub fn new_query<Q>(nsid: Nsid, query: Q) -> Self
where
Q: XrpcHandler<QueryInput> + Clone
{
XrpcEndpoint {
path: Path::Nsid(nsid),
resolver: get(async move | mut parts: Parts, state: &S | -> Response {
match QueryInput<S>::from_request_parts(&mut parts, state).await {
resolver: get(async move | mut parts: Parts | -> Response {
match QueryInput::from_request_parts(&mut parts, &()).await {
Ok(qi) => query.call(qi).await,
Err(e) => e
}
@ -146,15 +139,14 @@ impl XrpcEndpoint {
}
}
pub fn new_procedure<P, J, S>(nsid: Nsid, procedure: P) -> Self
pub fn new_procedure<P>(nsid: Nsid, procedure: P) -> Self
where
P: XrpcHandler<ProcedureInput<J, S>> + Clone,
J: for<'de> serde::Deserialize<'de> + Send + 'static,
P: XrpcHandler<ProcedureInput> + Clone
{
XrpcEndpoint {
path: Path::Nsid(nsid),
resolver: post(async move | req: Request, state: &S | -> Response {
match ProcedureInput::<J, S>::from_request(req, &state).await {
resolver: post(async move | req: Request | -> Response {
match ProcedureInput::from_request(req, &()).await {
Ok(pi) => procedure.call(pi).await,
Err(e) => e
}
@ -162,6 +154,15 @@ impl XrpcEndpoint {
}
}
pub fn add_to_router(self, router: axumRouter) -> axumRouter {
let path = match self.path {
Path::Nsid(nsid) => &("/xrpc/".to_owned() + nsid.as_str()),
Path::NotImplemented => "/xrpc/{*nsid}",
};
router.route(path, self.resolver)
}
pub fn not_implemented() -> Self {
let resolver = (
StatusCode::NOT_IMPLEMENTED,
@ -178,13 +179,3 @@ impl XrpcEndpoint {
}
}
impl Endpoint for XrpcEndpoint {
fn add_to_router(self, router: axumRouter) -> axumRouter {
let path = match self.path {
Path::Nsid(nsid) => &("/xrpc/".to_owned() + &nsid.to_string()),
Path::NotImplemented => "/xrpc/{*nsid}",
};
router.route(path, self.resolver)
}
}

View file

@ -13,7 +13,6 @@ time = { version = "0.3.41", features = ["parsing", "formatting"] }
tracing-subscriber.workspace = true
tracing.workspace = true
thiserror.workspace = true
unicode-segmentation = "1.9.0"
[features]
default = []

View file

@ -20,12 +20,8 @@ pub enum FormatError {
pub enum ParseError {
#[error("Time Parse Error: {0}")]
Datetime(#[from] time::error::Parse),
#[error("Json Parse Error: {0}")]
Serde(#[from] serde_json::error::Error),
#[error("Length of parsed object too long, max: {max:?}, got: {got:?}.")]
Length { max: usize, got: usize },
#[error("Length of parsed object too short, min: {min:?}, got: {got:?}.")]
MinLength { min: usize, got: usize },
#[error("Currently Did is enforced, cannot use handle, {handle:?}")]
ForceDid { handle: String },
#[error("Incorrectly formatted")]

View file

@ -1 +1,3 @@
pub mod myspoor;
// @generated - This file is generated by esquema-codegen (forked from atrium-codegen). DO NOT EDIT.
pub mod record;
pub mod my;

View file

@ -0,0 +1,3 @@
// @generated - This file is generated by esquema-codegen (forked from atrium-codegen). DO NOT EDIT.
//!Definitions for the `my` namespace.
pub mod spoor;

View file

@ -0,0 +1,4 @@
// @generated - This file is generated by esquema-codegen (forked from atrium-codegen). DO NOT EDIT.
//!Definitions for the `my.spoor` namespace.
pub mod content;
pub mod log;

View file

@ -0,0 +1,16 @@
// @generated - This file is generated by esquema-codegen (forked from atrium-codegen). DO NOT EDIT.
//!Definitions for the `my.spoor.log` namespace.
pub mod activity;
pub mod session;
#[derive(Debug)]
pub struct Activity;
impl atrium_api::types::Collection for Activity {
const NSID: &'static str = "my.spoor.log.activity";
type Record = activity::Record;
}
#[derive(Debug)]
pub struct Session;
impl atrium_api::types::Collection for Session {
const NSID: &'static str = "my.spoor.log.session";
type Record = session::Record;
}

View file

@ -1,11 +0,0 @@
use serde::Deserialize;
use crate::types::{BoundString, StrongRef, Uri, Did, Datetime};
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Session {
pub content: StrongRef<Uri>,
pub label: Option<BoundString<64, 640>>,
pub other_participants: Option<Vec<Did>>,
pub created_at: Datetime,
}

View file

@ -1,2 +0,0 @@
pub mod content;
pub mod log;

View file

@ -0,0 +1,65 @@
// @generated - This file is generated by esquema-codegen (forked from atrium-codegen). DO NOT EDIT.
//!A collection of known record types.
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(tag = "$type")]
pub enum KnownRecord {
#[serde(rename = "my.spoor.content.external")]
LexiconsMySpoorContentExternal(
Box<crate::lexicons::my::spoor::content::external::Record>,
),
#[serde(rename = "my.spoor.content.media")]
LexiconsMySpoorContentMedia(Box<crate::lexicons::my::spoor::content::media::Record>),
#[serde(rename = "my.spoor.log.activity")]
LexiconsMySpoorLogActivity(Box<crate::lexicons::my::spoor::log::activity::Record>),
#[serde(rename = "my.spoor.log.session")]
LexiconsMySpoorLogSession(Box<crate::lexicons::my::spoor::log::session::Record>),
}
impl From<crate::lexicons::my::spoor::content::external::Record> for KnownRecord {
fn from(record: crate::lexicons::my::spoor::content::external::Record) -> Self {
KnownRecord::LexiconsMySpoorContentExternal(Box::new(record))
}
}
impl From<crate::lexicons::my::spoor::content::external::RecordData> for KnownRecord {
fn from(
record_data: crate::lexicons::my::spoor::content::external::RecordData,
) -> Self {
KnownRecord::LexiconsMySpoorContentExternal(Box::new(record_data.into()))
}
}
impl From<crate::lexicons::my::spoor::content::media::Record> for KnownRecord {
fn from(record: crate::lexicons::my::spoor::content::media::Record) -> Self {
KnownRecord::LexiconsMySpoorContentMedia(Box::new(record))
}
}
impl From<crate::lexicons::my::spoor::content::media::RecordData> for KnownRecord {
fn from(
record_data: crate::lexicons::my::spoor::content::media::RecordData,
) -> Self {
KnownRecord::LexiconsMySpoorContentMedia(Box::new(record_data.into()))
}
}
impl From<crate::lexicons::my::spoor::log::activity::Record> for KnownRecord {
fn from(record: crate::lexicons::my::spoor::log::activity::Record) -> Self {
KnownRecord::LexiconsMySpoorLogActivity(Box::new(record))
}
}
impl From<crate::lexicons::my::spoor::log::activity::RecordData> for KnownRecord {
fn from(record_data: crate::lexicons::my::spoor::log::activity::RecordData) -> Self {
KnownRecord::LexiconsMySpoorLogActivity(Box::new(record_data.into()))
}
}
impl From<crate::lexicons::my::spoor::log::session::Record> for KnownRecord {
fn from(record: crate::lexicons::my::spoor::log::session::Record) -> Self {
KnownRecord::LexiconsMySpoorLogSession(Box::new(record))
}
}
impl From<crate::lexicons::my::spoor::log::session::RecordData> for KnownRecord {
fn from(record_data: crate::lexicons::my::spoor::log::session::RecordData) -> Self {
KnownRecord::LexiconsMySpoorLogSession(Box::new(record_data.into()))
}
}
impl Into<atrium_api::types::Unknown> for KnownRecord {
fn into(self) -> atrium_api::types::Unknown {
atrium_api::types::TryIntoUnknown::try_into_unknown(&self).unwrap()
}
}
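Because KnownRecord is internally tagged on `$type`, dispatching an incoming record needs nothing beyond serde_json; a minimal sketch, where the routing function and its return values are made up for illustration:

```rust
use atproto::lexicons::record::KnownRecord;

// Sketch: route an incoming repo record to a handler based on its `$type` tag.
fn route_record(json: &str) -> Result<&'static str, serde_json::Error> {
    Ok(match serde_json::from_str::<KnownRecord>(json)? {
        KnownRecord::LexiconsMySpoorLogActivity(_) => "activity log",
        KnownRecord::LexiconsMySpoorLogSession(_) => "session log",
        KnownRecord::LexiconsMySpoorContentExternal(_)
        | KnownRecord::LexiconsMySpoorContentMedia(_) => "content",
    })
}
```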

View file

@ -1,7 +1,5 @@
// pub mod lexicons;
pub mod lexicons;
pub mod types;
pub mod error;
#[cfg(feature = "sqlx-support")]
pub mod sqlx;
pub use atrium_api::types::Collection;

View file

@ -1,20 +1,5 @@
use crate::error::{Error, ParseError};
#[macro_export]
macro_rules! basic_deserializer {
($name:ident) => {
impl<'de> serde::de::Deserialize<'de> for $name {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::de::Deserializer<'de>,
{
let value: String = serde::de::Deserialize::deserialize(deserializer)?;
value.parse::<$name>().map_err(<D::Error as serde::de::Error>::custom)
}
}
}
}
macro_rules! basic_string_type {
($name:ident, $regex:literal, $max_len:literal) => {
pub struct $name { value: String, }
@ -50,26 +35,20 @@ macro_rules! basic_string_type {
})
}
}
basic_deserializer!($name);
}
}
mod authority;
mod bound_string;
mod cid;
mod datetime;
mod did;
mod record_key;
mod strong_ref;
mod uri;
pub use authority::Authority;
pub use bound_string::BoundString;
pub use cid::Cid;
pub use datetime::Datetime;
pub use did::Did;
mod cid;
pub use cid::Cid;
mod authority;
pub use authority::Authority;
mod datetime;
pub use datetime::Datetime;
mod record_key;
pub use record_key::RecordKey;
pub use strong_ref::StrongRef;
mod uri;
pub use uri::Uri;
basic_string_type!(Handle,
@ -84,3 +63,22 @@ basic_string_type!(Tid,
r"^[234567abcdefghij][234567abcdefghijklmnopqrstuvwxyz]{12}$",
13
);
pub struct StrongRef<T> {
content: T,
cid: Cid,
}
impl<T> StrongRef<T> {
pub fn get_content(&self) -> &T {
&self.content
}
pub fn extract_content(self) -> (T, Cid) {
(self.content, self.cid)
}
pub fn get_cid(&self) -> &Cid {
&self.cid
}
}

View file

@ -1,16 +1,12 @@
use crate::{
types::{
Did, Handle
},
types::{Did, Handle},
error::{Error, ParseError},
};
use serde::Deserialize;
use std::{
fmt::{Display, Formatter, Result as FmtResult},
str::FromStr,
};
#[derive(Deserialize)]
pub enum Authority {
Did(Did),
Handle(Handle),

View file

@ -1,57 +0,0 @@
use unicode_segmentation::UnicodeSegmentation;
use crate::error::{Error, ParseError};
use std::{
fmt::{Display, Formatter, Result as FmtResult},
str::FromStr,
};
pub struct BoundString<
const MIN: usize, const MAX: usize>
{
value: String,
}
impl<const MIN: usize, const MAX: usize> BoundString<MIN, MAX> {
fn check_length(s: &str) -> Result<(), Error> {
let grapheme_count: usize = s.graphemes(true).take(MAX + 1).count();
if grapheme_count > MAX {
return Err(Error::Parse {
err: ParseError::Length { max: MAX, got: grapheme_count },
object: "String".to_string(),
});
}
if grapheme_count < MIN {
return Err(Error::Parse {
err: ParseError::MinLength { min: MIN, got: grapheme_count },
object: "String".to_string(),
});
}
Ok(())
}
}
impl<const MIN: usize, const MAX: usize> Display for BoundString<MIN, MAX> {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
write!(f, "{}", self.value)
}
}
impl<const MIN: usize, const MAX: usize> FromStr for BoundString<MIN, MAX> {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Self::check_length(s)?;
Ok(BoundString { value: s.to_string() })
}
}
impl<'de, const MIN: usize, const MAX: usize> serde::de::Deserialize<'de>
for BoundString<MIN, MAX> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::de::Deserializer<'de>,
{
let value: String = serde::de::Deserialize::deserialize(deserializer)?;
value.parse::<BoundString<MIN,MAX>>().map_err(<D::Error as serde::de::Error>::custom)
}
}

View file

@ -1,7 +1,4 @@
use crate::{
basic_deserializer,
error::Error
};
use crate::error::Error;
pub struct Cid { value: String, }
@ -17,5 +14,3 @@ impl std::str::FromStr for Cid {
Ok(Self { value: s.to_string() })
}
}
basic_deserializer!(Cid);

View file

@ -1,7 +1,4 @@
use crate::{
basic_deserializer,
error::{Error, ParseError, FormatError},
};
use crate::error::{Error, ParseError, FormatError};
use time::{
UtcDateTime,
format_description::well_known::{Rfc3339, Iso8601},
@ -64,5 +61,3 @@ impl FromStr for Datetime {
Ok(Datetime { time: datetime, derived_string: s.to_string() })
}
}
basic_deserializer!(Datetime);

View file

@ -1,7 +1,4 @@
use crate::{
basic_deserializer,
error::{Error, ParseError},
};
use crate::error::{Error, ParseError};
use std::{
fmt::{Display, Formatter, Result as FmtResult},
str::FromStr,
@ -34,8 +31,6 @@ impl FromStr for DidMethod {
}
}
basic_deserializer!(DidMethod);
pub struct Did {
method: DidMethod,
identifier: String,
@ -75,5 +70,3 @@ impl FromStr for Did {
})
}
}
basic_deserializer!(Did);

View file

@ -61,5 +61,3 @@ impl FromStr for RecordKey {
Ok(RecordKey::Any(s.to_string()))
}
}
basic_deserializer!(RecordKey);

View file

@ -1,46 +0,0 @@
use crate::{
basic_deserializer,
types::{Cid, Uri},
error::{Error, ParseError},
};
pub struct StrongRef<T> {
content: T,
cid: Cid,
}
impl StrongRef<Uri> {
pub fn from_atrium_api(strong_ref: atrium_api::com::atproto::repo::strong_ref::MainData) -> Result<Self, Error> {
let str_cid = serde_json::to_string(&strong_ref.cid).map_err(|e| {
Error::Parse { err: ParseError::Serde(e), object: "Uri".to_string() }
})?;
Ok(Self {
content: strong_ref.uri.parse::<Uri>()?,
cid: str_cid.parse::<Cid>()?,
})
}
}
impl<T> StrongRef<T> {
pub fn map_content<U, F>(self, f: F) -> StrongRef<U>
where
F: FnOnce(T) -> U,
{
StrongRef {
content: f(self.content),
cid: self.cid,
}
}
pub fn get_content(&self) -> &T {
&self.content
}
pub fn extract_content(self) -> (T, Cid) {
(self.content, self.cid)
}
pub fn get_cid(&self) -> &Cid {
&self.cid
}
}

View file

@ -1,5 +1,4 @@
use crate::{
basic_deserializer,
types::{Did, Authority, Nsid, RecordKey},
error::{Error, ParseError},
};
@ -34,78 +33,47 @@ impl Display for Uri {
impl FromStr for Uri {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Self::check_length(s)?;
let Some((
_whole, unchecked_authority, unchecked_collection, unchecked_rkey
)): Option<(&str, &str, &str, &str)> = regex_captures!(
r"/^at:\/\/([\w\.\-_~:]+)(?:\/([\w\.\-_~:]+)(?:)\/([\w\.\-_~:]+))?$/i",
s,
) else {
return Err(Error::Parse {
err: ParseError::Format,
object: "Uri".to_string(),
});
};
let did = Self::check_authority(unchecked_authority.to_string())?;
let collection = if unchecked_collection.is_empty() { None }
else { Some(Self::check_collection(unchecked_collection.to_string())?) };
let rkey = if unchecked_rkey.is_empty() { None }
else { Some(Self::check_rkey(unchecked_rkey.to_string())?) };
Ok(Uri { authority: did, collection, rkey })
}
}
impl Uri {
pub fn from_components(
authority_str: String, collection_str: Option<String>,
rkey_str: Option<String>
) -> Result<Self, Error> {
let authority = Self::check_authority(authority_str)?;
let collection = collection_str.map(Self::check_collection).transpose()?;
let rkey = rkey_str.map(Self::check_rkey).transpose()?;
let uri = Uri { authority, collection, rkey };
Self::check_length(&uri.to_string())?;
Ok(uri)
}
fn check_length(s: &str) -> Result<(), Error> {
if s.len() > 8000 {
return Err(Error::Parse {
err: ParseError::Length { max: 8000, got: s.len() },
object: "Did".to_string(),
});
}
Ok(())
}
fn check_authority(authority: String) -> Result<Did, Error> {
Ok(match Authority::from_str(&authority)? {
let Some((
_whole, unchecked_authority, unchecked_collection, unchecked_rkey
)) = regex_captures!(
r"/^at:\/\/([\w\.\-_~:]+)(?:\/([\w\.\-_~:]+)(?:)\/([\w\.\-_~:]+))?$/i",
s,
) else {
return Err(Error::Parse {
err: ParseError::Format,
object: "Uri".to_string(),
});
};
let did = match Authority::from_str(unchecked_authority)? {
Authority::Handle(h) =>
return Err(Error::Parse {
err: ParseError::ForceDid { handle: h.to_string() },
object: "Uri".to_string(),
}),
Authority::Did(d) => d,
})
}
};
fn check_collection(collection: String) -> Result<Nsid, Error> {
Ok(collection.parse::<Nsid>()?)
}
let collection = if unchecked_collection.is_empty() { None }
else { Some(unchecked_collection.parse::<Nsid>()?) };
fn check_rkey(rkey: String) -> Result<RecordKey, Error> {
Ok(rkey.parse::<RecordKey>()?)
}
let rkey = if unchecked_rkey.is_empty() { None }
else { Some(unchecked_rkey.parse::<RecordKey>()?) };
Ok(Uri { authority: did, collection, rkey })
}
}
impl Uri {
pub fn authority_as_did(&self) -> &Did {
&self.authority
}
}
basic_deserializer!(Uri);
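A sketch of the intended call pattern for Uri; the DID, collection, and record key are arbitrary examples. Note that the pattern handed to regex_captures! above still carries JavaScript-style `/.../i` delimiters, which the regex crate treats as literal characters rather than flags, so whether it matches as written is a separate question.

```rust
use atproto::{error::Error, types::Uri};

// Sketch of intended usage; the DID and record key below are made up.
fn inspect(raw: &str) -> Result<(), Error> {
    let uri: Uri = raw.parse()?;
    // The authority is always a DID here; bare handles fail with ParseError::ForceDid.
    let _repo = uri.authority_as_did();
    Ok(())
}

// inspect("at://did:plc:ewvi7nxzyoun6zhxrhs64oiz/my.spoor.log.session/3jzfcijpj2z2a")
```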

View file

@ -1,19 +0,0 @@
[package]
name = "entryway"
version = "0.1.0"
edition = "2024"
[dependencies]
atproto = { workspace = true, features = ["sqlx-support"] }
router.workspace = true
http = "1.3.1"
serde.workspace = true
serde_json.workspace = true
tokio.workspace = true
tracing-subscriber.workspace = true
tracing.workspace = true
async-trait.workspace = true
sqlx.workspace = true
thiserror.workspace = true
argon2 = "0.5"
time = { version = "0.3", features = ["formatting", "macros"] }

View file

@ -1,23 +0,0 @@
-- PDS Entryway Account Management Schema
-- Minimal schema for account creation and authentication
-- Actor table - stores public identity information
CREATE TABLE actor (
did VARCHAR PRIMARY KEY,
handle VARCHAR,
created_at VARCHAR NOT NULL
);
-- Case-insensitive unique index on handle
CREATE UNIQUE INDEX actor_handle_lower_idx ON actor (LOWER(handle));
-- Account table - stores private authentication data
CREATE TABLE account (
did VARCHAR PRIMARY KEY,
email VARCHAR NOT NULL,
password_scrypt VARCHAR NOT NULL,
email_confirmed_at VARCHAR
);
-- Case-insensitive unique index on email
CREATE UNIQUE INDEX account_email_lower_idx ON account (LOWER(email));

View file

@ -1,23 +0,0 @@
-- PDS Entryway Account Management Schema
-- Minimal schema for account creation and authentication
-- Actor table - stores public identity information
CREATE TABLE actor (
did VARCHAR PRIMARY KEY,
handle VARCHAR,
created_at VARCHAR NOT NULL
);
-- Case-insensitive unique index on handle
CREATE UNIQUE INDEX actor_handle_lower_idx ON actor (LOWER(handle));
-- Account table - stores private authentication data
CREATE TABLE account (
did VARCHAR PRIMARY KEY,
email VARCHAR NOT NULL,
password_scrypt VARCHAR NOT NULL,
email_confirmed_at VARCHAR
);
-- Case-insensitive unique index on email
CREATE UNIQUE INDEX account_email_lower_idx ON account (LOWER(email));

View file

@ -1,13 +0,0 @@
use thiserror::Error;
#[derive(Error, Debug)]
pub enum DatabaseError {
#[error("Database connection error: {0}")]
Connection(#[from] sqlx::Error),
#[error("Handle already taken: {0}")]
HandleTaken(String),
#[error("Email already taken: {0}")]
EmailTaken(String),
}

View file

@ -1,5 +0,0 @@
pub mod error;
pub mod operations;
pub use error::DatabaseError;
pub use operations::Database;

View file

@ -1,82 +0,0 @@
use sqlx::{Pool, Postgres};
use atproto::types::{
Handle,
Did
};
use crate::database::DatabaseError;
pub struct Database {
pool: Pool<Postgres>,
}
impl Database {
pub fn new(pool: Pool<Postgres>) -> Self {
Self { pool }
}
// Account availability checking
pub async fn check_handle_available(&self, handle: &Handle) -> Result<bool, DatabaseError> {
let count = sqlx::query_scalar!(
"SELECT COUNT(*) FROM actor WHERE LOWER(handle) = LOWER($1)",
handle
)
.fetch_one(&self.pool)
.await?;
Ok(count.unwrap_or(0) == 0)
}
pub async fn check_email_available(&self, email: &str) -> Result<bool, DatabaseError> {
let count = sqlx::query_scalar!(
"SELECT COUNT(*) FROM account WHERE LOWER(email) = LOWER($1)",
email
)
.fetch_one(&self.pool)
.await?;
Ok(count.unwrap_or(0) == 0)
}
// Account creation
pub async fn create_account(
&self,
did: &Did,
handle: &Handle,
email: &str,
password_hash: &str,
created_at: &str,
) -> Result<(), DatabaseError> {
// Use a transaction to ensure both actor and account records are created together
let mut tx = self.pool.begin().await?;
// Insert into actor table
sqlx::query!(
r#"
INSERT INTO actor (did, handle, created_at)
VALUES ($1, $2, $3)
"#,
did,
handle,
created_at
)
.execute(&mut *tx)
.await?;
// Insert into account table
sqlx::query!(
r#"
INSERT INTO account (did, email, password_scrypt)
VALUES ($1, $2, $3)
"#,
did,
email,
password_hash
)
.execute(&mut *tx)
.await?;
tx.commit().await?;
Ok(())
}
}

View file

@ -1,64 +0,0 @@
use router::{
Router,
xrpc::XrpcEndpoint,
};
use atproto::types::Nsid;
use sqlx::{Pool, Postgres};
use std::env;
use tracing::{event, Level};
mod xrpc;
mod database;
use xrpc::create_account;
use database::Database;
struct Config {
entryway_url: String,
entryway_did: String,
entryway_plc_rotation_key: String,
entryway_jwt_key_256_hex: String,
}
#[tokio::main]
async fn main() {
let subscriber = tracing_subscriber::FmtSubscriber::new();
let _ = tracing::subscriber::set_global_default(subscriber);
// Set up database connection
let database_url = env::var("DATABASE_URL")
.unwrap_or_else(|_| "postgres://localhost/entryway_dev".to_string());
event!(Level::INFO, "Connecting to database: {}", database_url);
let pool = match sqlx::postgres::PgPoolOptions::new()
.max_connections(5)
.connect(&database_url)
.await
{
Ok(pool) => pool,
Err(e) => {
event!(Level::ERROR, "Failed to connect to database: {}", e);
std::process::exit(1);
}
};
let database = Database::new(pool);
// Run migrations
if let Err(e) = database.run_migrations().await {
event!(Level::ERROR, "Failed to run migrations: {}", e);
std::process::exit(1);
}
event!(Level::INFO, "Database setup complete");
// TODO: Wire up database to XRPC handlers
// For now, keeping the existing router setup
let mut router = Router::new();
let create_account_nsid: Nsid = "com.atproto.server.createAccount".parse::<Nsid>().expect("valid nsid");
router = router.add_endpoint(XrpcEndpoint::not_implemented());
router = router.add_endpoint(XrpcEndpoint::new_procedure(create_account_nsid, create_account));
router.serve().await;
}
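main() calls database.run_migrations(), which does not appear in the Database impl above. A plausible sketch using sqlx's embedded migrations, assuming sqlx's default `migrate` feature is enabled and that the SQL files live under a ./migrations directory in the entryway crate:

```rust
// Sketch, added alongside the Database impl in operations.rs: run the SQL files
// embedded at compile time. The ./migrations path is an assumption.
impl Database {
    pub async fn run_migrations(&self) -> Result<(), DatabaseError> {
        sqlx::migrate!("./migrations")
            .run(&self.pool)
            .await
            .map_err(|e| DatabaseError::Connection(sqlx::Error::Migrate(Box::new(e))))
    }
}
```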

View file

@ -1,258 +0,0 @@
use router::xrpc::{ProcedureInput, Response, error};
use serde::{Deserialize, Serialize};
use http::status::StatusCode;
use tracing::{event, instrument, Level};
use atproto::types::Handle;
use std::str::FromStr;
use argon2::{Argon2, PasswordHasher, password_hash::{rand_core::OsRng, SaltString}};
use time::OffsetDateTime;
use crate::database::{Database, DatabaseError};
#[derive(Deserialize, Debug)]
pub struct CreateAccountInput {
pub email: Option<String>,
pub handle: String,
pub did: Option<String>,
pub invite_code: Option<String>,
pub verification_code: Option<String>,
pub verification_phone: Option<String>,
pub password: Option<String>,
pub recovery_key: Option<String>,
pub plc_op: Option<String>,
}
#[derive(Serialize, Debug)]
pub struct CreateAccountResponse {
pub handle: String,
pub did: String,
// pub did_doc: Option<DidDocument>, // TODO: Define DidDocument type
pub access_jwt: String,
pub refresh_jwt: String,
}
#[instrument]
pub async fn create_account(data: ProcedureInput<CreateAccountInput>) -> Response {
event!(Level::INFO, "Creating account for handle: {}", data.input.handle);
// TODO: Get database from context/config
// For now, this won't compile but shows the intended flow
// 1. Input validation
let validated_input = match validate_inputs(&data.input).await {
Ok(input) => input,
Err(err) => return err,
};
// 2. Check handle and email availability
// if let Err(err) = check_availability(&database, &validated_input).await {
// return err;
// }
// 3. Generate DID (placeholder for now)
let did = generate_placeholder_did(&validated_input.handle).await;
// 4. Hash password if provided
let password_hash = if let Some(password) = &validated_input.password {
match hash_password(password) {
Ok(hash) => Some(hash),
Err(_) => {
return error(
StatusCode::INTERNAL_SERVER_ERROR,
"InternalServerError",
"Failed to hash password"
);
}
}
} else {
None
};
// 5. Create account in database
let created_at = OffsetDateTime::now_utc().format(&time::format_description::well_known::Iso8601::DEFAULT)
.unwrap_or_else(|_| "unknown".to_string());
// if let Err(err) = create_account_in_db(&database, &did, &validated_input, password_hash.as_deref(), &created_at).await {
// return convert_db_error_to_response(err);
// }
// 6. Generate session tokens (placeholder for now)
let credentials = Credentials {
access_jwt: "placeholder_access_token".to_string(),
refresh_jwt: "placeholder_refresh_token".to_string(),
};
// Return success response
let response = CreateAccountResponse {
handle: validated_input.handle.clone(),
did: did.clone(),
access_jwt: credentials.access_jwt,
refresh_jwt: credentials.refresh_jwt,
};
event!(Level::INFO, "Account created successfully for DID: {} with handle: {}", did, validated_input.handle);
// TODO: Replace with proper JSON response encoding
error(StatusCode::OK, "success", "Account created successfully")
}
// Maximum password length (matches atproto TypeScript implementation)
const NEW_PASSWORD_MAX_LENGTH: usize = 256;
// TODO: Implement these helper functions
async fn validate_inputs(input: &CreateAccountInput) -> Result<ValidatedInput, Response> {
// Based on validateInputsForLocalPds in the TypeScript version
// Validate email is provided and has basic format
let email = match &input.email {
Some(e) if !e.is_empty() => e.clone(),
_ => {
return Err(error(
StatusCode::BAD_REQUEST,
"InvalidRequest",
"Email is required"
));
}
};
// Validate email format (basic validation for now)
// TODO: Improve email validation - add proper RFC validation and disposable email checking
// TypeScript version uses @hapi/address for validation and disposable-email-domains-js for disposable check
if !is_valid_email(&email) {
return Err(error(
StatusCode::BAD_REQUEST,
"InvalidRequest",
"This email address is not supported, please use a different email."
));
}
// Validate password length if provided
if let Some(password) = &input.password {
if password.len() > NEW_PASSWORD_MAX_LENGTH {
return Err(error(
StatusCode::BAD_REQUEST,
"InvalidRequest",
&format!("Password too long. Maximum length is {} characters.", NEW_PASSWORD_MAX_LENGTH)
));
}
}
// Validate and normalize handle using atproto types
let handle = Handle::from_str(&input.handle).map_err(|_| {
error(
StatusCode::BAD_REQUEST,
"InvalidRequest",
"Invalid handle format"
)
})?;
// TODO: Invite codes - not supported for now but leave placeholder
if input.invite_code.is_some() {
event!(Level::INFO, "Invite codes not yet supported, ignoring");
}
Ok(ValidatedInput {
handle: handle.to_string(),
email: email.to_lowercase(), // Normalize email to lowercase
password: input.password.clone(),
invite_code: input.invite_code.clone(),
})
}
// Basic email validation - checks for @ and . in reasonable positions
// TODO: Replace with proper email validation library like email-address crate
fn is_valid_email(email: &str) -> bool {
// Very basic email validation
let at_pos = email.find('@');
let last_dot_pos = email.rfind('.');
match (at_pos, last_dot_pos) {
(Some(at), Some(dot)) => {
// @ must come before the last dot
// Must have content before @, between @ and dot, and after dot
at > 0 && dot > at + 1 && dot < email.len() - 1
}
_ => false,
}
}
async fn check_availability(database: &Database, input: &ValidatedInput) -> Result<(), Response> {
// Check that handle and email are not already taken
match database.check_handle_available(&input.handle).await {
Ok(false) => {
return Err(error(
StatusCode::BAD_REQUEST,
"InvalidRequest",
&format!("Handle already taken: {}", input.handle)
));
}
Err(err) => {
event!(Level::ERROR, "Database error checking handle availability: {:?}", err);
return Err(error(
StatusCode::INTERNAL_SERVER_ERROR,
"InternalServerError",
"Database error"
));
}
_ => {}
}
match database.check_email_available(&input.email).await {
Ok(false) => {
return Err(error(
StatusCode::BAD_REQUEST,
"InvalidRequest",
&format!("Email already taken: {}", input.email)
));
}
Err(err) => {
event!(Level::ERROR, "Database error checking email availability: {:?}", err);
return Err(error(
StatusCode::INTERNAL_SERVER_ERROR,
"InternalServerError",
"Database error"
));
}
_ => {}
}
Ok(())
}
async fn generate_placeholder_did(handle: &str) -> String {
// TODO: Replace with actual DID generation (did:plc)
// For now, generate a placeholder DID based on handle
format!("did:placeholder:{}", handle.replace(".", "-"))
}
fn hash_password(password: &str) -> Result<String, argon2::password_hash::Error> {
let salt = SaltString::generate(&mut OsRng);
let argon2 = Argon2::default();
let password_hash = argon2.hash_password(password.as_bytes(), &salt)?;
Ok(password_hash.to_string())
}
async fn create_account_in_db(
database: &Database,
did: &str,
input: &ValidatedInput,
password_hash: Option<&str>,
created_at: &str,
) -> Result<(), DatabaseError> {
let hash = password_hash.unwrap_or(""); // Empty hash if no password
database.create_account(did, &input.handle, &input.email, hash, created_at).await
}
#[derive(Debug)]
struct ValidatedInput {
handle: String,
email: String,
password: Option<String>,
invite_code: Option<String>,
}
#[derive(Debug)]
struct Credentials {
access_jwt: String,
refresh_jwt: String,
}
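hash_password covers the creation path; for the eventual login flow, the verification counterpart with the same argon2 crate would look roughly like this (the function name is illustrative):

```rust
use argon2::{Argon2, PasswordHash, PasswordVerifier};

// Sketch: verify a candidate password against a stored PHC-format hash,
// the counterpart to hash_password above.
fn verify_password(stored_hash: &str, candidate: &str) -> bool {
    match PasswordHash::new(stored_hash) {
        Ok(parsed) => Argon2::default()
            .verify_password(candidate.as_bytes(), &parsed)
            .is_ok(),
        Err(_) => false,
    }
}
```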

View file

@ -1,3 +0,0 @@
pub mod create_account;
pub use create_account::create_account;

flake.lock generated (24 changed lines)

@ -2,18 +2,16 @@
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1752436162,
"narHash": "sha256-Kt1UIPi7kZqkSc5HVj6UY5YLHHEzPBkgpNUByuyxtlw=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "dfcd5b901dbab46c9c6e80b265648481aafb01f8",
"type": "github"
"lastModified": 1735563628,
"narHash": "sha256-OnSAY7XDSx7CtDoqNh8jwVwh4xNL/2HaJxGjryLWzX8=",
"rev": "b134951a4c9f3c995fd7be05f3243f8ecd65d798",
"revCount": 637546,
"type": "tarball",
"url": "https://api.flakehub.com/f/pinned/NixOS/nixpkgs/0.2405.637546%2Brev-b134951a4c9f3c995fd7be05f3243f8ecd65d798/01941dc2-2ab2-7453-8ebd-88712e28efae/source.tar.gz"
},
"original": {
"owner": "NixOS",
"ref": "nixos-25.05",
"repo": "nixpkgs",
"type": "github"
"type": "tarball",
"url": "https://flakehub.com/f/NixOS/nixpkgs/0.2405.%2A.tar.gz"
}
},
"nixpkgs_2": {
@ -43,11 +41,11 @@
"nixpkgs": "nixpkgs_2"
},
"locked": {
"lastModified": 1752547600,
"narHash": "sha256-0vUE42ji4mcCvQO8CI0Oy8LmC6u2G4qpYldZbZ26MLc=",
"lastModified": 1749695868,
"narHash": "sha256-debjTLOyqqsYOUuUGQsAHskFXH5+Kx2t3dOo/FCoNRA=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "9127ca1f5a785b23a2fc1c74551a27d3e8b9a28b",
"rev": "55f914d5228b5c8120e9e0f9698ed5b7214d09cd",
"type": "github"
},
"original": {

View file

@ -3,15 +3,13 @@
# Flake inputs
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05";
nixpkgs.url = "https://flakehub.com/f/NixOS/nixpkgs/0.2405.*.tar.gz";
rust-overlay.url = "github:oxalica/rust-overlay"; # A helper for Rust + Nix
};
# Flake outputs
outputs = { self, nixpkgs, rust-overlay }:
let
pdsDirectory = "/home/pan/prog/atproto/appview";
# Overlays enable you to customize the Nixpkgs attribute set
overlays = [
# Makes a `rust-bin` attribute available in Nixpkgs
@ -35,88 +33,19 @@
forAllSystems = f: nixpkgs.lib.genAttrs allSystems (system: f {
pkgs = import nixpkgs { inherit overlays system; };
});
# Systemd service configuration
createSystemdService = pkgs: pdsDir: pkgs.writeTextFile {
name = "pds.service";
text = ''
[Unit]
Description=Development Environment Service
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
ExecStart=${pkgs.pds}/bin/pds
WorkingDirectory=${pdsDir}
EnvironmentFile=${pdsDir}/.env
Environment=PDS_DATA_DIRECTORY=${pdsDir}/.pds-data
Environment=PDS_BLOBSTORE_DISK_LOCATION=${pdsDir}/.pds-data/blocks
'';
};
# Scripts for managing the systemd service
createServiceScripts = pkgs: pdsDir:
let
serviceFile = createSystemdService pkgs pdsDir;
serviceName = "pds";
in {
startScript = pkgs.writeShellScript "start-dev-service" ''
set -e
# Create user systemd directory if it doesn't exist
mkdir -p ~/.config/systemd/user
# Copy service file
cp -f ${serviceFile} ~/.config/systemd/user/${serviceName}.service
# Reload systemd and start service
systemctl --user daemon-reload
systemctl --user start ${serviceName}
systemctl --user enable ${serviceName}
systemctl --user status ${serviceName} --no-pager
'';
stopScript = pkgs.writeShellScript "stop-dev-service" ''
set -e
if systemctl --user is-enabled --quiet ${serviceName}; then
# Stop and disable service
systemctl --user stop ${serviceName} || true
systemctl --user disable ${serviceName} || true
# Remove service file
rm -f ~/.config/systemd/user/${serviceName}.service
# Reload systemd
systemctl --user daemon-reload
fi
'';
};
in
{
# Development environment output
devShells = forAllSystems ({ pkgs }:
let
scripts = createServiceScripts pkgs pdsDirectory;
in {
default = pkgs.mkShell {
# The Nix packages provided in the environment
packages = (with pkgs; [
# The package provided by our custom overlay. Includes cargo, Clippy, cargo-fmt,
# rustdoc, rustfmt, and other tools.
sqlx-cli
rustToolchain
]) ++ pkgs.lib.optionals pkgs.stdenv.isDarwin (with pkgs; [ libiconv ]);
shellHook = pkgs.lib.optionalString pkgs.stdenv.isLinux ''
# Cleanup
${scripts.stopScript}
# Start the systemd service
${scripts.startScript}
'';
};
});
devShells = forAllSystems ({ pkgs }: {
default = pkgs.mkShell {
# The Nix packages provided in the environment
packages = (with pkgs; [
# The package provided by our custom overlay. Includes cargo, Clippy, cargo-fmt,
# rustdoc, rustfmt, and other tools.
sqlx-cli
rustToolchain
]) ++ pkgs.lib.optionals pkgs.stdenv.isDarwin (with pkgs; [ libiconv ]);
};
});
};
}

View file

@ -1,15 +0,0 @@
[package]
name = "router"
version = "0.1.0"
edition = "2024"
[dependencies]
atproto.workspace = true
axum = { version = "0.8.3", features = ["json"] }
bon = "3.6.4"
http = "1.3.1"
serde.workspace = true
serde_json.workspace = true
tokio.workspace = true
tracing-subscriber.workspace = true
tracing.workspace = true

View file

@ -1,50 +0,0 @@
use crate::xrpc::XrpcEndpoint;
use axum::Router as AxumRouter;
use core::net::SocketAddr;
use std::net::{IpAddr, Ipv4Addr};
use tokio::net::TcpListener;
pub mod xrpc;
pub mod wellknown;
pub enum Error {}
pub struct Router {
addr: SocketAddr,
router: AxumRouter,
}
impl Default for Router {
fn default() -> Self {
Self::new()
}
}
impl<S> Router<S>
where
S: Clone + Send + Sync + 'static,
{
pub fn new() -> Self {
let router = AxumRouter::new();
let addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127,0,0,1)), 6702);
Router { router, addr }
}
pub fn with_state<S2>(mut self, state: S) -> Router<S2> {
self.router = self.with_state(S);
self
}
pub fn add_endpoint<E: Endpoint>(mut self, endpoint: E) -> Self {
self.router = endpoint.add_to_router(self.router);
self
}
pub async fn serve(self) {
let listener = TcpListener::bind(self.addr).await.unwrap();
axum::serve(listener, self.router).await.unwrap();
}
}
pub trait Endpoint {
fn add_to_router(self, router: AxumRouter) -> AxumRouter;
}

View file

@ -1,19 +0,0 @@
use crate::Endpoint;
use axum::{
routing::method_routing::MethodRouter,
Router as axumRouter,
};
pub mod atproto;
pub mod oauth;
trait WellKnownEndpoint {
fn get_known_route(&self) -> String;
fn get_resolver(self) -> MethodRouter;
}
impl<WK: WellKnownEndpoint> Endpoint for WK {
fn add_to_router(self, router: axumRouter) -> axumRouter {
router.route(&format!(".well-known/{}", self.get_known_route()), self.get_resolver())
}
}

View file

@ -1 +0,0 @@
pub mod handle_resolution;

View file

@ -1,52 +0,0 @@
use crate::{
wellknown::WellKnownEndpoint,
Error,
};
use atproto::types::{Handle, Did};
use axum::{
routing::{
method_routing::MethodRouter,
get,
},
http::{
StatusCode,
HeaderMap,
},
};
pub struct HandleResolutionEndpoint {
resolver: MethodRouter,
}
impl HandleResolutionEndpoint {
pub fn new<HR>(handle_resolver: HR) -> Self where
HR: HandleResolver + Clone
{
HandleResolutionEndpoint {
resolver: get(async move | headers: HeaderMap | -> (StatusCode, String) {
let Some(Ok(hostname)) = headers.get("host").map(|header_value| {
header_value.to_str()
}) else {
return (StatusCode::INTERNAL_SERVER_ERROR, String::from("Internal Server Error"));
};
let Ok(valid_handle) = hostname.parse::<atproto::types::Handle>() else {
return (StatusCode::NOT_FOUND, String::from("User not found"));
};
match handle_resolver.call(valid_handle) {
Ok(Some(did)) => (StatusCode::OK, did.to_string()),
Ok(None) => (StatusCode::NOT_FOUND, String::from("User not found")),
Err(_) => (StatusCode::INTERNAL_SERVER_ERROR, String::from("Internal Server Error")),
}
})
}
}
}
pub trait HandleResolver: Send + Sync + 'static {
fn call(&self, handle: Handle) -> Result<Option<Did>, Error>;
}
impl WellKnownEndpoint for HandleResolutionEndpoint {
fn get_known_route(&self) -> String { String::from("atproto-did") }
fn get_resolver(self) -> MethodRouter { self.resolver }
}

View file

@ -1 +0,0 @@
pub mod authorization_server;

View file

@ -1,103 +0,0 @@
use serde::de::{
Error,
Unexpected,
};
use serde_json::{
json,
Result,
Value
};
use bon::Builder;
trait Metadata {
fn format_metadata(self, required: RequiredMetadata) -> Result<Value>;
}
pub struct RequiredMetadata {
issuer: String,
authorization_endpoint: String,
token_endpoint: String,
}
impl RequiredMetadata {
fn new(
issuer: String,
authorization_endpoint: String,
token_endpoint: String
) -> Self {
RequiredMetadata {
issuer, authorization_endpoint, token_endpoint
}
}
}
#[derive(Builder)]
struct AtprotoMetadata {
additional_response_types_supported: Option<Vec<String>>,
additional_grant_types_supported: Option<Vec<String>>,
additional_code_challenge_methods_supported: Option<Vec<String>>,
additional_token_endpoint_auth_methods_supported: Option<Vec<String>>,
additional_token_endpoint_auth_signing_alg_values_supported: Option<Vec<String>>,
additional_scopes_supported: Option<Vec<String>>,
pushed_authorization_request_endpoint: String,
additional_dpop_signing_alg_values_supported: Option<Vec<String>>,
}
impl AtprotoMetadata {
fn check_fields(&self) -> Result<()> {
// TODO: Issuer check (https scheme, no default port, no path segments)
if self.additional_token_endpoint_auth_signing_alg_values_supported
.as_ref()
.is_none_or(|vec| vec.iter().any(|s| s == "none")) {
return Err(Error::invalid_value(
Unexpected::Other("\"none\" in token_endpoint_auth_signing_alg_values_supported"),
&"\"none\" to be omitted from token_endpoint_auth_signing_alg_values_supported"
));
}
Ok(())
}
}
impl Metadata for AtprotoMetadata {
fn format_metadata(self, required: RequiredMetadata) -> Result<Value> {
self.check_fields()?;
Ok(json!({
"issuer": required.issuer,
"authorization_endpoint": required.authorization_endpoint,
"token_endpoint": required.token_endpoint,
"response_types_supported":
self.additional_response_types_supported.unwrap_or_default()
.extend(["code".to_string()]),
"grant_types_supported":
self.additional_grant_types_supported.unwrap_or_default()
.extend([
"authorization_code".to_string(),
"refresh_token".to_string()
]),
"code_challenge_methods_supported":
self.additional_code_challenge_methods_supported.unwrap_or_default()
.extend(["S256".to_string()]),
"token_endpoint_auth_methods_supported":
self.additional_token_endpoint_auth_methods_supported.unwrap_or_default()
.extend([
"none".to_string(),
"private_key_jwt".to_string()
]),
"token_endpoint_auth_signing_alg_values_supported":
self.additional_token_endpoint_auth_signing_alg_values_supported.unwrap_or_default()
.extend(["ES256".to_string()]),
"scopes_supported":
self.additional_scopes_supported.unwrap_or_default()
.extend(["atproto".to_string()]),
"authorization_response_iss_parameter_supported": true,
"require_pushed_authorization_requests": true,
"pushed_authorization_request_endpoint": self.pushed_authorization_request_endpoint,
"dpop_signing_alg_values_supported":
self.additional_dpop_signing_alg_values_supported.unwrap_or_default()
.extend(["ES256".to_string()]),
"client_id_metadata_document_supported": true,
}))
}
}
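One caveat in format_metadata: Vec::extend returns (), so each of these json! fields would serialize as null rather than as the merged list. A small helper along these lines (the name is illustrative) would yield the intended arrays:

```rust
// Sketch: merge caller-supplied values with the values atproto requires,
// returning the combined list instead of the `()` that `extend` evaluates to.
fn with_required(additional: Option<Vec<String>>, required: &[&str]) -> Vec<String> {
    let mut values = additional.unwrap_or_default();
    values.extend(required.iter().map(|s| s.to_string()));
    values
}

// e.g. "response_types_supported":
//     with_required(self.additional_response_types_supported, &["code"]),
```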