Separate crates for separate concerns

lewis
2026-01-10 21:13:41 +02:00
parent cae667d219
commit 8f595b5ffb
266 changed files with 2188 additions and 686 deletions


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT t.token FROM plc_operation_tokens t JOIN users u ON t.user_id = u.id WHERE u.did = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "token",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false
]
},
"hash": "05fd99170e31e68fa5028c862417cdf535cd70e09fde0a8a28249df0070eb2fc"
}

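These JSON files are sqlx's offline query cache: each entry in .sqlx records the compile-time describe metadata for one query, keyed by a hash of the SQL string, so the build can type-check queries with SQLX_OFFLINE=true and no live database. A minimal sketch of the call site the entry above corresponds to — the function name and the `pool` argument are illustrative, not from the commit:

use sqlx::PgPool;

// Fetch the PLC operation token for the user with this DID.
// With SQLX_OFFLINE=true the macro is checked against the cached
// .sqlx entry above instead of a live Postgres instance.
async fn plc_token_for_did(pool: &PgPool, did: &str) -> Result<Option<String>, sqlx::Error> {
    let row = sqlx::query!(
        "SELECT t.token FROM plc_operation_tokens t JOIN users u ON t.user_id = u.id WHERE u.did = $1",
        did
    )
    .fetch_optional(pool)
    .await?;
    Ok(row.map(|r| r.token))
}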

@@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE users SET deactivated_at = $1 WHERE did = $2",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Timestamptz",
"Text"
]
},
"nullable": []
},
"hash": "0710b57fb9aa933525f617b15e6e2e5feaa9c59c38ec9175568abdacda167107"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT body FROM comms_queue WHERE user_id = (SELECT id FROM users WHERE did = $1) AND comms_type = 'email_update' ORDER BY created_at DESC LIMIT 1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "body",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false
]
},
"hash": "0ec60bb854a4991d0d7249a68f7445b65c8cc8c723baca221d85f5e4f2478b99"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT COUNT(*) FROM comms_queue WHERE status = 'pending' AND user_id = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
null
]
},
"hash": "24a7686c535e4f0332f45daa20cfce2209635090252ac3692823450431d03dc6"
}


@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE users SET password_reset_code_expires_at = NOW() - INTERVAL '1 hour' WHERE email = $1",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "29ef76852bb89af1ab9e679ceaa4abcf8bc8268a348d3be0da9840d1708d20b5"
}


@@ -0,0 +1,54 @@
{
"db_name": "PostgreSQL",
"query": "SELECT subject, body, comms_type as \"comms_type: String\" FROM comms_queue WHERE user_id = $1 AND comms_type = 'admin_email' ORDER BY created_at DESC LIMIT 1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "subject",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "body",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "comms_type: String",
"type_info": {
"Custom": {
"name": "comms_type",
"kind": {
"Enum": [
"welcome",
"email_verification",
"password_reset",
"email_update",
"account_deletion",
"admin_email",
"plc_operation",
"two_factor_code",
"channel_verification",
"passkey_recovery",
"legacy_login_alert",
"migration_verification"
]
}
}
}
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
true,
false,
false
]
},
"hash": "4445cc86cdf04894b340e67661b79a3c411917144a011f50849b737130b24dbe"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT id FROM users WHERE email = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Uuid"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false
]
},
"hash": "4560c237741ce9d4166aecd669770b3360a3ac71e649b293efb88d92c3254068"
}


@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "SELECT body, metadata FROM comms_queue WHERE user_id = $1 AND comms_type = 'channel_verification' ORDER BY created_at DESC LIMIT 1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "body",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "metadata",
"type_info": "Jsonb"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
true
]
},
"hash": "4649e8daefaf4cfefc5cb2de8b3813f13f5892f653128469be727b686e6a0f0a"
}


@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "SELECT token, expires_at FROM account_deletion_requests WHERE did = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "token",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "expires_at",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false,
false
]
},
"hash": "47fe4a54857344d8f789f37092a294cd58f64b4fb431b54b5deda13d64525e88"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT email_verified FROM users WHERE did = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "email_verified",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false
]
},
"hash": "49cbc923cc4a0dcf7dea4ead5ab9580ff03b717586c4ca2d5343709e2dac86b6"
}


@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT k.key_bytes, k.encryption_version\n FROM user_keys k\n JOIN users u ON k.user_id = u.id\n WHERE u.did = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "key_bytes",
"type_info": "Bytea"
},
{
"ordinal": 1,
"name": "encryption_version",
"type_info": "Int4"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false,
true
]
},
"hash": "5a016f289caf75177731711e56e92881ba343c73a9a6e513e205c801c5943ec0"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT body FROM comms_queue WHERE user_id = $1 AND comms_type = 'email_update' ORDER BY created_at DESC LIMIT 1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "body",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false
]
},
"hash": "5a036d95feedcbe6fb6396b10a7b4bd6a2eedeefda46a23e6a904cdbc3a65d45"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT subject FROM comms_queue WHERE user_id = $1 AND comms_type = 'admin_email' AND body = 'Email without subject' LIMIT 1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "subject",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
true
]
},
"hash": "785a864944c5939331704c71b0cd3ed26ffdd64f3fd0f26ecc28b6a0557bbe8f"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT t.token\n FROM plc_operation_tokens t\n JOIN users u ON t.user_id = u.id\n WHERE u.did = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "token",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false
]
},
"hash": "7caa8f9083b15ec1209dda35c4c6f6fba9fe338e4a6a10636b5389d426df1631"
}


@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "SELECT t.token, t.expires_at FROM plc_operation_tokens t JOIN users u ON t.user_id = u.id WHERE u.did = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "token",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "expires_at",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false,
false
]
},
"hash": "82717b6f61cd79347e1ca7e92c4413743ba168d1e0d8b85566711e54d4048f81"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT body FROM comms_queue WHERE user_id = (SELECT id FROM users WHERE did = $1) AND comms_type = 'email_verification' ORDER BY created_at DESC LIMIT 1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "body",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false
]
},
"hash": "9ad422bf3c43e3cfd86fc88c73594246ead214ca794760d3fe77bb5cf4f27be5"
}


@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "SELECT did, public_key_did_key FROM reserved_signing_keys WHERE public_key_did_key = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "did",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "public_key_did_key",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
true,
false
]
},
"hash": "9b035b051769e6b9d45910a8bb42ac0f84c73de8c244ba4560f004ee3f4b7002"
}


@@ -0,0 +1,108 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n id, user_id, recipient, subject, body,\n channel as \"channel: CommsChannel\",\n comms_type as \"comms_type: CommsType\",\n status as \"status: CommsStatus\"\n FROM comms_queue\n WHERE id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "user_id",
"type_info": "Uuid"
},
{
"ordinal": 2,
"name": "recipient",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "subject",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "body",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "channel: CommsChannel",
"type_info": {
"Custom": {
"name": "comms_channel",
"kind": {
"Enum": [
"email",
"discord",
"telegram",
"signal"
]
}
}
}
},
{
"ordinal": 6,
"name": "comms_type: CommsType",
"type_info": {
"Custom": {
"name": "comms_type",
"kind": {
"Enum": [
"welcome",
"email_verification",
"password_reset",
"email_update",
"account_deletion",
"admin_email",
"plc_operation",
"two_factor_code",
"channel_verification",
"passkey_recovery",
"legacy_login_alert",
"migration_verification"
]
}
}
}
},
{
"ordinal": 7,
"name": "status: CommsStatus",
"type_info": {
"Custom": {
"name": "comms_status",
"kind": {
"Enum": [
"pending",
"processing",
"sent",
"failed"
]
}
}
}
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false,
false,
true,
false,
false,
false,
false
]
},
"hash": "9e772a967607553a0ab800970eaeadcaab7e06bdb79e0c89eb919b1bc1d6fabe"
}


@@ -0,0 +1,34 @@
{
"db_name": "PostgreSQL",
"query": "SELECT private_key_bytes, expires_at, used_at FROM reserved_signing_keys WHERE public_key_did_key = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "private_key_bytes",
"type_info": "Bytea"
},
{
"ordinal": 1,
"name": "expires_at",
"type_info": "Timestamptz"
},
{
"ordinal": 2,
"name": "used_at",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false,
false,
true
]
},
"hash": "a23a390659616779d7dbceaa3b5d5171e70fa25e3b8393e142cebcbff752f0f5"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT token FROM account_deletion_requests WHERE did = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "token",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false
]
},
"hash": "a802d7d860f263eace39ce82bb27b633cec7287c1cc177f0e1d47ec6571564d5"
}


@@ -0,0 +1,60 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n recipient, subject, body,\n comms_type as \"comms_type: CommsType\"\n FROM comms_queue\n WHERE id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "recipient",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "subject",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "body",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "comms_type: CommsType",
"type_info": {
"Custom": {
"name": "comms_type",
"kind": {
"Enum": [
"welcome",
"email_verification",
"password_reset",
"email_update",
"account_deletion",
"admin_email",
"plc_operation",
"two_factor_code",
"channel_verification",
"passkey_recovery",
"legacy_login_alert",
"migration_verification"
]
}
}
}
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
true,
false,
false
]
},
"hash": "b0fca342e85dea89a06b4fee144cae4825dec587b1387f0fee401458aea2a2e5"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT password_reset_code FROM users WHERE email = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "password_reset_code",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
true
]
},
"hash": "cd3b8098ad4c1056c1d23acd8a6b29f7abfe18ee6f559bd94ab16274b1cfdfee"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT COUNT(*) as \"count!\" FROM plc_operation_tokens t JOIN users u ON t.user_id = u.id WHERE u.did = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count!",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
null
]
},
"hash": "cda68f9b6c60295a196fc853b70ec5fd51a8ffaa2bac5942c115c99d1cbcafa3"
}


@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "\n UPDATE comms_queue\n SET status = 'processing', updated_at = NOW()\n WHERE id IN (\n SELECT id FROM comms_queue\n WHERE status = 'pending'\n AND scheduled_for <= $1\n AND attempts < max_attempts\n ORDER BY scheduled_for ASC\n LIMIT $2\n FOR UPDATE SKIP LOCKED\n )\n RETURNING\n id, user_id,\n channel as \"channel: CommsChannel\",\n comms_type as \"comms_type: super::types::CommsType\",\n status as \"status: CommsStatus\",\n recipient, subject, body, metadata,\n attempts, max_attempts, last_error,\n created_at, updated_at, scheduled_for, processed_at\n ",
"query": "\n UPDATE comms_queue\n SET status = 'processing', updated_at = NOW()\n WHERE id IN (\n SELECT id FROM comms_queue\n WHERE status = 'pending'\n AND scheduled_for <= $1\n AND attempts < max_attempts\n ORDER BY scheduled_for ASC\n LIMIT $2\n FOR UPDATE SKIP LOCKED\n )\n RETURNING\n id, user_id,\n channel as \"channel: CommsChannel\",\n comms_type as \"comms_type: CommsType\",\n status as \"status: CommsStatus\",\n recipient, subject, body, metadata,\n attempts, max_attempts, last_error,\n created_at, updated_at, scheduled_for, processed_at\n ",
"describe": {
"columns": [
{
@@ -32,7 +32,7 @@
},
{
"ordinal": 3,
"name": "comms_type: super::types::CommsType",
"name": "comms_type: CommsType",
"type_info": {
"Custom": {
"name": "comms_type",
@@ -153,5 +153,5 @@
true
]
},
"hash": "20dd204aa552572ec9dc5b9950efdfa8a2e37aae3f171a2be73bee3057f86e08"
"hash": "d4c68f8502bc81c27383f15dca1990c41b5e5534a3db9c137e3ef8e66fdf0a87"
}


@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE account_deletion_requests SET expires_at = NOW() - INTERVAL '1 hour' WHERE token = $1",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "d529d6dc9858c1da360f0417e94a3b40041b043bae57e95002d4bf5df46a4ab4"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT COUNT(*) FROM comms_queue WHERE user_id = $1 AND comms_type = 'password_reset'",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
null
]
},
"hash": "e20cbe2a939d790aaea718b084a80d8ede655ba1cc0fd4346d7e91d6de7d6cf3"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT used_at FROM reserved_signing_keys WHERE public_key_did_key = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "used_at",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
true
]
},
"hash": "e64cd36284d10ab7f3d9f6959975a1a627809f444b0faff7e611d985f31b90e9"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT email FROM users WHERE did = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "email",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
true
]
},
"hash": "f26c13023b47b908ec96da2e6b8bf8b34ca6a2246c20fc96f76f0e95530762a7"
}


@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE users SET is_admin = TRUE WHERE did = $1",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "f29da3bdfbbc547b339b4cdb059fac26435b0feec65cf1c56f851d1c4d6b1814"
}


@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "SELECT password_reset_code, password_reset_code_expires_at FROM users WHERE email = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "password_reset_code",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "password_reset_code_expires_at",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
true,
true
]
},
"hash": "f7af28963099aec12cf1d4f8a9a03699bb3a90f39bc9c4c0f738a37827e8f382"
}

Cargo.lock generated

@@ -6313,6 +6313,110 @@ dependencies = [
"syn 2.0.111",
]
[[package]]
name = "tranquil-auth"
version = "0.1.0"
dependencies = [
"anyhow",
"base32",
"base64 0.22.1",
"bcrypt",
"chrono",
"hmac",
"k256",
"rand 0.8.5",
"serde",
"serde_json",
"sha2",
"subtle",
"thiserror 2.0.17",
"totp-rs",
"tranquil-crypto",
"urlencoding",
"uuid",
]
[[package]]
name = "tranquil-cache"
version = "0.1.0"
dependencies = [
"async-trait",
"base64 0.22.1",
"redis",
"thiserror 2.0.17",
"tracing",
"tranquil-infra",
]
[[package]]
name = "tranquil-comms"
version = "0.1.0"
dependencies = [
"async-trait",
"base64 0.22.1",
"chrono",
"reqwest",
"serde",
"serde_json",
"sqlx",
"thiserror 2.0.17",
"tokio",
"urlencoding",
"uuid",
]
[[package]]
name = "tranquil-crypto"
version = "0.1.0"
dependencies = [
"aes-gcm",
"base64 0.22.1",
"hkdf",
"hmac",
"p256 0.13.2",
"rand 0.8.5",
"serde",
"serde_json",
"sha2",
"subtle",
"thiserror 2.0.17",
]
[[package]]
name = "tranquil-infra"
version = "0.1.0"
dependencies = [
"async-trait",
"bytes",
"futures",
"thiserror 2.0.17",
"tokio",
"tracing",
]
[[package]]
name = "tranquil-oauth"
version = "0.1.0"
dependencies = [
"anyhow",
"axum",
"base64 0.22.1",
"chrono",
"ed25519-dalek",
"p256 0.13.2",
"p384",
"rand 0.8.5",
"reqwest",
"serde",
"serde_json",
"sha2",
"sqlx",
"tokio",
"tracing",
"tranquil-types",
"uuid",
]
[[package]]
name = "tranquil-pds"
version = "0.1.0"
@@ -6380,6 +6484,16 @@ dependencies = [
"tower-layer",
"tracing",
"tracing-subscriber",
"tranquil-auth",
"tranquil-cache",
"tranquil-comms",
"tranquil-crypto",
"tranquil-infra",
"tranquil-oauth",
"tranquil-repo",
"tranquil-scopes",
"tranquil-storage",
"tranquil-types",
"urlencoding",
"uuid",
"webauthn-rs",
@@ -6388,6 +6502,60 @@ dependencies = [
"zip",
]
[[package]]
name = "tranquil-repo"
version = "0.1.0"
dependencies = [
"bytes",
"cid",
"jacquard-repo",
"multihash",
"sha2",
"sqlx",
"tranquil-types",
]
[[package]]
name = "tranquil-scopes"
version = "0.1.0"
dependencies = [
"axum",
"futures",
"reqwest",
"serde",
"serde_json",
"tokio",
"tracing",
]
[[package]]
name = "tranquil-storage"
version = "0.1.0"
dependencies = [
"async-trait",
"aws-config",
"aws-sdk-s3",
"bytes",
"futures",
"sha2",
"thiserror 2.0.17",
"tracing",
"tranquil-infra",
]
[[package]]
name = "tranquil-types"
version = "0.1.0"
dependencies = [
"chrono",
"cid",
"jacquard",
"serde",
"serde_json",
"sqlx",
"thiserror 2.0.17",
]
[[package]]
name = "triomphe"
version = "0.1.15"


@@ -1,79 +1,103 @@
[package]
name = "tranquil-pds"
[workspace]
resolver = "2"
members = [
"crates/tranquil-types",
"crates/tranquil-infra",
"crates/tranquil-crypto",
"crates/tranquil-storage",
"crates/tranquil-cache",
"crates/tranquil-repo",
"crates/tranquil-scopes",
"crates/tranquil-auth",
"crates/tranquil-oauth",
"crates/tranquil-comms",
"crates/tranquil-pds",
]
[workspace.package]
version = "0.1.0"
edition = "2024"
license = "AGPL-3.0-or-later"
[dependencies]
anyhow = "1.0.100"
async-trait = "0.1.89"
aws-config = "1.8.12"
aws-sdk-s3 = "1.118.0"
axum = { version = "0.8.8", features = ["ws", "macros"] }
[workspace.dependencies]
tranquil-types = { path = "crates/tranquil-types" }
tranquil-infra = { path = "crates/tranquil-infra" }
tranquil-crypto = { path = "crates/tranquil-crypto" }
tranquil-storage = { path = "crates/tranquil-storage" }
tranquil-cache = { path = "crates/tranquil-cache" }
tranquil-repo = { path = "crates/tranquil-repo" }
tranquil-scopes = { path = "crates/tranquil-scopes" }
tranquil-auth = { path = "crates/tranquil-auth" }
tranquil-oauth = { path = "crates/tranquil-oauth" }
tranquil-comms = { path = "crates/tranquil-comms" }
aes-gcm = "0.10"
anyhow = "1.0"
async-trait = "0.1"
aws-config = "1.8"
aws-sdk-s3 = "1.118"
axum = { version = "0.8", features = ["ws", "macros"] }
base32 = "0.5"
base64 = "0.22.1"
bcrypt = "0.17.1"
bytes = "1.11.0"
chrono = { version = "0.4.42", features = ["serde"] }
cid = "0.11.1"
dotenvy = "0.15.7"
futures = "0.3.30"
base64 = "0.22"
bcrypt = "0.17"
bs58 = "0.5"
bytes = "1.11"
chrono = { version = "0.4", features = ["serde"] }
cid = "0.11"
dotenvy = "0.15"
ed25519-dalek = { version = "2.1", features = ["pkcs8"] }
futures = "0.3"
futures-util = "0.3"
governor = "0.10"
hex = "0.4"
hickory-resolver = { version = "0.24", features = ["tokio-runtime"] }
hkdf = "0.12"
hmac = "0.12"
http = "1.4"
image = { version = "0.25", default-features = false, features = ["jpeg", "png", "gif", "webp"] }
infer = "0.19"
aes-gcm = "0.10"
jacquard = { version = "0.9.5", default-features = false, features = ["api", "api_bluesky", "api_full", "derive", "dns"] }
jacquard-axum = "0.9.6"
jacquard-repo = "0.9.6"
jsonwebtoken = { version = "10.2.0", features = ["rust_crypto"] }
k256 = { version = "0.13.3", features = ["ecdsa", "pem", "pkcs8"] }
multibase = "0.9.1"
multihash = "0.19.3"
rand = "0.8.5"
regex = "1"
reqwest = { version = "0.12.28", features = ["json"] }
serde = { version = "1.0.228", features = ["derive"] }
serde_bytes = "0.11.14"
serde_ipld_dagcbor = "0.6.4"
ipld-core = "0.4.2"
serde_json = "1.0.146"
serde_urlencoded = "0.7"
sha2 = "0.10.9"
subtle = "2.5"
ipld-core = "0.4"
iroh-car = "0.5"
jacquard = { version = "0.9", default-features = false, features = ["api", "api_bluesky", "api_full", "derive", "dns"] }
jacquard-axum = "0.9"
jacquard-repo = "0.9"
jsonwebtoken = { version = "10.2", features = ["rust_crypto"] }
k256 = { version = "0.13", features = ["ecdsa", "pem", "pkcs8"] }
metrics = "0.24"
metrics-exporter-prometheus = { version = "0.16", default-features = false, features = ["http-listener"] }
multibase = "0.9"
multihash = "0.19"
p256 = { version = "0.13", features = ["ecdsa"] }
p384 = { version = "0.13", features = ["ecdsa"] }
ed25519-dalek = { version = "2.1", features = ["pkcs8"] }
sqlx = { version = "0.8.6", features = ["runtime-tokio-rustls", "postgres", "uuid", "chrono", "json"] }
thiserror = "2.0.17"
tokio = { version = "1.48.0", features = ["macros", "rt-multi-thread", "time", "signal", "process"] }
tracing = "0.1.43"
tracing-subscriber = "0.3.22"
tokio-tungstenite = { version = "0.28.0", features = ["native-tls"] }
urlencoding = "2.1"
uuid = { version = "1.19.0", features = ["v4", "v5", "fast-rng"] }
iroh-car = "0.5.1"
image = { version = "0.25.9", default-features = false, features = ["jpeg", "png", "gif", "webp"] }
redis = { version = "1.0.1", features = ["tokio-comp", "connection-manager"] }
tower-http = { version = "0.6.8", features = ["fs", "cors"] }
hickory-resolver = { version = "0.24", features = ["tokio-runtime"] }
metrics = "0.24.3"
metrics-exporter-prometheus = { version = "0.16", default-features = false, features = ["http-listener"] }
bs58 = "0.5.1"
rand = "0.8"
redis = { version = "1.0", features = ["tokio-comp", "connection-manager"] }
regex = "1"
reqwest = { version = "0.12", features = ["json"] }
serde = { version = "1.0", features = ["derive"] }
serde_bytes = "0.11"
serde_ipld_dagcbor = "0.6"
serde_json = "1.0"
serde_urlencoded = "0.7"
sha2 = "0.10"
sqlx = { version = "0.8", features = ["runtime-tokio-rustls", "postgres", "uuid", "chrono", "json"] }
subtle = "2.5"
thiserror = "2.0"
tokio = { version = "1.48", features = ["macros", "rt-multi-thread", "time", "signal", "process"] }
tokio-tungstenite = { version = "0.28", features = ["native-tls"] }
totp-rs = { version = "5", features = ["qr"] }
webauthn-rs = { version = "0.5.4", features = ["danger-allow-state-serialisation", "danger-user-presence-only-security-keys"] }
webauthn-rs-proto = "0.5.4"
zip = { version = "7.0.0", default-features = false, features = ["deflate"] }
tower = "0.5.2"
tower-layer = "0.3.3"
futures-util = "0.3.31"
http = "1.4.0"
[features]
external-infra = []
[dev-dependencies]
tower = "0.5"
tower-http = { version = "0.6", features = ["fs", "cors"] }
tower-layer = "0.3"
tracing = "0.1"
tracing-subscriber = "0.3"
urlencoding = "2.1"
uuid = { version = "1.19", features = ["v4", "v5", "fast-rng"] }
webauthn-rs = { version = "0.5", features = ["danger-allow-state-serialisation", "danger-user-presence-only-security-keys"] }
webauthn-rs-proto = "0.5"
zip = { version = "7.0", default-features = false, features = ["deflate"] }
ciborium = "0.2"
ctor = "0.6.3"
testcontainers = "0.26.2"
testcontainers-modules = { version = "0.14.0", features = ["postgres"] }
wiremock = "0.6.5"
# urlencoding is also in dependencies, but tests use it directly
ctor = "0.6"
testcontainers = "0.26"
testcontainers-modules = { version = "0.14", features = ["postgres"] }
wiremock = "0.6"


@@ -0,0 +1,25 @@
[package]
name = "tranquil-auth"
version.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
tranquil-crypto = { workspace = true }
anyhow = { workspace = true }
base32 = { workspace = true }
base64 = { workspace = true }
bcrypt = { workspace = true }
chrono = { workspace = true }
hmac = { workspace = true }
k256 = { workspace = true }
rand = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
sha2 = { workspace = true }
subtle = { workspace = true }
thiserror = { workspace = true }
totp-rs = { workspace = true }
urlencoding = { workspace = true }
uuid = { workspace = true }


@@ -0,0 +1,29 @@
mod token;
mod totp;
mod types;
mod verify;
pub use token::{
SCOPE_ACCESS, SCOPE_APP_PASS, SCOPE_APP_PASS_PRIVILEGED, SCOPE_REFRESH, TOKEN_TYPE_ACCESS,
TOKEN_TYPE_REFRESH, TOKEN_TYPE_SERVICE, create_access_token, create_access_token_hs256,
create_access_token_hs256_with_metadata, create_access_token_with_delegation,
create_access_token_with_metadata, create_access_token_with_scope_metadata,
create_refresh_token, create_refresh_token_hs256, create_refresh_token_hs256_with_metadata,
create_refresh_token_with_metadata, create_service_token, create_service_token_hs256,
};
pub use totp::{
decrypt_totp_secret, encrypt_totp_secret, generate_backup_codes, generate_qr_png_base64,
generate_totp_secret, generate_totp_uri, hash_backup_code, is_backup_code_format,
verify_backup_code, verify_totp_code,
};
pub use types::{
ActClaim, Claims, Header, TokenData, TokenVerifyError, TokenWithMetadata, UnsafeClaims,
};
pub use verify::{
get_algorithm_from_token, get_did_from_token, get_jti_from_token, verify_access_token,
verify_access_token_hs256, verify_access_token_typed, verify_refresh_token,
verify_refresh_token_hs256, verify_token,
};


@@ -1,12 +1,11 @@
use super::{ActClaim, Claims, Header};
use super::types::{ActClaim, Claims, Header, TokenWithMetadata};
use anyhow::Result;
use base64::Engine as _;
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
use chrono::{DateTime, Duration, Utc};
use chrono::{Duration, Utc};
use hmac::{Hmac, Mac};
use k256::ecdsa::{Signature, SigningKey, signature::Signer};
use sha2::Sha256;
use uuid;
type HmacSha256 = Hmac<Sha256>;
@@ -18,12 +17,6 @@ pub const SCOPE_REFRESH: &str = "com.atproto.refresh";
pub const SCOPE_APP_PASS: &str = "com.atproto.appPass";
pub const SCOPE_APP_PASS_PRIVILEGED: &str = "com.atproto.appPassPrivileged";
pub struct TokenWithMetadata {
pub token: String,
pub jti: String,
pub expires_at: DateTime<Utc>,
}
pub fn create_access_token(did: &str, key_bytes: &[u8]) -> Result<String> {
Ok(create_access_token_with_metadata(did, key_bytes)?.token)
}
@@ -33,13 +26,14 @@ pub fn create_refresh_token(did: &str, key_bytes: &[u8]) -> Result<String> {
}
pub fn create_access_token_with_metadata(did: &str, key_bytes: &[u8]) -> Result<TokenWithMetadata> {
create_access_token_with_scope_metadata(did, key_bytes, None)
create_access_token_with_scope_metadata(did, key_bytes, None, None)
}
pub fn create_access_token_with_scope_metadata(
did: &str,
key_bytes: &[u8],
scopes: Option<&str>,
hostname: Option<&str>,
) -> Result<TokenWithMetadata> {
let scope = scopes.unwrap_or(SCOPE_ACCESS);
create_signed_token_with_metadata(
@@ -48,6 +42,7 @@ pub fn create_access_token_with_scope_metadata(
TOKEN_TYPE_ACCESS,
key_bytes,
Duration::minutes(15),
hostname,
)
}
@@ -56,6 +51,7 @@ pub fn create_access_token_with_delegation(
key_bytes: &[u8],
scopes: Option<&str>,
controller_did: Option<&str>,
hostname: Option<&str>,
) -> Result<TokenWithMetadata> {
let scope = scopes.unwrap_or(SCOPE_ACCESS);
let act = controller_did.map(|c| ActClaim { sub: c.to_string() });
@@ -66,6 +62,7 @@ pub fn create_access_token_with_delegation(
key_bytes,
Duration::minutes(15),
act,
hostname,
)
}
@@ -79,6 +76,7 @@ pub fn create_refresh_token_with_metadata(
TOKEN_TYPE_REFRESH,
key_bytes,
Duration::days(14),
None,
)
}
@@ -111,8 +109,9 @@ fn create_signed_token_with_metadata(
typ: &str,
key_bytes: &[u8],
duration: Duration,
hostname: Option<&str>,
) -> Result<TokenWithMetadata> {
create_signed_token_with_act(did, scope, typ, key_bytes, duration, None)
create_signed_token_with_act(did, scope, typ, key_bytes, duration, None, hostname)
}
fn create_signed_token_with_act(
@@ -122,6 +121,7 @@ fn create_signed_token_with_act(
key_bytes: &[u8],
duration: Duration,
act: Option<ActClaim>,
hostname: Option<&str>,
) -> Result<TokenWithMetadata> {
let signing_key = SigningKey::from_slice(key_bytes)?;
@@ -132,13 +132,14 @@ fn create_signed_token_with_act(
let expiration = expires_at.timestamp();
let jti = uuid::Uuid::new_v4().to_string();
let aud_hostname = hostname.map(|h| h.to_string()).unwrap_or_else(|| {
std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string())
});
let claims = Claims {
iss: did.to_owned(),
sub: did.to_owned(),
aud: format!(
"did:web:{}",
std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string())
),
aud: format!("did:web:{}", aud_hostname),
exp: expiration as usize,
iat: Utc::now().timestamp() as usize,
scope: Some(scope.to_string()),

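The hunks above thread a new `hostname: Option<&str>` parameter down to the claim builder: the `aud` claim becomes `did:web:<hostname>` when a hostname is supplied, and only falls back to the PDS_HOSTNAME environment variable when the caller passes None. A hedged usage sketch — the hostname literal and the surrounding function are illustrative, not from the commit:

use tranquil_auth::create_access_token_with_scope_metadata;

fn issue(did: &str, signing_key: &[u8]) -> anyhow::Result<()> {
    // aud becomes "did:web:pds.example.com" without consulting
    // the PDS_HOSTNAME environment variable.
    let tok =
        create_access_token_with_scope_metadata(did, signing_key, None, Some("pds.example.com"))?;
    println!("jti={} expires_at={}", tok.jti, tok.expires_at);
    Ok(())
}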

@@ -13,12 +13,18 @@ pub fn generate_totp_secret() -> Vec<u8> {
secret
}
pub fn encrypt_totp_secret(secret: &[u8]) -> Result<Vec<u8>, String> {
crate::config::encrypt_key(secret)
pub fn encrypt_totp_secret(
secret: &[u8],
master_key: &[u8; 32],
) -> Result<Vec<u8>, tranquil_crypto::CryptoError> {
tranquil_crypto::encrypt_with_key(master_key, secret)
}
pub fn decrypt_totp_secret(encrypted: &[u8], version: i32) -> Result<Vec<u8>, String> {
crate::config::decrypt_key(encrypted, Some(version))
pub fn decrypt_totp_secret(
encrypted: &[u8],
master_key: &[u8; 32],
) -> Result<Vec<u8>, tranquil_crypto::CryptoError> {
tranquil_crypto::decrypt_with_key(master_key, encrypted)
}
fn create_totp(
@@ -53,16 +59,12 @@ pub fn verify_totp_code(secret: &[u8], code: &str) -> bool {
.map(|d| d.as_secs())
.unwrap_or(0);
for offset in [-1i64, 0, 1] {
[-1i64, 0, 1].iter().any(|&offset| {
let time = (now as i64 + offset * TOTP_STEP as i64) as u64;
let expected = totp.generate(time);
let is_valid: bool = code.as_bytes().ct_eq(expected.as_bytes()).into();
if is_valid {
return true;
}
}
false
is_valid
})
}
pub fn generate_totp_uri(secret: &[u8], account_name: &str, issuer: &str) -> String {
@@ -107,14 +109,15 @@ pub fn generate_backup_codes() -> Vec<String> {
let mut codes = Vec::with_capacity(BACKUP_CODE_COUNT);
let mut rng = rand::thread_rng();
for _ in 0..BACKUP_CODE_COUNT {
let mut code = String::with_capacity(BACKUP_CODE_LENGTH);
for _ in 0..BACKUP_CODE_LENGTH {
let idx = (rng.next_u32() as usize) % BACKUP_CODE_ALPHABET.len();
code.push(BACKUP_CODE_ALPHABET[idx] as char);
}
(0..BACKUP_CODE_COUNT).for_each(|_| {
let code: String = (0..BACKUP_CODE_LENGTH)
.map(|_| {
let idx = (rng.next_u32() as usize) % BACKUP_CODE_ALPHABET.len();
BACKUP_CODE_ALPHABET[idx] as char
})
.collect();
codes.push(code);
}
});
codes
}
@@ -167,10 +170,10 @@ mod tests {
fn test_backup_codes() {
let codes = generate_backup_codes();
assert_eq!(codes.len(), BACKUP_CODE_COUNT);
for code in &codes {
codes.iter().for_each(|code| {
assert_eq!(code.len(), BACKUP_CODE_LENGTH);
assert!(is_backup_code_format(code));
}
});
}
#[test]

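After this hunk the TOTP secret helpers no longer reach into crate-level config: the caller supplies the 32-byte master key and gets a typed tranquil_crypto::CryptoError on failure. A round-trip sketch; the all-zero key is for illustration only:

use tranquil_auth::{decrypt_totp_secret, encrypt_totp_secret, generate_totp_secret};
use tranquil_crypto::CryptoError;

fn totp_round_trip() -> Result<(), CryptoError> {
    let master_key = [0u8; 32]; // illustration only; derive a real key in production
    let secret = generate_totp_secret();
    let encrypted = encrypt_totp_secret(&secret, &master_key)?;
    let decrypted = decrypt_totp_secret(&encrypted, &master_key)?;
    assert_eq!(secret, decrypted);
    Ok(())
}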

@@ -0,0 +1,63 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::fmt;
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ActClaim {
pub sub: String,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Claims {
pub iss: String,
pub sub: String,
pub aud: String,
pub exp: usize,
pub iat: usize,
#[serde(skip_serializing_if = "Option::is_none")]
pub scope: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub lxm: Option<String>,
pub jti: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub act: Option<ActClaim>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Header {
pub alg: String,
pub typ: String,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct UnsafeClaims {
pub iss: String,
pub sub: Option<String>,
}
pub struct TokenData<T> {
pub claims: T,
}
pub struct TokenWithMetadata {
pub token: String,
pub jti: String,
pub expires_at: DateTime<Utc>,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TokenVerifyError {
Expired,
Invalid,
}
impl fmt::Display for TokenVerifyError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Expired => write!(f, "Token expired"),
Self::Invalid => write!(f, "Token invalid"),
}
}
}
impl std::error::Error for TokenVerifyError {}


@@ -2,7 +2,7 @@ use super::token::{
SCOPE_ACCESS, SCOPE_APP_PASS, SCOPE_APP_PASS_PRIVILEGED, SCOPE_REFRESH, TOKEN_TYPE_ACCESS,
TOKEN_TYPE_REFRESH,
};
use super::{Claims, Header, TokenData, UnsafeClaims};
use super::types::{Claims, Header, TokenData, TokenVerifyError, UnsafeClaims};
use anyhow::{Context, Result, anyhow};
use base64::Engine as _;
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
@@ -10,28 +10,10 @@ use chrono::Utc;
use hmac::{Hmac, Mac};
use k256::ecdsa::{Signature, SigningKey, VerifyingKey, signature::Verifier};
use sha2::Sha256;
use std::fmt;
use subtle::ConstantTimeEq;
type HmacSha256 = Hmac<Sha256>;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TokenVerifyError {
Expired,
Invalid,
}
impl fmt::Display for TokenVerifyError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Expired => write!(f, "Token expired"),
Self::Invalid => write!(f, "Token invalid"),
}
}
}
impl std::error::Error for TokenVerifyError {}
pub fn get_did_from_token(token: &str) -> Result<String, String> {
let parts: Vec<&str> = token.split('.').collect();
if parts.len() != 3 {
@@ -68,6 +50,22 @@ pub fn get_jti_from_token(token: &str) -> Result<String, String> {
.ok_or_else(|| "No jti claim in token".to_string())
}
pub fn get_algorithm_from_token(token: &str) -> Result<String, String> {
let parts: Vec<&str> = token.split('.').collect();
if parts.len() != 3 {
return Err("Invalid token format".to_string());
}
let header_bytes = URL_SAFE_NO_PAD
.decode(parts[0])
.map_err(|e| format!("Base64 decode failed: {}", e))?;
let header: Header =
serde_json::from_slice(&header_bytes).map_err(|e| format!("JSON decode failed: {}", e))?;
Ok(header.alg)
}
pub fn verify_token(token: &str, key_bytes: &[u8]) -> Result<TokenData<Claims>> {
verify_token_internal(token, key_bytes, None, None)
}
@@ -331,19 +329,3 @@ fn verify_token_typed_internal(
Ok(TokenData { claims })
}
pub fn get_algorithm_from_token(token: &str) -> Result<String, String> {
let parts: Vec<&str> = token.split('.').collect();
if parts.len() != 3 {
return Err("Invalid token format".to_string());
}
let header_bytes = URL_SAFE_NO_PAD
.decode(parts[0])
.map_err(|e| format!("Base64 decode failed: {}", e))?;
let header: Header =
serde_json::from_slice(&header_bytes).map_err(|e| format!("JSON decode failed: {}", e))?;
Ok(header.alg)
}

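The relocated helpers (`get_algorithm_from_token`, `get_did_from_token`, `get_jti_from_token`) only base64-decode the header and claims and never check a signature, so they can run before picking a verifier. A small sketch using just those helpers — their output must be treated as untrusted until the token is actually verified:

use tranquil_auth::{get_algorithm_from_token, get_did_from_token, get_jti_from_token};

fn inspect(token: &str) -> Result<(), String> {
    let alg = get_algorithm_from_token(token)?; // e.g. "HS256" or "ES256K"
    let did = get_did_from_token(token)?;
    let jti = get_jti_from_token(token)?;
    println!("alg={alg} did={did} jti={jti}");
    Ok(())
}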

@@ -0,0 +1,14 @@
[package]
name = "tranquil-cache"
version.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
tranquil-infra = { workspace = true }
async-trait = { workspace = true }
base64 = { workspace = true }
redis = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }


@@ -1,30 +1,10 @@
pub use tranquil_infra::{Cache, CacheError, DistributedRateLimiter};
use async_trait::async_trait;
use base64::{Engine as _, engine::general_purpose::STANDARD as BASE64};
use std::sync::Arc;
use std::time::Duration;
#[derive(Debug, thiserror::Error)]
pub enum CacheError {
#[error("Cache connection error: {0}")]
Connection(String),
#[error("Serialization error: {0}")]
Serialization(String),
}
#[async_trait]
pub trait Cache: Send + Sync {
async fn get(&self, key: &str) -> Option<String>;
async fn set(&self, key: &str, value: &str, ttl: Duration) -> Result<(), CacheError>;
async fn delete(&self, key: &str) -> Result<(), CacheError>;
async fn get_bytes(&self, key: &str) -> Option<Vec<u8>> {
self.get(key).await.and_then(|s| BASE64.decode(&s).ok())
}
async fn set_bytes(&self, key: &str, value: &[u8], ttl: Duration) -> Result<(), CacheError> {
let encoded = BASE64.encode(value);
self.set(key, &encoded, ttl).await
}
}
#[derive(Clone)]
pub struct ValkeyCache {
conn: redis::aio::ConnectionManager,
@@ -77,6 +57,15 @@ impl Cache for ValkeyCache {
.await
.map_err(|e| CacheError::Connection(e.to_string()))
}
async fn get_bytes(&self, key: &str) -> Option<Vec<u8>> {
self.get(key).await.and_then(|s| BASE64.decode(&s).ok())
}
async fn set_bytes(&self, key: &str, value: &[u8], ttl: Duration) -> Result<(), CacheError> {
let encoded = BASE64.encode(value);
self.set(key, &encoded, ttl).await
}
}
pub struct NoOpCache;
@@ -94,11 +83,14 @@ impl Cache for NoOpCache {
async fn delete(&self, _key: &str) -> Result<(), CacheError> {
Ok(())
}
}
#[async_trait]
pub trait DistributedRateLimiter: Send + Sync {
async fn check_rate_limit(&self, key: &str, limit: u32, window_ms: u64) -> bool;
async fn get_bytes(&self, _key: &str) -> Option<Vec<u8>> {
None
}
async fn set_bytes(&self, _key: &str, _value: &[u8], _ttl: Duration) -> Result<(), CacheError> {
Ok(())
}
}
#[derive(Clone)]


@@ -0,0 +1,18 @@
[package]
name = "tranquil-comms"
version.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
async-trait = { workspace = true }
base64 = { workspace = true }
chrono = { workspace = true }
reqwest = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
sqlx = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
urlencoding = { workspace = true }
uuid = { workspace = true }


@@ -0,0 +1,13 @@
mod locale;
mod sender;
mod types;
pub use locale::{
DEFAULT_LOCALE, NotificationStrings, VALID_LOCALES, format_message, get_strings,
validate_locale,
};
pub use sender::{
CommsSender, DiscordSender, EmailSender, SendError, SignalSender, TelegramSender,
is_valid_phone_number, mime_encode_header, sanitize_header_value,
};
pub use types::{CommsChannel, CommsStatus, CommsType, NewComms, QueuedComms};


@@ -1,9 +1,9 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::FromRow;
use uuid::Uuid;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, sqlx::Type, Serialize, Deserialize)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, sqlx::Type)]
#[serde(rename_all = "lowercase")]
#[sqlx(type_name = "comms_channel", rename_all = "lowercase")]
pub enum CommsChannel {
Email,
@@ -12,7 +12,8 @@ pub enum CommsChannel {
Signal,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, sqlx::Type, Serialize, Deserialize)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, sqlx::Type)]
#[serde(rename_all = "lowercase")]
#[sqlx(type_name = "comms_status", rename_all = "lowercase")]
pub enum CommsStatus {
Pending,
@@ -21,7 +22,8 @@ pub enum CommsStatus {
Failed,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, sqlx::Type, Serialize, Deserialize)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, sqlx::Type)]
#[serde(rename_all = "snake_case")]
#[sqlx(type_name = "comms_type", rename_all = "snake_case")]
pub enum CommsType {
Welcome,
@@ -37,7 +39,7 @@ pub enum CommsType {
MigrationVerification,
}
#[derive(Debug, Clone, FromRow)]
#[derive(Debug, Clone)]
pub struct QueuedComms {
pub id: Uuid,
pub user_id: Uuid,

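These enum columns round-trip through sqlx via the `sqlx::Type` derive; call sites select them with an explicit override such as `comms_type as "comms_type: CommsType"`, as visible in the .sqlx entries above. A hedged sketch of one such call site, assuming a PgPool named `pool` (in offline builds this exact query would need its own .sqlx entry):

use sqlx::PgPool;
use tranquil_comms::CommsType;
use uuid::Uuid;

async fn comms_type_of(pool: &PgPool, id: Uuid) -> Result<CommsType, sqlx::Error> {
    // The `as "comms_type: CommsType"` override decodes the Postgres
    // enum through the sqlx::Type derive on the Rust enum.
    let row = sqlx::query!(
        r#"SELECT comms_type as "comms_type: CommsType" FROM comms_queue WHERE id = $1"#,
        id
    )
    .fetch_one(pool)
    .await?;
    Ok(row.comms_type)
}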

@@ -0,0 +1,18 @@
[package]
name = "tranquil-crypto"
version.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
aes-gcm = { workspace = true }
base64 = { workspace = true }
hkdf = { workspace = true }
hmac = { workspace = true }
p256 = { workspace = true }
rand = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
sha2 = { workspace = true }
subtle = { workspace = true }
thiserror = { workspace = true }


@@ -0,0 +1,78 @@
#[allow(deprecated)]
use aes_gcm::{Aes256Gcm, KeyInit, Nonce, aead::Aead};
use hkdf::Hkdf;
use sha2::Sha256;
use crate::CryptoError;
pub fn derive_key(master_key: &[u8], context: &[u8]) -> Result<[u8; 32], CryptoError> {
let hk = Hkdf::<Sha256>::new(None, master_key);
let mut output = [0u8; 32];
hk.expand(context, &mut output)
.map_err(|e| CryptoError::KeyDerivationFailed(format!("{}", e)))?;
Ok(output)
}
pub fn encrypt_with_key(key: &[u8; 32], plaintext: &[u8]) -> Result<Vec<u8>, CryptoError> {
use rand::RngCore;
let cipher = Aes256Gcm::new_from_slice(key)
.map_err(|e| CryptoError::EncryptionFailed(format!("Failed to create cipher: {}", e)))?;
let mut nonce_bytes = [0u8; 12];
rand::thread_rng().fill_bytes(&mut nonce_bytes);
#[allow(deprecated)]
let nonce = Nonce::from_slice(&nonce_bytes);
let ciphertext = cipher
.encrypt(nonce, plaintext)
.map_err(|e| CryptoError::EncryptionFailed(format!("{}", e)))?;
let mut result = Vec::with_capacity(12 + ciphertext.len());
result.extend_from_slice(&nonce_bytes);
result.extend_from_slice(&ciphertext);
Ok(result)
}
pub fn decrypt_with_key(key: &[u8; 32], encrypted: &[u8]) -> Result<Vec<u8>, CryptoError> {
if encrypted.len() < 12 {
return Err(CryptoError::DecryptionFailed(
"Encrypted data too short".to_string(),
));
}
let cipher = Aes256Gcm::new_from_slice(key)
.map_err(|e| CryptoError::DecryptionFailed(format!("Failed to create cipher: {}", e)))?;
#[allow(deprecated)]
let nonce = Nonce::from_slice(&encrypted[..12]);
let ciphertext = &encrypted[12..];
cipher
.decrypt(nonce, ciphertext)
.map_err(|e| CryptoError::DecryptionFailed(format!("{}", e)))
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_encrypt_decrypt() {
let key = [0u8; 32];
let plaintext = b"hello world";
let encrypted = encrypt_with_key(&key, plaintext).unwrap();
let decrypted = decrypt_with_key(&key, &encrypted).unwrap();
assert_eq!(plaintext.as_slice(), decrypted.as_slice());
}
#[test]
fn test_derive_key() {
let master = b"master-key-for-testing";
let key1 = derive_key(master, b"context-1").unwrap();
let key2 = derive_key(master, b"context-2").unwrap();
assert_ne!(key1, key2);
}
}

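derive_key and encrypt_with_key are designed to compose: HKDF-SHA256 expands one master secret into independent 32-byte keys per context label, and each ciphertext is laid out as a 12-byte random nonce followed by the AES-256-GCM output. A sketch combining them — the master secret and context labels are illustrative, not from the commit:

use tranquil_crypto::{CryptoError, decrypt_with_key, derive_key, encrypt_with_key};

fn sealed_round_trip() -> Result<(), CryptoError> {
    let master = b"example-master-secret"; // illustrative only
    let totp_key = derive_key(master, b"totp-secrets")?;
    let cookie_key = derive_key(master, b"device-cookies")?;
    assert_ne!(totp_key, cookie_key); // distinct contexts yield independent keys

    let sealed = encrypt_with_key(&totp_key, b"attack at dawn")?;
    assert_eq!(decrypt_with_key(&totp_key, &sealed)?, b"attack at dawn");
    // GCM authenticates the ciphertext, so the wrong key fails
    // cleanly instead of returning garbage.
    assert!(decrypt_with_key(&cookie_key, &sealed).is_err());
    Ok(())
}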

@@ -0,0 +1,19 @@
mod encryption;
mod jwk;
mod signing;
pub use encryption::{decrypt_with_key, derive_key, encrypt_with_key};
pub use jwk::{Jwk, JwkSet, create_jwk_set};
pub use signing::{DeviceCookieSigner, SigningKeyPair};
#[derive(Debug, Clone, thiserror::Error)]
pub enum CryptoError {
#[error("Encryption failed: {0}")]
EncryptionFailed(String),
#[error("Decryption failed: {0}")]
DecryptionFailed(String),
#[error("Invalid key: {0}")]
InvalidKey(String),
#[error("Key derivation failed: {0}")]
KeyDerivationFailed(String),
}


@@ -0,0 +1,150 @@
use base64::{Engine, engine::general_purpose::URL_SAFE_NO_PAD};
use hmac::Mac;
use p256::ecdsa::SigningKey;
use sha2::{Digest, Sha256};
use subtle::ConstantTimeEq;
use crate::CryptoError;
type HmacSha256 = hmac::Hmac<Sha256>;
pub struct SigningKeyPair {
#[allow(dead_code)]
signing_key: SigningKey,
pub key_id: String,
pub x: String,
pub y: String,
}
impl SigningKeyPair {
pub fn from_seed(seed: &[u8]) -> Result<Self, CryptoError> {
let mut hasher = Sha256::new();
hasher.update(b"oauth-signing-key-derivation:");
hasher.update(seed);
let hash = hasher.finalize();
let signing_key = SigningKey::from_slice(&hash)
.map_err(|e| CryptoError::InvalidKey(format!("Failed to create signing key: {}", e)))?;
let verifying_key = signing_key.verifying_key();
let point = verifying_key.to_encoded_point(false);
let x = URL_SAFE_NO_PAD.encode(
point
.x()
.ok_or_else(|| CryptoError::InvalidKey("Missing X coordinate".to_string()))?,
);
let y = URL_SAFE_NO_PAD.encode(
point
.y()
.ok_or_else(|| CryptoError::InvalidKey("Missing Y coordinate".to_string()))?,
);
let mut kid_hasher = Sha256::new();
kid_hasher.update(x.as_bytes());
kid_hasher.update(y.as_bytes());
let kid_hash = kid_hasher.finalize();
let key_id = URL_SAFE_NO_PAD.encode(&kid_hash[..8]);
Ok(Self {
signing_key,
key_id,
x,
y,
})
}
}
pub struct DeviceCookieSigner {
key: [u8; 32],
}
impl DeviceCookieSigner {
pub fn new(key: [u8; 32]) -> Self {
Self { key }
}
pub fn sign(&self, device_id: &str) -> String {
let timestamp = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs();
let message = format!("{}:{}", device_id, timestamp);
let mut mac =
<HmacSha256 as Mac>::new_from_slice(&self.key).expect("HMAC key size is valid");
mac.update(message.as_bytes());
let signature = URL_SAFE_NO_PAD.encode(mac.finalize().into_bytes());
format!("{}.{}.{}", device_id, timestamp, signature)
}
pub fn verify(&self, cookie_value: &str, max_age_days: u64) -> Option<String> {
let parts: Vec<&str> = cookie_value.splitn(3, '.').collect();
if parts.len() != 3 {
return None;
}
let device_id = parts[0];
let timestamp_str = parts[1];
let provided_signature = parts[2];
let timestamp: u64 = timestamp_str.parse().ok()?;
let now = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs();
if now.saturating_sub(timestamp) > max_age_days * 24 * 60 * 60 {
return None;
}
let message = format!("{}:{}", device_id, timestamp);
let mut mac =
<HmacSha256 as Mac>::new_from_slice(&self.key).expect("HMAC key size is valid");
mac.update(message.as_bytes());
let expected_signature = URL_SAFE_NO_PAD.encode(mac.finalize().into_bytes());
if provided_signature
.as_bytes()
.ct_eq(expected_signature.as_bytes())
.into()
{
Some(device_id.to_string())
} else {
None
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_signing_key_pair() {
let seed = b"test-seed-for-signing-key";
let kp = SigningKeyPair::from_seed(seed).unwrap();
assert!(!kp.key_id.is_empty());
assert!(!kp.x.is_empty());
assert!(!kp.y.is_empty());
}
#[test]
fn test_device_cookie_signer() {
let key = [0u8; 32];
let signer = DeviceCookieSigner::new(key);
let signed = signer.sign("device-123");
let verified = signer.verify(&signed, 400);
assert_eq!(verified, Some("device-123".to_string()));
}
#[test]
fn test_device_cookie_invalid() {
let key = [0u8; 32];
let signer = DeviceCookieSigner::new(key);
assert!(signer.verify("invalid", 400).is_none());
assert!(signer.verify("a.b.c", 400).is_none());
}
}


@@ -0,0 +1,13 @@
[package]
name = "tranquil-infra"
version.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
async-trait = { workspace = true }
bytes = { workspace = true }
futures = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }


@@ -0,0 +1,58 @@
use async_trait::async_trait;
use bytes::Bytes;
use futures::Stream;
use std::pin::Pin;
use std::time::Duration;
#[derive(Debug, thiserror::Error)]
pub enum StorageError {
#[error("IO error: {0}")]
Io(#[from] std::io::Error),
#[error("S3 error: {0}")]
S3(String),
#[error("Other: {0}")]
Other(String),
}
pub struct StreamUploadResult {
pub sha256_hash: [u8; 32],
pub size: u64,
}
#[async_trait]
pub trait BlobStorage: Send + Sync {
async fn put(&self, key: &str, data: &[u8]) -> Result<(), StorageError>;
async fn put_bytes(&self, key: &str, data: Bytes) -> Result<(), StorageError>;
async fn get(&self, key: &str) -> Result<Vec<u8>, StorageError>;
async fn get_bytes(&self, key: &str) -> Result<Bytes, StorageError>;
async fn get_head(&self, key: &str, size: usize) -> Result<Bytes, StorageError>;
async fn delete(&self, key: &str) -> Result<(), StorageError>;
async fn put_stream(
&self,
key: &str,
stream: Pin<Box<dyn Stream<Item = Result<Bytes, std::io::Error>> + Send>>,
) -> Result<StreamUploadResult, StorageError>;
async fn copy(&self, src_key: &str, dst_key: &str) -> Result<(), StorageError>;
}
#[derive(Debug, thiserror::Error)]
pub enum CacheError {
#[error("Cache connection error: {0}")]
Connection(String),
#[error("Serialization error: {0}")]
Serialization(String),
}
#[async_trait]
pub trait Cache: Send + Sync {
async fn get(&self, key: &str) -> Option<String>;
async fn set(&self, key: &str, value: &str, ttl: Duration) -> Result<(), CacheError>;
async fn delete(&self, key: &str) -> Result<(), CacheError>;
async fn get_bytes(&self, key: &str) -> Option<Vec<u8>>;
async fn set_bytes(&self, key: &str, value: &[u8], ttl: Duration) -> Result<(), CacheError>;
}
#[async_trait]
pub trait DistributedRateLimiter: Send + Sync {
async fn check_rate_limit(&self, key: &str, limit: u32, window_ms: u64) -> bool;
}

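With get_bytes/set_bytes promoted to required methods on the shared trait, every Cache backend now chooses its own byte representation (ValkeyCache base64-encodes through the string path). A minimal in-memory implementation sketch for tests — not part of the commit, and it ignores TTLs entirely:

use async_trait::async_trait;
use std::collections::HashMap;
use std::time::Duration;
use tokio::sync::RwLock;
use tranquil_infra::{Cache, CacheError};

// Test double: one map, no expiry, bytes stored directly.
#[derive(Default)]
struct MemoryCache {
    entries: RwLock<HashMap<String, Vec<u8>>>,
}

#[async_trait]
impl Cache for MemoryCache {
    async fn get(&self, key: &str) -> Option<String> {
        let map = self.entries.read().await;
        map.get(key).and_then(|v| String::from_utf8(v.clone()).ok())
    }
    async fn set(&self, key: &str, value: &str, _ttl: Duration) -> Result<(), CacheError> {
        self.entries.write().await.insert(key.to_string(), value.as_bytes().to_vec());
        Ok(())
    }
    async fn delete(&self, key: &str) -> Result<(), CacheError> {
        self.entries.write().await.remove(key);
        Ok(())
    }
    async fn get_bytes(&self, key: &str) -> Option<Vec<u8>> {
        self.entries.read().await.get(key).cloned()
    }
    async fn set_bytes(&self, key: &str, value: &[u8], _ttl: Duration) -> Result<(), CacheError> {
        self.entries.write().await.insert(key.to_string(), value.to_vec());
        Ok(())
    }
}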

@@ -0,0 +1,25 @@
[package]
name = "tranquil-oauth"
version.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
tranquil-types = { workspace = true }
anyhow = { workspace = true }
sqlx = { workspace = true }
axum = { workspace = true }
base64 = { workspace = true }
chrono = { workspace = true }
ed25519-dalek = { workspace = true }
p256 = { workspace = true }
p384 = { workspace = true }
rand = { workspace = true }
reqwest = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
sha2 = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
uuid = { workspace = true }


@@ -4,7 +4,8 @@ use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
use super::OAuthError;
use crate::OAuthError;
use crate::types::ClientAuth;
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClientMetadata {
@@ -96,7 +97,6 @@ impl ClientMetadataCache {
url.scheme() == "http"
&& url.host_str() == Some("localhost")
&& url.port().is_none()
// empty path
&& url.path() == "/"
} else {
false
@@ -108,16 +108,14 @@ impl ClientMetadataCache {
.map_err(|_| OAuthError::InvalidClient("Invalid loopback client_id URL".into()))?;
let mut redirect_uris = Vec::<String>::new();
let mut scope: Option<String> = None;
for (key, value) in url.query_pairs() {
if key == "redirect_uri" {
url.query_pairs().for_each(|(key, value)| {
if key == "redirect_uri" && redirect_uris.is_empty() {
redirect_uris.push(value.to_string());
break;
}
if key == "scope" {
if key == "scope" && scope.is_none() {
scope = Some(value.into());
break;
}
}
});
if redirect_uris.is_empty() {
redirect_uris.push("http://127.0.0.1/".into());
redirect_uris.push("http://[::1]/".into());
@@ -289,9 +287,10 @@ impl ClientMetadataCache {
"redirect_uris is required".to_string(),
));
}
for uri in &metadata.redirect_uris {
self.validate_redirect_uri_format(uri)?;
}
metadata
.redirect_uris
.iter()
.try_for_each(|uri| self.validate_redirect_uri_format(uri))?;
if !metadata.grant_types.is_empty()
&& !metadata
.grant_types
@@ -357,8 +356,7 @@ impl ClientMetadataCache {
if !scheme
.chars()
.next()
.map(|c| c.is_ascii_lowercase())
.unwrap_or(false)
.is_some_and(|c| c.is_ascii_lowercase())
{
return Err(OAuthError::InvalidClient(format!(
"Invalid redirect_uri scheme: {}",
@@ -388,30 +386,26 @@ impl ClientMetadata {
pub async fn verify_client_auth(
cache: &ClientMetadataCache,
metadata: &ClientMetadata,
client_auth: &super::ClientAuth,
client_auth: &ClientAuth,
) -> Result<(), OAuthError> {
let expected_method = metadata.auth_method();
match (expected_method, client_auth) {
("none", super::ClientAuth::None) => Ok(()),
("none", ClientAuth::None) => Ok(()),
("none", _) => Err(OAuthError::InvalidClient(
"Client is configured for no authentication, but credentials were provided".to_string(),
)),
("private_key_jwt", super::ClientAuth::PrivateKeyJwt { client_assertion }) => {
("private_key_jwt", ClientAuth::PrivateKeyJwt { client_assertion }) => {
verify_private_key_jwt_async(cache, metadata, client_assertion).await
}
("private_key_jwt", _) => Err(OAuthError::InvalidClient(
"Client requires private_key_jwt authentication".to_string(),
)),
("client_secret_post", super::ClientAuth::SecretPost { .. }) => {
Err(OAuthError::InvalidClient(
"client_secret_post is not supported for ATProto OAuth".to_string(),
))
}
("client_secret_basic", super::ClientAuth::SecretBasic { .. }) => {
Err(OAuthError::InvalidClient(
"client_secret_basic is not supported for ATProto OAuth".to_string(),
))
}
("client_secret_post", ClientAuth::SecretPost { .. }) => Err(OAuthError::InvalidClient(
"client_secret_post is not supported for ATProto OAuth".to_string(),
)),
("client_secret_basic", ClientAuth::SecretBasic { .. }) => Err(OAuthError::InvalidClient(
"client_secret_basic is not supported for ATProto OAuth".to_string(),
)),
(method, _) => Err(OAuthError::InvalidClient(format!(
"Unsupported or mismatched authentication method: {}",
method
@@ -519,12 +513,12 @@ async fn verify_private_key_jwt_async(
.get("keys")
.and_then(|k| k.as_array())
.ok_or_else(|| OAuthError::InvalidClient("Invalid JWKS: missing keys array".to_string()))?;
let matching_keys: Vec<&serde_json::Value> = if let Some(kid) = kid {
keys.iter()
let matching_keys: Vec<&serde_json::Value> = match kid {
Some(kid) => keys
.iter()
.filter(|k| k.get("kid").and_then(|v| v.as_str()) == Some(kid))
.collect()
} else {
keys.iter().collect()
.collect(),
None => keys.iter().collect(),
};
if matching_keys.is_empty() {
return Err(OAuthError::InvalidClient(


@@ -4,8 +4,8 @@ use chrono::Utc;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use super::OAuthError;
use crate::types::{DPoPProofId, JwkThumbprint};
use crate::OAuthError;
use tranquil_types::{DPoPProofId, JwkThumbprint};
const DPOP_NONCE_VALIDITY_SECS: i64 = 300;
const DPOP_MAX_AGE_SECS: i64 = 300;


@@ -82,16 +82,16 @@ impl IntoResponse for OAuthError {
}
}
impl From<sqlx::Error> for OAuthError {
fn from(err: sqlx::Error) -> Self {
tracing::error!("Database error in OAuth flow: {}", err);
OAuthError::ServerError("An internal error occurred".to_string())
}
}
impl From<anyhow::Error> for OAuthError {
fn from(err: anyhow::Error) -> Self {
tracing::error!("Internal error in OAuth flow: {}", err);
OAuthError::ServerError("An internal error occurred".to_string())
}
}
impl From<sqlx::Error> for OAuthError {
fn from(err: sqlx::Error) -> Self {
tracing::error!("Database error in OAuth flow: {}", err);
OAuthError::ServerError("An internal error occurred".to_string())
}
}


@@ -0,0 +1,17 @@
mod client;
mod dpop;
mod error;
mod types;
pub use client::{ClientMetadata, ClientMetadataCache, verify_client_auth};
pub use dpop::{
DPoPJwk, DPoPProofHeader, DPoPProofPayload, DPoPVerifier, DPoPVerifyResult,
compute_access_token_hash, compute_jwk_thumbprint,
};
pub use error::OAuthError;
pub use types::{
AuthFlowState, AuthorizationRequestParameters, AuthorizationServerMetadata,
AuthorizedClientData, ClientAuth, Code, DPoPClaims, DeviceData, DeviceId, JwkPublicKey, Jwks,
OAuthClientMetadata, ParResponse, ProtectedResourceMetadata, RefreshToken, RefreshTokenState,
RequestData, RequestId, SessionId, TokenData, TokenId, TokenRequest, TokenResponse,
};

crates/tranquil-pds/.sqlx Symbolic link

@@ -0,0 +1 @@
../../.sqlx


@@ -0,0 +1,92 @@
[package]
name = "tranquil-pds"
version.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
tranquil-types = { workspace = true }
tranquil-infra = { workspace = true }
tranquil-crypto = { workspace = true }
tranquil-storage = { workspace = true }
tranquil-cache = { workspace = true }
tranquil-repo = { workspace = true }
tranquil-scopes = { workspace = true }
tranquil-auth = { workspace = true }
tranquil-oauth = { workspace = true }
tranquil-comms = { workspace = true }
aes-gcm = { workspace = true }
anyhow = { workspace = true }
async-trait = { workspace = true }
aws-config = { workspace = true }
aws-sdk-s3 = { workspace = true }
axum = { workspace = true }
base32 = { workspace = true }
base64 = { workspace = true }
bcrypt = { workspace = true }
bs58 = { workspace = true }
bytes = { workspace = true }
chrono = { workspace = true }
cid = { workspace = true }
dotenvy = { workspace = true }
ed25519-dalek = { workspace = true }
futures = { workspace = true }
futures-util = { workspace = true }
governor = { workspace = true }
hex = { workspace = true }
hickory-resolver = { workspace = true }
hkdf = { workspace = true }
hmac = { workspace = true }
http = { workspace = true }
image = { workspace = true }
infer = { workspace = true }
ipld-core = { workspace = true }
iroh-car = { workspace = true }
jacquard = { workspace = true }
jacquard-axum = { workspace = true }
jacquard-repo = { workspace = true }
jsonwebtoken = { workspace = true }
k256 = { workspace = true }
metrics = { workspace = true }
metrics-exporter-prometheus = { workspace = true }
multibase = { workspace = true }
multihash = { workspace = true }
p256 = { workspace = true }
p384 = { workspace = true }
rand = { workspace = true }
redis = { workspace = true }
regex = { workspace = true }
reqwest = { workspace = true }
serde = { workspace = true }
serde_bytes = { workspace = true }
serde_ipld_dagcbor = { workspace = true }
serde_json = { workspace = true }
serde_urlencoded = { workspace = true }
sha2 = { workspace = true }
sqlx = { workspace = true }
subtle = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
tokio-tungstenite = { workspace = true }
totp-rs = { workspace = true }
tower = { workspace = true }
tower-http = { workspace = true }
tower-layer = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
urlencoding = { workspace = true }
uuid = { workspace = true }
webauthn-rs = { workspace = true }
webauthn-rs-proto = { workspace = true }
zip = { workspace = true }
[features]
external-infra = []
[dev-dependencies]
ciborium = { workspace = true }
ctor = { workspace = true }
testcontainers = { workspace = true }
testcontainers-modules = { workspace = true }
wiremock = { workspace = true }


@@ -0,0 +1 @@
../../migrations


@@ -130,13 +130,8 @@ pub async fn delete_account(
error!("Failed to commit account deletion transaction: {:?}", e);
return ApiError::InternalError(Some("Failed to commit deletion".into())).into_response();
}
if let Err(e) = crate::api::repo::record::sequence_account_event(
&state,
did,
false,
Some("deleted"),
)
.await
if let Err(e) =
crate::api::repo::record::sequence_account_event(&state, did, false, Some("deleted")).await
{
warn!(
"Failed to sequence account deletion event for {}: {}",


@@ -104,12 +104,9 @@ pub async fn update_account_handle(
}
let _ = state.cache.delete(&format!("handle:{}", handle)).await;
let handle_typed = Handle::new_unchecked(&handle);
if let Err(e) = crate::api::repo::record::sequence_identity_event(
&state,
did,
Some(&handle_typed),
)
.await
if let Err(e) =
crate::api::repo::record::sequence_identity_event(&state, did, Some(&handle_typed))
.await
{
warn!(
"Failed to sequence identity event for admin handle update: {}",


@@ -224,9 +224,12 @@ pub async fn update_subject_status(
.execute(&mut *tx)
.await
} else {
sqlx::query!("UPDATE users SET deactivated_at = NULL WHERE did = $1", did.as_str())
.execute(&mut *tx)
.await
sqlx::query!(
"UPDATE users SET deactivated_at = NULL WHERE did = $1",
did.as_str()
)
.execute(&mut *tx)
.await
};
if let Err(e) = result {
error!(


@@ -768,9 +768,5 @@ pub async fn create_delegated_account(
info!(did = %did, handle = %handle, controller = %&auth.0.did, "Delegated account created");
Json(CreateDelegatedAccountResponse {
did: did.into(),
handle: handle.into(),
})
.into_response()
Json(CreateDelegatedAccountResponse { did, handle }).into_response()
}


@@ -796,8 +796,12 @@ pub async fn create_account(
if !is_migration && !is_did_web_byod {
let did_typed = Did::new_unchecked(&did);
let handle_typed = Handle::new_unchecked(&handle);
if let Err(e) =
crate::api::repo::record::sequence_identity_event(&state, &did_typed, Some(&handle_typed)).await
if let Err(e) = crate::api::repo::record::sequence_identity_event(
&state,
&did_typed,
Some(&handle_typed),
)
.await
{
warn!("Failed to sequence identity event for {}: {}", did, e);
}


@@ -754,7 +754,8 @@ pub async fn update_handle(
let _ = state.cache.delete(&format!("handle:{}", handle)).await;
let handle_typed = Handle::new_unchecked(&handle);
if let Err(e) =
crate::api::repo::record::sequence_identity_event(&state, &did, Some(&handle_typed)).await
crate::api::repo::record::sequence_identity_event(&state, &did, Some(&handle_typed))
.await
{
warn!("Failed to sequence identity event for handle update: {}", e);
}


@@ -268,8 +268,7 @@ async fn proxy_handler(
}
Err(e) => {
warn!("Token validation failed: {:?}", e);
if matches!(e, crate::auth::TokenValidationError::TokenExpired)
&& extracted.is_dpop
if matches!(e, crate::auth::TokenValidationError::TokenExpired) && extracted.is_dpop
{
let www_auth =
"DPoP error=\"invalid_token\", error_description=\"Token has expired\"";


@@ -421,7 +421,14 @@ pub async fn apply_writes(
ops,
modified_keys,
all_blob_cids,
} = match process_writes(&input.writes, initial_mst, &did, input.validate, &tracking_store).await
} = match process_writes(
&input.writes,
initial_mst,
&did,
input.validate,
&tracking_store,
)
.await
{
Ok(acc) => acc,
Err(response) => return response,


@@ -257,13 +257,15 @@ pub async fn list_records(
.zip(blocks.into_iter())
.filter_map(|((_, rkey, cid_str), block_opt)| {
block_opt.and_then(|block| {
serde_ipld_dagcbor::from_slice::<Ipld>(&block).ok().map(|ipld| {
json!({
"uri": format!("at://{}/{}/{}", input.repo, input.collection, rkey),
"cid": cid_str,
"value": ipld_to_json(ipld)
serde_ipld_dagcbor::from_slice::<Ipld>(&block)
.ok()
.map(|ipld| {
json!({
"uri": format!("at://{}/{}/{}", input.repo, input.collection, rkey),
"cid": cid_str,
"value": ipld_to_json(ipld)
})
})
})
})
})
.collect();


@@ -219,9 +219,9 @@ pub async fn commit_and_log(
.await
.map_err(|e| format!("DB Error (user_blocks delete obsolete): {}", e))?;
}
let (upserts, deletes): (Vec<_>, Vec<_>) = ops.iter().partition(|op| {
matches!(op, RecordOp::Create { .. } | RecordOp::Update { .. })
});
let (upserts, deletes): (Vec<_>, Vec<_>) = ops
.iter()
.partition(|op| matches!(op, RecordOp::Create { .. } | RecordOp::Update { .. }));
let (upsert_collections, upsert_rkeys, upsert_cids): (Vec<String>, Vec<String>, Vec<String>) =
upserts
.into_iter()

Some files were not shown because too many files have changed in this diff.