First version of pds migration

Author: lewis
Date: 2025-12-11 17:10:19 +02:00
Parent: 2eb67eb688
Commit: ea7837fcec
40 changed files with 5332 additions and 37 deletions

View File

@@ -9,7 +9,7 @@ use axum::{
use bcrypt::{DEFAULT_COST, hash};
use jacquard::types::{did::Did, integer::LimitedU32, string::Tid};
use jacquard_repo::{commit::Commit, mst::Mst, storage::BlockStore};
use k256::SecretKey;
use k256::{ecdsa::SigningKey, SecretKey};
use rand::rngs::OsRng;
use serde::{Deserialize, Serialize};
use serde_json::json;
@@ -302,9 +302,33 @@ pub async fn create_account(
let rev = Tid::now(LimitedU32::MIN);
let commit = Commit::new_unsigned(did_obj, mst_root, rev, None);
let unsigned_commit = Commit::new_unsigned(did_obj, mst_root, rev, None);
let commit_bytes = match commit.to_cbor() {
let signing_key = match SigningKey::from_slice(&secret_key_bytes) {
Ok(k) => k,
Err(e) => {
error!("Error creating signing key: {:?}", e);
return (
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "InternalError"})),
)
.into_response();
}
};
let signed_commit = match unsigned_commit.sign(&signing_key) {
Ok(c) => c,
Err(e) => {
error!("Error signing genesis commit: {:?}", e);
return (
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "InternalError"})),
)
.into_response();
}
};
let commit_bytes = match signed_commit.to_cbor() {
Ok(b) => b,
Err(e) => {
error!("Error serializing genesis commit: {:?}", e);

View File

@@ -1,7 +1,9 @@
pub mod account;
pub mod did;
pub mod plc;
pub use account::create_account;
pub use did::{
get_recommended_did_credentials, resolve_handle, update_handle, user_did_doc, well_known_did,
};
pub use plc::{request_plc_operation_signature, sign_plc_operation, submit_plc_operation};

src/api/identity/plc.rs (new file, 618 lines)

@@ -0,0 +1,618 @@
use crate::plc::{
create_update_op, sign_operation, signing_key_to_did_key, validate_plc_operation,
PlcClient, PlcError, PlcService,
};
use crate::state::AppState;
use axum::{
extract::State,
http::StatusCode,
response::{IntoResponse, Response},
Json,
};
use chrono::{Duration, Utc};
use k256::ecdsa::SigningKey;
use rand::Rng;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use std::collections::HashMap;
use tracing::{error, info, warn};
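/// Builds a short confirmation token of the form "xxxxx-xxxxx", drawing each
/// character from the lowercase base32 alphabet (a-z, 2-7).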
fn generate_plc_token() -> String {
let mut rng = rand::thread_rng();
let chars: Vec<char> = "abcdefghijklmnopqrstuvwxyz234567".chars().collect();
let part1: String = (0..5).map(|_| chars[rng.gen_range(0..chars.len())]).collect();
let part2: String = (0..5).map(|_| chars[rng.gen_range(0..chars.len())]).collect();
format!("{}-{}", part1, part2)
}
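/// Issues a PLC-operation confirmation token for the authenticated account:
/// clears stale or expired tokens, stores a fresh one valid for 10 minutes,
/// and enqueues a notification so the user receives it out of band.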
pub async fn request_plc_operation_signature(
State(state): State<AppState>,
headers: axum::http::HeaderMap,
) -> Response {
let token = match crate::auth::extract_bearer_token_from_header(
headers.get("Authorization").and_then(|h| h.to_str().ok()),
) {
Some(t) => t,
None => {
return (
StatusCode::UNAUTHORIZED,
Json(json!({"error": "AuthenticationRequired"})),
)
.into_response();
}
};
let auth_user = match crate::auth::validate_bearer_token(&state.db, &token).await {
Ok(user) => user,
Err(e) => {
return (
StatusCode::UNAUTHORIZED,
Json(json!({"error": "AuthenticationFailed", "message": e})),
)
.into_response();
}
};
let did = &auth_user.did;
let user = match sqlx::query!(
"SELECT id FROM users WHERE did = $1",
did
)
.fetch_optional(&state.db)
.await
{
Ok(Some(row)) => row,
Ok(None) => {
return (
StatusCode::NOT_FOUND,
Json(json!({"error": "AccountNotFound"})),
)
.into_response();
}
Err(e) => {
error!("DB error: {:?}", e);
return (
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "InternalError"})),
)
.into_response();
}
};
let _ = sqlx::query!(
"DELETE FROM plc_operation_tokens WHERE user_id = $1 OR expires_at < NOW()",
user.id
)
.execute(&state.db)
.await;
let plc_token = generate_plc_token();
let expires_at = Utc::now() + Duration::minutes(10);
if let Err(e) = sqlx::query!(
r#"
INSERT INTO plc_operation_tokens (user_id, token, expires_at)
VALUES ($1, $2, $3)
"#,
user.id,
plc_token,
expires_at
)
.execute(&state.db)
.await
{
error!("Failed to create PLC token: {:?}", e);
return (
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "InternalError"})),
)
.into_response();
}
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string());
if let Err(e) = crate::notifications::enqueue_plc_operation(
&state.db,
user.id,
&plc_token,
&hostname,
)
.await
{
warn!("Failed to enqueue PLC operation notification: {:?}", e);
}
info!("PLC operation signature requested for user {}", did);
(StatusCode::OK, Json(json!({}))).into_response()
}
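/// Request body for signing a PLC operation: the email confirmation token plus
/// any operation fields the caller wants to override.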
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SignPlcOperationInput {
pub token: Option<String>,
pub rotation_keys: Option<Vec<String>>,
pub also_known_as: Option<Vec<String>>,
pub verification_methods: Option<HashMap<String, String>>,
pub services: Option<HashMap<String, ServiceInput>>,
}
#[derive(Debug, Deserialize, Clone)]
pub struct ServiceInput {
#[serde(rename = "type")]
pub service_type: String,
pub endpoint: String,
}
#[derive(Debug, Serialize)]
pub struct SignPlcOperationOutput {
pub operation: Value,
}
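/// Verifies the confirmation token, decrypts the account's signing key,
/// fetches the latest operation for the DID from the PLC directory, builds an
/// update operation from the requested changes, signs it, and returns it
/// without submitting it upstream.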
pub async fn sign_plc_operation(
State(state): State<AppState>,
headers: axum::http::HeaderMap,
Json(input): Json<SignPlcOperationInput>,
) -> Response {
let bearer = match crate::auth::extract_bearer_token_from_header(
headers.get("Authorization").and_then(|h| h.to_str().ok()),
) {
Some(t) => t,
None => {
return (
StatusCode::UNAUTHORIZED,
Json(json!({"error": "AuthenticationRequired"})),
)
.into_response();
}
};
let auth_user = match crate::auth::validate_bearer_token(&state.db, &bearer).await {
Ok(user) => user,
Err(e) => {
return (
StatusCode::UNAUTHORIZED,
Json(json!({"error": "AuthenticationFailed", "message": e})),
)
.into_response();
}
};
let did = &auth_user.did;
let token = match &input.token {
Some(t) => t,
None => {
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": "Email confirmation token required to sign PLC operations"
})),
)
.into_response();
}
};
let user = match sqlx::query!("SELECT id FROM users WHERE did = $1", did)
.fetch_optional(&state.db)
.await
{
Ok(Some(row)) => row,
_ => {
return (
StatusCode::NOT_FOUND,
Json(json!({"error": "AccountNotFound"})),
)
.into_response();
}
};
let token_row = match sqlx::query!(
"SELECT id, expires_at FROM plc_operation_tokens WHERE user_id = $1 AND token = $2",
user.id,
token
)
.fetch_optional(&state.db)
.await
{
Ok(Some(row)) => row,
Ok(None) => {
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidToken",
"message": "Invalid or expired token"
})),
)
.into_response();
}
Err(e) => {
error!("DB error: {:?}", e);
return (
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "InternalError"})),
)
.into_response();
}
};
if Utc::now() > token_row.expires_at {
let _ = sqlx::query!("DELETE FROM plc_operation_tokens WHERE id = $1", token_row.id)
.execute(&state.db)
.await;
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "ExpiredToken",
"message": "Token has expired"
})),
)
.into_response();
}
let key_row = match sqlx::query!(
"SELECT key_bytes, encryption_version FROM user_keys WHERE user_id = $1",
user.id
)
.fetch_optional(&state.db)
.await
{
Ok(Some(row)) => row,
_ => {
return (
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "InternalError", "message": "User signing key not found"})),
)
.into_response();
}
};
let key_bytes = match crate::config::decrypt_key(&key_row.key_bytes, key_row.encryption_version)
{
Ok(k) => k,
Err(e) => {
error!("Failed to decrypt user key: {}", e);
return (
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "InternalError"})),
)
.into_response();
}
};
let signing_key = match SigningKey::from_slice(&key_bytes) {
Ok(k) => k,
Err(e) => {
error!("Failed to create signing key: {:?}", e);
return (
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "InternalError"})),
)
.into_response();
}
};
let plc_client = PlcClient::new(None);
let last_op = match plc_client.get_last_op(did).await {
Ok(op) => op,
Err(PlcError::NotFound) => {
return (
StatusCode::NOT_FOUND,
Json(json!({
"error": "NotFound",
"message": "DID not found in PLC directory"
})),
)
.into_response();
}
Err(e) => {
error!("Failed to fetch PLC operation: {:?}", e);
return (
StatusCode::BAD_GATEWAY,
Json(json!({
"error": "UpstreamError",
"message": "Failed to communicate with PLC directory"
})),
)
.into_response();
}
};
if last_op.is_tombstone() {
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": "DID is tombstoned"
})),
)
.into_response();
}
let services = input.services.map(|s| {
s.into_iter()
.map(|(k, v)| {
(
k,
PlcService {
service_type: v.service_type,
endpoint: v.endpoint,
},
)
})
.collect()
});
let unsigned_op = match create_update_op(
&last_op,
input.rotation_keys,
input.verification_methods,
input.also_known_as,
services,
) {
Ok(op) => op,
Err(PlcError::Tombstoned) => {
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": "Cannot update tombstoned DID"
})),
)
.into_response();
}
Err(e) => {
error!("Failed to create PLC operation: {:?}", e);
return (
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "InternalError"})),
)
.into_response();
}
};
let signed_op = match sign_operation(&unsigned_op, &signing_key) {
Ok(op) => op,
Err(e) => {
error!("Failed to sign PLC operation: {:?}", e);
return (
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "InternalError"})),
)
.into_response();
}
};
let _ = sqlx::query!("DELETE FROM plc_operation_tokens WHERE id = $1", token_row.id)
.execute(&state.db)
.await;
info!("Signed PLC operation for user {}", did);
(
StatusCode::OK,
Json(SignPlcOperationOutput {
operation: signed_op,
}),
)
.into_response()
}
#[derive(Debug, Deserialize)]
pub struct SubmitPlcOperationInput {
pub operation: Value,
}
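/// Validates a client-supplied PLC operation and forwards it to the PLC
/// directory. Where the relevant fields are present, the rotation keys must
/// include the server's rotation key, the atproto_pds service must point at
/// this host, and the verification method and handle must match the account,
/// before the operation is sent upstream and an identity event is sequenced.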
pub async fn submit_plc_operation(
State(state): State<AppState>,
headers: axum::http::HeaderMap,
Json(input): Json<SubmitPlcOperationInput>,
) -> Response {
let bearer = match crate::auth::extract_bearer_token_from_header(
headers.get("Authorization").and_then(|h| h.to_str().ok()),
) {
Some(t) => t,
None => {
return (
StatusCode::UNAUTHORIZED,
Json(json!({"error": "AuthenticationRequired"})),
)
.into_response();
}
};
let auth_user = match crate::auth::validate_bearer_token(&state.db, &bearer).await {
Ok(user) => user,
Err(e) => {
return (
StatusCode::UNAUTHORIZED,
Json(json!({"error": "AuthenticationFailed", "message": e})),
)
.into_response();
}
};
let did = &auth_user.did;
if let Err(e) = validate_plc_operation(&input.operation) {
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": format!("Invalid operation: {}", e)
})),
)
.into_response();
}
let op = &input.operation;
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string());
let public_url = format!("https://{}", hostname);
let user = match sqlx::query!("SELECT id, handle FROM users WHERE did = $1", did)
.fetch_optional(&state.db)
.await
{
Ok(Some(row)) => row,
_ => {
return (
StatusCode::NOT_FOUND,
Json(json!({"error": "AccountNotFound"})),
)
.into_response();
}
};
let key_row = match sqlx::query!(
"SELECT key_bytes, encryption_version FROM user_keys WHERE user_id = $1",
user.id
)
.fetch_optional(&state.db)
.await
{
Ok(Some(row)) => row,
_ => {
return (
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "InternalError", "message": "User signing key not found"})),
)
.into_response();
}
};
let key_bytes = match crate::config::decrypt_key(&key_row.key_bytes, key_row.encryption_version)
{
Ok(k) => k,
Err(e) => {
error!("Failed to decrypt user key: {}", e);
return (
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "InternalError"})),
)
.into_response();
}
};
let signing_key = match SigningKey::from_slice(&key_bytes) {
Ok(k) => k,
Err(e) => {
error!("Failed to create signing key: {:?}", e);
return (
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "InternalError"})),
)
.into_response();
}
};
let user_did_key = signing_key_to_did_key(&signing_key);
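// When the operation lists rotation keys, require the server's rotation key
// (PLC_ROTATION_KEY, falling back to this account's did:key) to be among them.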
if let Some(rotation_keys) = op.get("rotationKeys").and_then(|v| v.as_array()) {
let server_rotation_key =
std::env::var("PLC_ROTATION_KEY").unwrap_or_else(|_| user_did_key.clone());
let has_server_key = rotation_keys
.iter()
.any(|k| k.as_str() == Some(&server_rotation_key));
if !has_server_key {
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": "Rotation keys do not include server's rotation key"
})),
)
.into_response();
}
}
if let Some(services) = op.get("services").and_then(|v| v.as_object()) {
if let Some(pds) = services.get("atproto_pds").and_then(|v| v.as_object()) {
let service_type = pds.get("type").and_then(|v| v.as_str());
let endpoint = pds.get("endpoint").and_then(|v| v.as_str());
if service_type != Some("AtprotoPersonalDataServer") {
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": "Incorrect type on atproto_pds service"
})),
)
.into_response();
}
if endpoint != Some(&public_url) {
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": "Incorrect endpoint on atproto_pds service"
})),
)
.into_response();
}
}
}
if let Some(verification_methods) = op.get("verificationMethods").and_then(|v| v.as_object()) {
if let Some(atproto_key) = verification_methods.get("atproto").and_then(|v| v.as_str()) {
if atproto_key != user_did_key {
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": "Incorrect signing key in verificationMethods"
})),
)
.into_response();
}
}
}
if let Some(also_known_as) = op.get("alsoKnownAs").and_then(|v| v.as_array()) {
let expected_handle = format!("at://{}", user.handle);
let first_aka = also_known_as.first().and_then(|v| v.as_str());
if first_aka != Some(&expected_handle) {
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": "Incorrect handle in alsoKnownAs"
})),
)
.into_response();
}
}
let plc_client = PlcClient::new(None);
if let Err(e) = plc_client.send_operation(did, &input.operation).await {
error!("Failed to submit PLC operation: {:?}", e);
return (
StatusCode::BAD_GATEWAY,
Json(json!({
"error": "UpstreamError",
"message": format!("Failed to submit to PLC directory: {}", e)
})),
)
.into_response();
}
if let Err(e) = sqlx::query!(
"INSERT INTO repo_seq (did, event_type) VALUES ($1, 'identity')",
did
)
.execute(&state.db)
.await
{
warn!("Failed to sequence identity event: {:?}", e);
}
info!("Submitted PLC operation for user {}", did);
(StatusCode::OK, Json(json!({}))).into_response()
}

src/api/repo/import.rs (new file, 420 lines)

@@ -0,0 +1,420 @@
use crate::state::AppState;
use crate::sync::import::{apply_import, parse_car, ImportError};
use crate::sync::verify::CarVerifier;
use axum::{
body::Bytes,
extract::State,
http::StatusCode,
response::{IntoResponse, Response},
Json,
};
use serde_json::json;
use tracing::{debug, error, info, warn};
const DEFAULT_MAX_IMPORT_SIZE: usize = 100 * 1024 * 1024;
const DEFAULT_MAX_BLOCKS: usize = 50000;
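/// Handles a full-repo CAR import: enforces size and authentication checks,
/// parses the CAR, confirms the root commit belongs to the authenticated DID,
/// verifies the commit signature and MST structure (unless
/// SKIP_IMPORT_VERIFICATION is set), then applies the blocks and sequences a
/// commit event.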
pub async fn import_repo(
State(state): State<AppState>,
headers: axum::http::HeaderMap,
body: Bytes,
) -> Response {
let accepting_imports = std::env::var("ACCEPTING_REPO_IMPORTS")
.map(|v| v != "false" && v != "0")
.unwrap_or(true);
if !accepting_imports {
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": "Service is not accepting repo imports"
})),
)
.into_response();
}
let max_size: usize = std::env::var("MAX_IMPORT_SIZE")
.ok()
.and_then(|s| s.parse().ok())
.unwrap_or(DEFAULT_MAX_IMPORT_SIZE);
if body.len() > max_size {
return (
StatusCode::PAYLOAD_TOO_LARGE,
Json(json!({
"error": "InvalidRequest",
"message": format!("Import size exceeds limit of {} bytes", max_size)
})),
)
.into_response();
}
let token = match crate::auth::extract_bearer_token_from_header(
headers.get("Authorization").and_then(|h| h.to_str().ok()),
) {
Some(t) => t,
None => {
return (
StatusCode::UNAUTHORIZED,
Json(json!({"error": "AuthenticationRequired"})),
)
.into_response();
}
};
let auth_user = match crate::auth::validate_bearer_token(&state.db, &token).await {
Ok(user) => user,
Err(e) => {
return (
StatusCode::UNAUTHORIZED,
Json(json!({"error": "AuthenticationFailed", "message": e})),
)
.into_response();
}
};
let did = &auth_user.did;
let user = match sqlx::query!(
"SELECT id, deactivated_at, takedown_ref FROM users WHERE did = $1",
did
)
.fetch_optional(&state.db)
.await
{
Ok(Some(row)) => row,
Ok(None) => {
return (
StatusCode::NOT_FOUND,
Json(json!({"error": "AccountNotFound"})),
)
.into_response();
}
Err(e) => {
error!("DB error fetching user: {:?}", e);
return (
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "InternalError"})),
)
.into_response();
}
};
if user.deactivated_at.is_some() {
return (
StatusCode::FORBIDDEN,
Json(json!({
"error": "AccountDeactivated",
"message": "Account is deactivated"
})),
)
.into_response();
}
if user.takedown_ref.is_some() {
return (
StatusCode::FORBIDDEN,
Json(json!({
"error": "AccountTakenDown",
"message": "Account has been taken down"
})),
)
.into_response();
}
let user_id = user.id;
let (root, blocks) = match parse_car(&body).await {
Ok((r, b)) => (r, b),
Err(ImportError::InvalidRootCount) => {
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": "Expected exactly one root in CAR file"
})),
)
.into_response();
}
Err(ImportError::CarParse(msg)) => {
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": format!("Failed to parse CAR file: {}", msg)
})),
)
.into_response();
}
Err(e) => {
error!("CAR parsing error: {:?}", e);
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": format!("Invalid CAR file: {}", e)
})),
)
.into_response();
}
};
info!(
"Importing repo for user {}: {} blocks, root {}",
did,
blocks.len(),
root
);
let root_block = match blocks.get(&root) {
Some(b) => b,
None => {
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": "Root block not found in CAR file"
})),
)
.into_response();
}
};
let commit_did = match jacquard_repo::commit::Commit::from_cbor(root_block) {
Ok(commit) => commit.did().to_string(),
Err(e) => {
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": format!("Invalid commit: {}", e)
})),
)
.into_response();
}
};
if commit_did != *did {
return (
StatusCode::FORBIDDEN,
Json(json!({
"error": "InvalidRequest",
"message": format!(
"CAR file is for DID {} but you are authenticated as {}",
commit_did, did
)
})),
)
.into_response();
}
let skip_verification = std::env::var("SKIP_IMPORT_VERIFICATION")
.map(|v| v == "true" || v == "1")
.unwrap_or(false);
if !skip_verification {
debug!("Verifying CAR file signature and structure for DID {}", did);
let verifier = CarVerifier::new();
match verifier.verify_car(did, &root, &blocks).await {
Ok(verified) => {
debug!(
"CAR verification successful: rev={}, data_cid={}",
verified.rev, verified.data_cid
);
}
Err(crate::sync::verify::VerifyError::DidMismatch {
commit_did,
expected_did,
}) => {
return (
StatusCode::FORBIDDEN,
Json(json!({
"error": "InvalidRequest",
"message": format!(
"CAR file is for DID {} but you are authenticated as {}",
commit_did, expected_did
)
})),
)
.into_response();
}
Err(crate::sync::verify::VerifyError::InvalidSignature) => {
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidSignature",
"message": "CAR file commit signature verification failed"
})),
)
.into_response();
}
Err(crate::sync::verify::VerifyError::DidResolutionFailed(msg)) => {
warn!("DID resolution failed during import verification: {}", msg);
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": format!("Failed to verify DID: {}", msg)
})),
)
.into_response();
}
Err(crate::sync::verify::VerifyError::NoSigningKey) => {
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": "DID document does not contain a signing key"
})),
)
.into_response();
}
Err(crate::sync::verify::VerifyError::MstValidationFailed(msg)) => {
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": format!("MST validation failed: {}", msg)
})),
)
.into_response();
}
Err(e) => {
error!("CAR verification error: {:?}", e);
return (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": format!("CAR verification failed: {}", e)
})),
)
.into_response();
}
}
} else {
warn!("Skipping CAR signature verification for import (SKIP_IMPORT_VERIFICATION=true)");
}
let max_blocks: usize = std::env::var("MAX_IMPORT_BLOCKS")
.ok()
.and_then(|s| s.parse().ok())
.unwrap_or(DEFAULT_MAX_BLOCKS);
match apply_import(&state.db, user_id, root, blocks, max_blocks).await {
Ok(records) => {
info!(
"Successfully imported {} records for user {}",
records.len(),
did
);
if let Err(e) = sequence_import_event(&state, did, &root.to_string()).await {
warn!("Failed to sequence import event: {:?}", e);
}
(StatusCode::OK, Json(json!({}))).into_response()
}
Err(ImportError::SizeLimitExceeded) => (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": format!("Import exceeds block limit of {}", max_blocks)
})),
)
.into_response(),
Err(ImportError::RepoNotFound) => (
StatusCode::NOT_FOUND,
Json(json!({
"error": "RepoNotFound",
"message": "Repository not initialized for this account"
})),
)
.into_response(),
Err(ImportError::InvalidCbor(msg)) => (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": format!("Invalid CBOR data: {}", msg)
})),
)
.into_response(),
Err(ImportError::InvalidCommit(msg)) => (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": format!("Invalid commit structure: {}", msg)
})),
)
.into_response(),
Err(ImportError::BlockNotFound(cid)) => (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "InvalidRequest",
"message": format!("Referenced block not found in CAR: {}", cid)
})),
)
.into_response(),
Err(ImportError::ConcurrentModification) => (
StatusCode::CONFLICT,
Json(json!({
"error": "ConcurrentModification",
"message": "Repository is being modified by another operation, please retry"
})),
)
.into_response(),
Err(ImportError::VerificationFailed(ve)) => (
StatusCode::BAD_REQUEST,
Json(json!({
"error": "VerificationFailed",
"message": format!("CAR verification failed: {}", ve)
})),
)
.into_response(),
Err(ImportError::DidMismatch { car_did, auth_did }) => (
StatusCode::FORBIDDEN,
Json(json!({
"error": "DidMismatch",
"message": format!("CAR is for {} but authenticated as {}", car_did, auth_did)
})),
)
.into_response(),
Err(e) => {
error!("Import error: {:?}", e);
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "InternalError"})),
)
.into_response()
}
}
}
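/// Records the imported root as a 'commit' event in repo_seq with empty ops,
/// blob, and block lists so the event stream reflects the new repository root.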
async fn sequence_import_event(
state: &AppState,
did: &str,
commit_cid: &str,
) -> Result<(), sqlx::Error> {
let prev_cid: Option<String> = None;
let ops = serde_json::json!([]);
let blobs: Vec<String> = vec![];
let blocks_cids: Vec<String> = vec![];
sqlx::query!(
r#"
INSERT INTO repo_seq (did, event_type, commit_cid, prev_cid, ops, blobs, blocks_cids)
VALUES ($1, 'commit', $2, $3, $4, $5, $6)
"#,
did,
commit_cid,
prev_cid,
ops,
&blobs,
&blocks_cids
)
.execute(&state.db)
.await?;
Ok(())
}

View File

@@ -1,7 +1,9 @@
pub mod blob;
pub mod import;
pub mod meta;
pub mod record;
pub use blob::{list_missing_blobs, upload_blob};
pub use import::import_repo;
pub use meta::describe_repo;
pub use record::{apply_writes, create_record, delete_record, get_record, list_records, put_record};

View File

@@ -3,6 +3,7 @@ use cid::Cid;
use jacquard::types::{did::Did, integer::LimitedU32, string::Tid};
use jacquard_repo::commit::Commit;
use jacquard_repo::storage::BlockStore;
use k256::ecdsa::SigningKey;
use serde_json::json;
use uuid::Uuid;
@@ -26,12 +27,30 @@ pub async fn commit_and_log(
ops: Vec<RecordOp>,
blocks_cids: &Vec<String>,
) -> Result<CommitResult, String> {
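// Load and decrypt this user's repo signing key so the new commit can be signed.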
let key_row = sqlx::query!(
"SELECT key_bytes, encryption_version FROM user_keys WHERE user_id = $1",
user_id
)
.fetch_one(&state.db)
.await
.map_err(|e| format!("Failed to fetch signing key: {}", e))?;
let key_bytes = crate::config::decrypt_key(&key_row.key_bytes, key_row.encryption_version)
.map_err(|e| format!("Failed to decrypt signing key: {}", e))?;
let signing_key = SigningKey::from_slice(&key_bytes)
.map_err(|e| format!("Invalid signing key: {}", e))?;
let did_obj = Did::new(did).map_err(|e| format!("Invalid DID: {}", e))?;
let rev = Tid::now(LimitedU32::MIN);
let new_commit = Commit::new_unsigned(did_obj, new_mst_root, rev.clone(), current_root_cid);
let unsigned_commit = Commit::new_unsigned(did_obj, new_mst_root, rev.clone(), current_root_cid);
let new_commit_bytes = new_commit.to_cbor().map_err(|e| format!("Failed to serialize commit: {:?}", e))?;
let signed_commit = unsigned_commit
.sign(&signing_key)
.map_err(|e| format!("Failed to sign commit: {:?}", e))?;
let new_commit_bytes = signed_commit.to_cbor().map_err(|e| format!("Failed to serialize commit: {:?}", e))?;
let new_root_cid = state.block_store.put(&new_commit_bytes).await
.map_err(|e| format!("Failed to save commit block: {:?}", e))?;