From b8ba815b4cafdff48694d14c994e862738d342ef Mon Sep 17 00:00:00 2001
From: bryan newbold <bnewbold@robocracy.org>
Date: Sun, 19 Feb 2023 15:37:20 -0800
Subject: move a bunch of code from pds to common

---
 Cargo.lock                                    |  48 +--
 adenosine-pds/Cargo.toml                      |  12 -
 adenosine-pds/src/bin/adenosine-pds.rs        |   1 +
 adenosine-pds/src/bsky.rs                     |   2 +-
 adenosine-pds/src/car.rs                      | 100 ------
 adenosine-pds/src/crypto.rs                   | 280 ----------------
 adenosine-pds/src/did.rs                      | 367 --------------------
 adenosine-pds/src/lib.rs                      |  16 +-
 adenosine-pds/src/mst.rs                      | 399 ----------------------
 adenosine-pds/src/repo.rs                     | 466 --------------------------
 adenosine-pds/src/ucan_p256.rs                |  85 -----
 adenosine-pds/src/vendored.rs                 |   1 -
 adenosine-pds/src/vendored/iroh_car/README.md |  27 --
 adenosine-pds/src/vendored/iroh_car/error.rs  |  29 --
 adenosine-pds/src/vendored/iroh_car/header.rs | 107 ------
 adenosine-pds/src/vendored/iroh_car/lib.rs    |  11 -
 adenosine-pds/src/vendored/iroh_car/mod.rs    |  10 -
 adenosine-pds/src/vendored/iroh_car/reader.rs | 101 ------
 adenosine-pds/src/vendored/iroh_car/util.rs   |  95 ------
 adenosine-pds/src/vendored/iroh_car/writer.rs |  73 ----
 adenosine-pds/src/web.rs                      |   2 +-
 adenosine/Cargo.toml                          |  16 +
 adenosine/src/car.rs                          | 100 ++++++
 adenosine/src/crypto.rs                       | 280 ++++++++++++++++
 adenosine/src/did.rs                          | 368 ++++++++++++++++++++
 adenosine/src/lib.rs                          |   7 +
 adenosine/src/mst.rs                          | 399 ++++++++++++++++++++
 adenosine/src/repo.rs                         | 466 ++++++++++++++++++++++++++
 adenosine/src/ucan_p256.rs                    |  85 +++++
 adenosine/src/vendored.rs                     |   1 +
 adenosine/src/vendored/iroh_car/README.md     |  27 ++
 adenosine/src/vendored/iroh_car/error.rs      |  29 ++
 adenosine/src/vendored/iroh_car/header.rs     | 107 ++++++
 adenosine/src/vendored/iroh_car/lib.rs        |  11 +
 adenosine/src/vendored/iroh_car/mod.rs        |  10 +
 adenosine/src/vendored/iroh_car/reader.rs     | 101 ++++++
 adenosine/src/vendored/iroh_car/util.rs       |  95 ++++++
 adenosine/src/vendored/iroh_car/writer.rs     |  73 ++++
 38 files changed, 2209 insertions(+), 2198 deletions(-)
 delete mode 100644 adenosine-pds/src/car.rs
 delete mode 100644 adenosine-pds/src/crypto.rs
 delete mode 100644 adenosine-pds/src/did.rs
 delete mode 100644 adenosine-pds/src/mst.rs
 delete mode 100644 adenosine-pds/src/repo.rs
 delete mode 100644 adenosine-pds/src/ucan_p256.rs
 delete mode 100644 adenosine-pds/src/vendored.rs
 delete mode 100644 adenosine-pds/src/vendored/iroh_car/README.md
 delete mode 100644 adenosine-pds/src/vendored/iroh_car/error.rs
 delete mode 100644 adenosine-pds/src/vendored/iroh_car/header.rs
 delete mode 100644 adenosine-pds/src/vendored/iroh_car/lib.rs
 delete mode 100644 adenosine-pds/src/vendored/iroh_car/mod.rs
 delete mode 100644 adenosine-pds/src/vendored/iroh_car/reader.rs
 delete mode 100644 adenosine-pds/src/vendored/iroh_car/util.rs
 delete mode 100644 adenosine-pds/src/vendored/iroh_car/writer.rs
 create mode 100644 adenosine/src/car.rs
 create mode 100644 adenosine/src/crypto.rs
 create mode 100644 adenosine/src/did.rs
 create mode 100644 adenosine/src/mst.rs
 create mode 100644 adenosine/src/repo.rs
 create mode 100644 adenosine/src/ucan_p256.rs
 create mode 100644 adenosine/src/vendored.rs
 create mode 100644 adenosine/src/vendored/iroh_car/README.md
 create mode 100644 adenosine/src/vendored/iroh_car/error.rs
 create mode 100644 adenosine/src/vendored/iroh_car/header.rs
 create mode 100644 adenosine/src/vendored/iroh_car/lib.rs
 create mode 100644 adenosine/src/vendored/iroh_car/mod.rs
 create mode 100644 adenosine/src/vendored/iroh_car/reader.rs
 create mode 100644 adenosine/src/vendored/iroh_car/util.rs
 create mode 100644 adenosine/src/vendored/iroh_car/writer.rs

diff --git a/Cargo.lock b/Cargo.lock
index 04be8be..0a6e045 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -7,17 +7,31 @@ name = "adenosine"
 version = "0.2.0"
 dependencies = [
  "anyhow",
+ "async-trait",
  "base64",
+ "bs58",
  "data-encoding",
  "env_logger",
+ "futures",
+ "integer-encoding",
+ "ipfs-sqlite-block-store",
+ "k256",
  "lazy_static",
+ "libipld",
  "log",
+ "multibase",
+ "multihash",
+ "p256",
  "rand",
  "regex",
  "reqwest",
  "serde",
  "serde_json",
+ "sha256",
+ "thiserror",
  "time",
+ "tokio",
+ "ucan",
 ]
 
 [[package]]
@@ -53,31 +67,21 @@ dependencies = [
  "askama",
  "async-trait",
  "bcrypt",
- "bs58",
  "data-encoding",
  "dotenvy",
  "futures",
- "integer-encoding",
  "ipfs-sqlite-block-store",
- "k256",
  "lazy_static",
  "libipld",
  "log",
- "multibase",
- "multihash",
- "p256",
  "pretty_env_logger",
  "rouille",
  "rusqlite",
  "rusqlite_migration",
  "serde",
  "serde_json",
- "sha256",
  "structopt",
- "thiserror",
- "time",
  "tokio",
- "ucan",
 ]
 
 [[package]]
@@ -284,9 +288,9 @@ dependencies = [
 
 [[package]]
 name = "async-recursion"
-version = "1.0.0"
+version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2cda8f4bcc10624c4e85bc66b3f452cca98cfa5ca002dc83a16aad2367641bea"
+checksum = "3b015a331cc64ebd1774ba119538573603427eaace0a1950c423ab971f903796"
 dependencies = [
  "proc-macro2",
  "quote",
  "syn",
 ]
 
 [[package]]
@@ -338,9 +342,9 @@
 
 [[package]]
 name = "atomic-waker"
-version = "1.0.0"
+version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "065374052e7df7ee4047b1160cca5e1467a12351a40b3da123c870ba0b8eda2a"
+checksum = "debc29dde2e69f9e47506b525f639ed42300fc014a3e007832592448fa8e4599"
 
 [[package]]
 name = "atty"
@@ -645,9 +649,9 @@
 
 [[package]]
 name = "concurrent-queue"
-version = "2.0.0"
+version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd7bef69dc86e3c610e4e7aed41035e2a7ed12e72dd7530f61327a6579a4390b"
+checksum = "c278839b831783b70278b14df4d45e1beb1aad306c07bb796637de9a0e323e8e"
 dependencies = [
  "crossbeam-utils",
 ]
 
 [[package]]
@@ -1169,9 +1173,9 @@
 
 [[package]]
 name = "gloo-timers"
-version = "0.2.5"
+version = "0.2.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "98c4a8d6391675c6b2ee1a6c8d06e8e2d03605c44cec1270675985a4c2a5500b"
+checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c"
 dependencies = [
  "futures-channel",
  "futures-core",
@@ -2448,9 +2452,9 @@
 
 [[package]]
 name = "serde_bytes"
-version = "0.11.8"
+version = "0.11.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "718dc5fff5b36f99093fc49b280cfc96ce6fc824317783bff5a1fed0c7a64819"
+checksum = "416bda436f9aab92e02c8e10d49a15ddd339cea90b6e340fe51ed97abb548294"
 dependencies = [
  "serde",
 ]
 
 [[package]]
@@ -2541,9 +2545,9 @@
 
 [[package]]
 name = "sha256"
-version = "1.1.1"
+version = "1.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e334db67871c14c18fc066ad14af13f9fdf5f9a91c61af432d1e3a39c8c6a141"
+checksum = "328169f167261957e83d82be47f9e36629e257c62308129033d7f7e7c173d180"
 dependencies = [
  "hex",
  "sha2 0.9.9",

diff --git a/adenosine-pds/Cargo.toml b/adenosine-pds/Cargo.toml
index 482a812..41cfaae 100644
--- a/adenosine-pds/Cargo.toml
+++ b/adenosine-pds/Cargo.toml
@@ -32,25 +32,13 @@ rouille = "3"
 adenosine = { version = "0.2.0", path = "../adenosine" }
 tokio = { version = "1", features = ["full"] }
 futures = "0.3"
-sha256 = "1"
 lazy_static = "1"
bcrypt = "0.13" data-encoding = "2" -k256 = { version = "0.11", features = ["ecdsa"] } -p256 = { version = "0.11", features = ["ecdsa"] } -multibase = "0.9" -ucan = "0.7.0-alpha.1" # TODO: replace this with data-encoding or similar; this is only needed for ucan_p256 stuff -bs58 = "0.4" async-trait = "0.1" dotenvy = "0.15" askama = { version = "0.11", features = ["serde-json"] } -time = { version = "0.3", features = ["formatting"] } - -# for vendored iroh-car -thiserror = "1.0" -integer-encoding = { version = "3", features = ["tokio_async"] } -multihash = "0.16" [package.metadata.deb] maintainer = "Bryan Newbold " diff --git a/adenosine-pds/src/bin/adenosine-pds.rs b/adenosine-pds/src/bin/adenosine-pds.rs index 1fef557..c16e7e0 100644 --- a/adenosine-pds/src/bin/adenosine-pds.rs +++ b/adenosine-pds/src/bin/adenosine-pds.rs @@ -1,3 +1,4 @@ +use adenosine::mst; use adenosine_pds::models::AccountRequest; use adenosine_pds::*; use anyhow::Result; diff --git a/adenosine-pds/src/bsky.rs b/adenosine-pds/src/bsky.rs index e966504..caa16f6 100644 --- a/adenosine-pds/src/bsky.rs +++ b/adenosine-pds/src/bsky.rs @@ -1,5 +1,4 @@ use crate::models::*; -use crate::repo::Mutation; /// Helper functions for doing database and repo operations relating to bluesky endpoints and /// records use crate::{ @@ -7,6 +6,7 @@ use crate::{ XrpcError, }; use adenosine::identifiers::{AtUri, DidOrHost, Nsid}; +use adenosine::repo::Mutation; use anyhow::anyhow; use libipld::Cid; use rusqlite::params; diff --git a/adenosine-pds/src/car.rs b/adenosine-pds/src/car.rs deleted file mode 100644 index 22f83bc..0000000 --- a/adenosine-pds/src/car.rs +++ /dev/null @@ -1,100 +0,0 @@ -use anyhow::Result; - -use crate::vendored::iroh_car::{CarHeader, CarReader, CarWriter}; -use futures::TryStreamExt; -use ipfs_sqlite_block_store::BlockStore; -use libipld::{Block, Cid}; -use std::path::PathBuf; -use tokio::fs::File; -use tokio::io::{AsyncRead, BufReader}; - -/// Synchronous wrapper for loading in-memory CAR bytes (`&[u8]`) into a blockstore. -/// -/// Does not do any pinning, even temporarily. Returns the root CID indicated in the CAR file -/// header. -pub fn load_car_bytes_to_blockstore( - db: &mut BlockStore, - car_bytes: &[u8], -) -> Result { - let rt = tokio::runtime::Builder::new_current_thread() - .enable_all() - .build()?; - rt.block_on(inner_car_bytes_loader(db, car_bytes)) -} - -/// Synchronous wrapper for loading on-disk CAR file (by path) into a blockstore. -/// -/// Does not do any pinning, even temporarily. Returns the root CID indicated in the CAR file -/// header. -pub fn load_car_path_to_blockstore( - db: &mut BlockStore, - car_path: &PathBuf, -) -> Result { - let rt = tokio::runtime::Builder::new_current_thread() - .enable_all() - .build()?; - rt.block_on(inner_car_path_loader(db, car_path)) -} - -pub fn read_car_bytes_from_blockstore( - db: &mut BlockStore, - root: &Cid, -) -> Result> { - let rt = tokio::runtime::Builder::new_current_thread() - .enable_all() - .build()?; - rt.block_on(inner_car_bytes_reader(db, root)) -} - -async fn inner_car_bytes_loader( - db: &mut BlockStore, - car_bytes: &[u8], -) -> Result { - let car_reader = CarReader::new(car_bytes).await?; - inner_car_loader(db, car_reader).await -} - -async fn inner_car_path_loader( - db: &mut BlockStore, - car_path: &PathBuf, -) -> Result { - let car_reader = { - let file = File::open(car_path).await?; - let buf_reader = BufReader::new(file); - CarReader::new(buf_reader).await? 
-    };
-    inner_car_loader(db, car_reader).await
-}
-
-async fn inner_car_loader<R: AsyncRead + Send + Unpin>(
-    db: &mut BlockStore<libipld::DefaultParams>,
-    car_reader: CarReader<R>,
-) -> Result<Cid> {
-    let car_header = car_reader.header().clone();
-    car_reader
-        .stream()
-        .try_for_each(|(cid, raw)| {
-            // TODO: error handling here instead of unwrap (?)
-            let block = Block::new(cid, raw).unwrap();
-            db.put_block(block, None).unwrap();
-            futures::future::ready(Ok(()))
-        })
-        .await?;
-    Ok(car_header.roots()[0])
-}
-
-async fn inner_car_bytes_reader(
-    db: &mut BlockStore<libipld::DefaultParams>,
-    root: &Cid,
-) -> Result<Vec<u8>> {
-    let car_header = CarHeader::new_v1(vec![*root]);
-    let buf: Vec<u8> = Default::default();
-    let mut car_writer = CarWriter::new(car_header, buf);
-
-    let cid_list = db.get_descendants::<Vec<_>>(root)?;
-    for cid in cid_list {
-        let block = db.get_block(&cid)?.expect("block content");
-        car_writer.write(cid, block).await?;
-    }
-    Ok(car_writer.finish().await?)
-}
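// A minimal usage sketch for the CAR helpers above; the `adenosine::car`
// module path is an assumption based on the new file location, and the
// blockstore setup mirrors what `RepoStore::open_ephemeral` does further down:
//
//     use adenosine::car::{load_car_bytes_to_blockstore, read_car_bytes_from_blockstore};
//     use ipfs_sqlite_block_store::{BlockStore, DbPath};
//
//     fn roundtrip_car(car_bytes: &[u8]) -> anyhow::Result<Vec<u8>> {
//         let mut db: BlockStore<libipld::DefaultParams> =
//             BlockStore::open_path(DbPath::Memory, Default::default())?;
//         // import all blocks; returns the root CID from the CAR header
//         let root = load_car_bytes_to_blockstore(&mut db, car_bytes)?;
//         // walk the DAG under that root and re-serialize it as CAR bytes
//         read_car_bytes_from_blockstore(&mut db, &root)
//     }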
diff --git a/adenosine-pds/src/crypto.rs b/adenosine-pds/src/crypto.rs
deleted file mode 100644
index adf4e02..0000000
--- a/adenosine-pds/src/crypto.rs
+++ /dev/null
@@ -1,280 +0,0 @@
-use crate::P256KeyMaterial;
-use adenosine::identifiers::Did;
-use anyhow::{anyhow, ensure, Result};
-use p256::ecdsa::signature::{Signer, Verifier};
-use std::str::FromStr;
-use ucan::builder::UcanBuilder;
-
-// Need to:
-//
-// - generate new random keypair
-// - generate keypair from seed
-// - read/write secret keypair (eg, for PDS config loading)
-// - sign bytes (and ipld?) using keypair
-// - verify signature bytes (and ipld?) using pubkey
-
-const MULTICODE_P256_BYTES: [u8; 2] = [0x80, 0x24];
-const MULTICODE_K256_BYTES: [u8; 2] = [0xe7, 0x01];
-
-#[derive(Clone, PartialEq, Eq)]
-pub struct KeyPair {
-    public: p256::ecdsa::VerifyingKey,
-    secret: p256::ecdsa::SigningKey,
-}
-
-#[derive(Clone, PartialEq, Eq)]
-pub enum PubKey {
-    P256(p256::ecdsa::VerifyingKey),
-    K256(k256::ecdsa::VerifyingKey),
-}
-
-impl KeyPair {
-    pub fn new_random() -> Self {
-        let signing = p256::ecdsa::SigningKey::random(&mut p256::elliptic_curve::rand_core::OsRng);
-        KeyPair {
-            public: signing.verifying_key(),
-            secret: signing,
-        }
-    }
-
-    pub fn from_bytes(bytes: &[u8]) -> Result<Self> {
-        let signing = p256::ecdsa::SigningKey::from_bytes(bytes)?;
-        Ok(KeyPair {
-            public: signing.verifying_key(),
-            secret: signing,
-        })
-    }
-
-    pub fn to_bytes(&self) -> Vec<u8> {
-        self.secret.to_bytes().to_vec()
-    }
-
-    pub fn pubkey(&self) -> PubKey {
-        PubKey::P256(self.public)
-    }
-
-    pub fn sign_bytes(&self, data: &[u8]) -> String {
-        let sig = self.secret.sign(data);
-        data_encoding::BASE64URL_NOPAD.encode(&sig.to_vec())
-    }
-
-    fn ucan_keymaterial(&self) -> P256KeyMaterial {
-        P256KeyMaterial(self.public, Some(self.secret.clone()))
-    }
-
-    /// This is currently just an un-validated token; we don't actually verify these.
-    pub fn ucan(&self, did: &Did) -> Result<String> {
-        let key_material = self.ucan_keymaterial();
-        let rt = tokio::runtime::Builder::new_current_thread()
-            .enable_all()
-            .build()?;
-        rt.block_on(build_ucan(key_material, did))
-    }
-
-    pub fn to_hex(&self) -> String {
-        data_encoding::HEXUPPER.encode(&self.to_bytes())
-    }
-
-    pub fn from_hex(hex: &str) -> Result<Self> {
-        Self::from_bytes(&data_encoding::HEXUPPER.decode(hex.as_bytes())?)
-    }
-}
-
-async fn build_ucan(key_material: P256KeyMaterial, did: &Did) -> Result<String> {
-    let token_string = UcanBuilder::default()
-        .issued_by(&key_material)
-        .for_audience(did)
-        .with_nonce()
-        .with_lifetime(60 * 60 * 24 * 90)
-        .build()?
-        .sign()
-        .await?
-        .encode()?;
-    Ok(token_string)
-}
-
-impl PubKey {
-    pub fn verify_bytes(&self, data: &[u8], sig: &str) -> Result<()> {
-        let sig_bytes = data_encoding::BASE64URL_NOPAD.decode(sig.as_bytes())?;
-        // TODO: better way other than this re-encoding?
-        let sig_hex = data_encoding::HEXUPPER.encode(&sig_bytes);
-        match self {
-            PubKey::P256(key) => {
-                let sig = p256::ecdsa::Signature::from_str(&sig_hex)?;
-                Ok(key.verify(data, &sig)?)
-            }
-            PubKey::K256(key) => {
-                let sig = k256::ecdsa::Signature::from_str(&sig_hex)?;
-                Ok(key.verify(data, &sig)?)
-            }
-        }
-    }
-
-    pub fn key_type(&self) -> String {
-        match self {
-            PubKey::P256(_) => "EcdsaSecp256r1VerificationKey2019",
-            PubKey::K256(_) => "EcdsaSecp256k1VerificationKey2019",
-        }
-        .to_string()
-    }
-
-    /// This public verification key encoded as base58btc multibase string, not 'compressed', as
-    /// included in DID documents ('publicKeyMultibase').
-    ///
-    /// Note that the did:key serialization does 'compress' the key into a smaller size.
-    pub fn to_multibase(&self) -> String {
-        let mut bytes: Vec<u8> = vec![];
-        match self {
-            PubKey::P256(key) => {
-                bytes.extend_from_slice(&MULTICODE_P256_BYTES);
-                bytes.extend_from_slice(&key.to_encoded_point(false).to_bytes());
-            }
-            PubKey::K256(key) => {
-                bytes.extend_from_slice(&MULTICODE_K256_BYTES);
-                bytes.extend_from_slice(&key.to_bytes());
-            }
-        }
-        multibase::encode(multibase::Base::Base58Btc, &bytes)
-    }
-
-    /// Serializes as a 'did:key' string.
-    pub fn to_did_key(&self) -> String {
-        let mut bytes: Vec<u8> = vec![];
-        match self {
-            PubKey::P256(key) => {
-                bytes.extend_from_slice(&MULTICODE_P256_BYTES);
-                bytes.extend_from_slice(&key.to_encoded_point(true).to_bytes());
-            }
-            PubKey::K256(key) => {
-                bytes.extend_from_slice(&MULTICODE_K256_BYTES);
-                bytes.extend_from_slice(&key.to_bytes());
-            }
-        }
-        format!(
-            "did:key:{}",
-            multibase::encode(multibase::Base::Base58Btc, &bytes)
-        )
-    }
-
-    pub fn from_did_key(did_key: &str) -> Result<Self> {
-        if !did_key.starts_with("did:key:") || did_key.len() < 20 {
-            return Err(anyhow!("does not look like a did:key: {}", did_key));
-        }
-        let (key_type, bytes) = multibase::decode(&did_key[8..])?;
-        ensure!(
-            key_type == multibase::Base::Base58Btc,
-            "base58btc-encoded key"
-        );
-        // prefix bytes
-        let prefix: [u8; 2] = [bytes[0], bytes[1]];
-        match prefix {
-            MULTICODE_K256_BYTES => Ok(PubKey::K256(k256::ecdsa::VerifyingKey::from_sec1_bytes(
-                &bytes[2..],
-            )?)),
-            MULTICODE_P256_BYTES => Ok(PubKey::P256(p256::ecdsa::VerifyingKey::from_sec1_bytes(
-                &bytes[2..],
-            )?)),
-            _ => Err(anyhow!(
-                "key type (multicodec) not handled when parsing DID key: {}",
-                did_key
-            )),
-        }
-    }
-
-    pub fn to_bytes(&self) -> Vec<u8> {
-        match self {
-            PubKey::P256(key) => key.to_encoded_point(true).to_bytes().to_vec(),
-            PubKey::K256(key) => key.to_bytes().to_vec(),
-        }
-    }
-
-    pub fn ucan_keymaterial(&self) -> P256KeyMaterial {
-        match self {
-            PubKey::P256(key) => P256KeyMaterial(*key, None),
-            PubKey::K256(_key) => unimplemented!(),
-        }
-    }
-}
-
-impl std::fmt::Display for PubKey {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", self.to_did_key())
-    }
-}
-
-#[test]
-fn test_did_secp256k1_p256() {
-    // did:key secp256k1 test vectors from W3C
-    // https://github.com/w3c-ccg/did-method-key/blob/main/test-vectors/secp256k1.json
-    // via atproto repo
-    let pairs = vec![
-        (
-            "9085d2bef69286a6cbb51623c8fa258629945cd55ca705cc4e66700396894e0c",
-            "did:key:zQ3shokFTS3brHcDQrn82RUDfCZESWL1ZdCEJwekUDPQiYBme",
-        ),
-        (
"f0f4df55a2b3ff13051ea814a8f24ad00f2e469af73c363ac7e9fb999a9072ed", - "did:key:zQ3shtxV1FrJfhqE1dvxYRcCknWNjHc3c5X1y3ZSoPDi2aur2", - ), - ( - "6b0b91287ae3348f8c2f2552d766f30e3604867e34adc37ccbb74a8e6b893e02", - "did:key:zQ3shZc2QzApp2oymGvQbzP8eKheVshBHbU4ZYjeXqwSKEn6N", - ), - ( - "c0a6a7c560d37d7ba81ecee9543721ff48fea3e0fb827d42c1868226540fac15", - "did:key:zQ3shadCps5JLAHcZiuX5YUtWHHL8ysBJqFLWvjZDKAWUBGzy", - ), - ( - "175a232d440be1e0788f25488a73d9416c04b6f924bea6354bf05dd2f1a75133", - "did:key:zQ3shptjE6JwdkeKN4fcpnYQY3m9Cet3NiHdAfpvSUZBFoKBj", - ), - ]; - - // test decode/encode did:key - for (_hex, did) in pairs.iter() { - assert_eq!(did, &PubKey::from_did_key(did).unwrap().to_did_key()); - } - - let p256_dids = vec![ - "did:key:zDnaerx9CtbPJ1q36T5Ln5wYt3MQYeGRG5ehnPAmxcf5mDZpv", - "did:key:zDnaerDaTF5BXEavCrfRZEk316dpbLsfPDZ3WJ5hRTPFU2169", - ]; - for did in p256_dids { - assert_eq!(did, &PubKey::from_did_key(did).unwrap().to_did_key()); - } -} - -#[test] -fn test_did_plc_examples() { - // https://atproto.com/specs/did-plc - let example_dids = vec![ - "did:key:zDnaejYFhgFiVF89LhJ4UipACLKuqo6PteZf8eKDVKeExXUPk", - "did:key:zDnaeSezF2TgCD71b5DiiFyhHQwKAfsBVqTTHRMvP597Z5Ztn", - "did:key:zDnaeh9v2RmcMo13Du2d6pjUf5bZwtauYxj3n9dYjw4EZUAR7", - "did:key:zDnaedvvAsDE6H3BDdBejpx9ve2Tz95cymyCAKF66JbyMh1Lt", - ]; - - for did in example_dids { - assert_eq!(did, &PubKey::from_did_key(did).unwrap().to_did_key()); - } -} - -#[test] -fn test_signing() { - let msg = b"you have found the secret message"; - let keypair = KeyPair::new_random(); - let sig_str = keypair.sign_bytes(msg); - keypair.pubkey().verify_bytes(msg, &sig_str).unwrap(); - - // and with pubkey that has been serialized/deserialized - let did_key = keypair.pubkey().to_did_key(); - let pubkey = PubKey::from_did_key(&did_key).unwrap(); - pubkey.verify_bytes(msg, &sig_str).unwrap(); -} - -#[test] -fn test_keypair_hex() { - let before = KeyPair::new_random(); - let after = KeyPair::from_hex(&before.to_hex()).unwrap(); - assert!(before == after); -} diff --git a/adenosine-pds/src/did.rs b/adenosine-pds/src/did.rs deleted file mode 100644 index 88fa5b8..0000000 --- a/adenosine-pds/src/did.rs +++ /dev/null @@ -1,367 +0,0 @@ -/// DID and 'did:plc' stuff -/// -/// This is currently a partial/skeleton implementation, which only generates local/testing did:plc -/// DIDs (and DID documents) using a single 'create' genesis block. Key rotation, etc, is not -/// supported. 
diff --git a/adenosine-pds/src/did.rs b/adenosine-pds/src/did.rs
deleted file mode 100644
index 88fa5b8..0000000
--- a/adenosine-pds/src/did.rs
+++ /dev/null
@@ -1,367 +0,0 @@
-/// DID and 'did:plc' stuff
-///
-/// This is currently a partial/skeleton implementation, which only generates local/testing did:plc
-/// DIDs (and DID documents) using a single 'create' genesis block. Key rotation, etc, is not
-/// supported.
-use crate::{Did, KeyPair, PubKey};
-use anyhow::Result;
-use libipld::cbor::DagCborCodec;
-use libipld::multihash::Code;
-use libipld::{Block, Cid, DagCbor, DefaultParams};
-use serde_json::json;
-use std::str::FromStr;
-
-#[allow(non_snake_case)]
-#[derive(Debug, DagCbor, PartialEq, Eq, Clone)]
-pub struct CreateOp {
-    #[ipld(rename = "type")]
-    pub op_type: String,
-    pub signingKey: String,
-    pub recoveryKey: String,
-    pub username: String,
-    pub service: String,
-    pub prev: Option<String>,
-    pub sig: String,
-}
-
-#[allow(non_snake_case)]
-#[derive(Debug, DagCbor, PartialEq, Eq, Clone)]
-struct UnsignedCreateOp {
-    #[ipld(rename = "type")]
-    pub op_type: String,
-    pub signingKey: String,
-    pub recoveryKey: String,
-    pub username: String,
-    pub service: String,
-    pub prev: Option<String>,
-}
-
-impl UnsignedCreateOp {
-    fn into_signed(self, sig: String) -> CreateOp {
-        CreateOp {
-            op_type: self.op_type,
-            prev: self.prev,
-            sig,
-            signingKey: self.signingKey,
-            recoveryKey: self.recoveryKey,
-            username: self.username,
-            service: self.service,
-        }
-    }
-}
-
-impl CreateOp {
-    pub fn new(
-        username: String,
-        atp_pds: String,
-        keypair: &KeyPair,
-        recovery_key: Option<String>,
-    ) -> Self {
-        let signing_key = keypair.pubkey().to_did_key();
-        let recovery_key = recovery_key.unwrap_or(signing_key.clone());
-        let unsigned = UnsignedCreateOp {
-            op_type: "create".to_string(),
-            prev: None,
-            signingKey: signing_key,
-            recoveryKey: recovery_key,
-            username,
-            service: atp_pds,
-        };
-        let block = Block::<DefaultParams>::encode(DagCborCodec, Code::Sha2_256, &unsigned)
-            .expect("encode DAG-CBOR");
-        let sig = keypair.sign_bytes(block.data());
-        unsigned.into_signed(sig)
-    }
-
-    pub fn did_plc(&self) -> Did {
-        // dump DAG-CBOR
-        let block = Block::<DefaultParams>::encode(DagCborCodec, Code::Sha2_256, self)
-            .expect("encode DAG-CBOR");
-        let bin = block.data();
-        // hash SHA-256
-        let digest_bytes: Vec<u8> = data_encoding::HEXLOWER
-            .decode(sha256::digest(bin).as_bytes())
-            .expect("SHA-256 digest is always hex string");
-        // encode base32
-        let digest_b32 = data_encoding::BASE32_NOPAD
-            .encode(&digest_bytes)
-            .to_ascii_lowercase();
-        // truncate
-        Did::from_str(&format!("did:plc:{}", &digest_b32[0..24])).unwrap()
-    }
-
-    fn into_unsigned(self) -> UnsignedCreateOp {
-        UnsignedCreateOp {
-            op_type: self.op_type,
-            prev: self.prev,
-            signingKey: self.signingKey,
-            recoveryKey: self.recoveryKey,
-            username: self.username,
-            service: self.service,
-        }
-    }
-
-    pub fn did_doc(&self) -> serde_json::Value {
-        let meta = DidDocMeta {
-            did: self.did_plc(),
-            // TODO
-            user_url: format!("https://{}", self.username),
-            service_url: self.service.clone(),
-            recovery_didkey: self.recoveryKey.clone(),
-            signing_didkey: self.signingKey.clone(),
-        };
-        meta.did_doc()
-    }
-
-    /// This method only makes sense on the "genesis" create object
-    pub fn verify_self(&self) -> Result<()> {
-        let key = PubKey::from_did_key(&self.signingKey)?;
-        let unsigned = {
-            let cpy = (*self).clone();
-            cpy.into_unsigned()
-        };
-        //println!("unsigned: {:?}", unsigned);
-        let block = Block::<DefaultParams>::encode(DagCborCodec, Code::Sha2_256, &unsigned)
-            .expect("encode DAG-CBOR");
-        key.verify_bytes(block.data(), &self.sig)
-    }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct DidDocMeta {
-    pub did: Did,
-    pub user_url: String,
-    pub service_url: String,
-    pub recovery_didkey: String,
-    pub signing_didkey: String,
-}
-
-impl DidDocMeta {
-    pub fn did_doc(&self) -> serde_json::Value {
-        let key_type = "EcdsaSecp256r1VerificationKey2019";
-        json!({
-            "@context": [
"https://www.w3.org/ns/did/v1", - "https://w3id.org/security/suites/ecdsa-2019/v1" - ], - "id": self.did.to_string(), - "alsoKnownAs": [ self.user_url ], - "verificationMethod": [ - { - "id": format!("{}#signingKey)", self.did), - "type": key_type, - "controller": self.did.to_string(), - "publicKeyMultibase": self.signing_didkey - }, - { - "id": format!("{}#recoveryKey)", self.did), - "type": key_type, - "controller": self.did.to_string(), - "publicKeyMultibase": self.recovery_didkey - } - ], - "assertionMethod": [ format!("{}#signingKey)", self.did)], - "capabilityInvocation": [ format!("{}#signingKey)", self.did) ], - "capabilityDelegation": [ format!("{}#signingKey)", self.did) ], - "service": [ - { - "id": format!("{}#atpPds)", self.did), - "type": "AtpPersonalDataServer", - "serviceEndpoint": self.service_url - } - ] - }) - } -} - -#[test] -fn test_debug_did_signing() { - let op = UnsignedCreateOp { - op_type: "create".to_string(), - signingKey: "did:key:zDnaeSWVQyW8DSF6mDwT9j8YrzDWDs8h6PPjuTcipzG84iCBE".to_string(), - recoveryKey: "did:key:zDnaeSWVQyW8DSF6mDwT9j8YrzDWDs8h6PPjuTcipzG84iCBE".to_string(), - username: "carla.test".to_string(), - service: "http://localhost:2583".to_string(), - prev: None, - }; - let block = - Block::::encode(DagCborCodec, Code::Sha2_256, &op).expect("encode DAG-CBOR"); - let op_bytes = block.data(); - - let _key_bytes = vec![ - 4, 30, 224, 8, 198, 84, 108, 1, 58, 193, 91, 176, 212, 45, 4, 36, 28, 252, 242, 95, 20, 85, - 87, 246, 79, 134, 42, 113, 5, 216, 238, 235, 21, 146, 16, 88, 239, 217, 36, 252, 148, 197, - 203, 22, 29, 2, 52, 152, 77, 208, 21, 88, 2, 85, 219, 212, 148, 139, 104, 200, 15, 119, 46, - 178, 186, - ]; - - let pub_key = - PubKey::from_did_key("did:key:zDnaeSWVQyW8DSF6mDwT9j8YrzDWDs8h6PPjuTcipzG84iCBE").unwrap(); - //let keypair = KeyPair::from_bytes(&key_bytes).unwrap(); - //assert_eq!(keypair.to_bytes(), key_bytes); - - let encoded_bytes = vec![ - 166, 100, 112, 114, 101, 118, 246, 100, 116, 121, 112, 101, 102, 99, 114, 101, 97, 116, - 101, 103, 115, 101, 114, 118, 105, 99, 101, 117, 104, 116, 116, 112, 58, 47, 47, 108, 111, - 99, 97, 108, 104, 111, 115, 116, 58, 50, 53, 56, 51, 104, 117, 115, 101, 114, 110, 97, 109, - 101, 106, 99, 97, 114, 108, 97, 46, 116, 101, 115, 116, 106, 115, 105, 103, 110, 105, 110, - 103, 75, 101, 121, 120, 57, 100, 105, 100, 58, 107, 101, 121, 58, 122, 68, 110, 97, 101, - 83, 87, 86, 81, 121, 87, 56, 68, 83, 70, 54, 109, 68, 119, 84, 57, 106, 56, 89, 114, 122, - 68, 87, 68, 115, 56, 104, 54, 80, 80, 106, 117, 84, 99, 105, 112, 122, 71, 56, 52, 105, 67, - 66, 69, 107, 114, 101, 99, 111, 118, 101, 114, 121, 75, 101, 121, 120, 57, 100, 105, 100, - 58, 107, 101, 121, 58, 122, 68, 110, 97, 101, 83, 87, 86, 81, 121, 87, 56, 68, 83, 70, 54, - 109, 68, 119, 84, 57, 106, 56, 89, 114, 122, 68, 87, 68, 115, 56, 104, 54, 80, 80, 106, - 117, 84, 99, 105, 112, 122, 71, 56, 52, 105, 67, 66, 69, - ]; - assert_eq!(encoded_bytes, op_bytes); - - let _sig_bytes = vec![ - 131, 115, 47, 143, 89, 68, 79, 73, 121, 198, 70, 76, 91, 64, 171, 25, 18, 139, 244, 94, - 123, 224, 205, 32, 241, 174, 36, 120, 199, 206, 199, 202, 216, 154, 2, 10, 247, 101, 138, - 170, 85, 95, 142, 164, 50, 203, 92, 23, 247, 218, 231, 224, 78, 68, 55, 104, 243, 145, 243, - 4, 219, 102, 44, 227, - ]; - let sig_str = - "g3Mvj1lET0l5xkZMW0CrGRKL9F574M0g8a4keMfOx8rYmgIK92WKqlVfjqQyy1wX99rn4E5EN2jzkfME22Ys4w"; - - pub_key.verify_bytes(op_bytes, sig_str).unwrap(); - - let signed = op.into_signed(sig_str.to_string()); - signed.verify_self().unwrap(); -} - -/* 
-------------------------------------
-OP:
-{
-  type: 'create',
-  signingKey: 'did:key:zDnaesoxZb8mLjf16e4PWsNqLLj9uWM9TQ8nNwxqErDmKXLAN',
-  recoveryKey: 'did:key:zDnaesoxZb8mLjf16e4PWsNqLLj9uWM9TQ8nNwxqErDmKXLAN',
-  username: 'carla.test',
-  service: 'http://localhost:2583',
-  prev: null,
-  sig: 'VYGxmZs-D5830YdQSNrZpbxVyOPB4nCJtO-x0XElt35AE5wjvJFa2vJu8qjURG6TvEbMvfbekDo_eXEMhdPWdg'
-}
-ENCODED:
-{"0":167,"1":99,"2":115,"3":105,"4":103,"5":120,"6":86,"7":86,"8":89,"9":71,"10":120,"11":109,"12":90,"13":115,"14":45,"15":68,"16":53,"17":56,"18":51,"19":48,"20":89,"21":100,"22":81,"23":83,"24":78,"25":114,"26":90,"27":112,"28":98,"29":120,"30":86,"31":121,"32":79,"33":80,"34":66,"35":52,"36":110,"37":67,"38":74,"39":116,"40":79,"41":45,"42":120,"43":48,"44":88,"45":69,"46":108,"47":116,"48":51,"49":53,"50":65,"51":69,"52":53,"53":119,"54":106,"55":118,"56":74,"57":70,"58":97,"59":50,"60":118,"61":74,"62":117,"63":56,"64":113,"65":106,"66":85,"67":82,"68":71,"69":54,"70":84,"71":118,"72":69,"73":98,"74":77,"75":118,"76":102,"77":98,"78":101,"79":107,"80":68,"81":111,"82":95,"83":101,"84":88,"85":69,"86":77,"87":104,"88":100,"89":80,"90":87,"91":100,"92":103,"93":100,"94":112,"95":114,"96":101,"97":118,"98":246,"99":100,"100":116,"101":121,"102":112,"103":101,"104":102,"105":99,"106":114,"107":101,"108":97,"109":116,"110":101,"111":103,"112":115,"113":101,"114":114,"115":118,"116":105,"117":99,"118":101,"119":117,"120":104,"121":116,"122":116,"123":112,"124":58,"125":47,"126":47,"127":108,"128":111,"129":99,"130":97,"131":108,"132":104,"133":111,"134":115,"135":116,"136":58,"137":50,"138":53,"139":56,"140":51,"141":104,"142":117,"143":115,"144":101,"145":114,"146":110,"147":97,"148":109,"149":101,"150":106,"151":99,"152":97,"153":114,"154":108,"155":97,"156":46,"157":116,"158":101,"159":115,"160":116,"161":106,"162":115,"163":105,"164":103,"165":110,"166":105,"167":110,"168":103,"169":75,"170":101,"171":121,"172":120,"173":57,"174":100,"175":105,"176":100,"177":58,"178":107,"179":101,"180":121,"181":58,"182":122,"183":68,"184":110,"185":97,"186":101,"187":115,"188":111,"189":120,"190":90,"191":98,"192":56,"193":109,"194":76,"195":106,"196":102,"197":49,"198":54,"199":101,"200":52,"201":80,"202":87,"203":115,"204":78,"205":113,"206":76,"207":76,"208":106,"209":57,"210":117,"211":87,"212":77,"213":57,"214":84,"215":81,"216":56,"217":110,"218":78,"219":119,"220":120,"221":113,"222":69,"223":114,"224":68,"225":109,"226":75,"227":88,"228":76,"229":65,"230":78,"231":107,"232":114,"233":101,"234":99,"235":111,"236":118,"237":101,"238":114,"239":121,"240":75,"241":101,"242":121,"243":120,"244":57,"245":100,"246":105,"247":100,"248":58,"249":107,"250":101,"251":121,"252":58,"253":122,"254":68,"255":110,"256":97,"257":101,"258":115,"259":111,"260":120,"261":90,"262":98,"263":56,"264":109,"265":76,"266":106,"267":102,"268":49,"269":54,"270":101,"271":52,"272":80,"273":87,"274":115,"275":78,"276":113,"277":76,"278":76,"279":106,"280":57,"281":117,"282":87,"283":77,"284":57,"285":84,"286":81,"287":56,"288":110,"289":78,"290":119,"291":120,"292":113,"293":69,"294":114,"295":68,"296":109,"297":75,"298":88,"299":76,"300":65,"301":78}
-SHA256 base32:
-cg2dfxdh5voabmdjzw2abw3sgvtjymknh2bmpvtwot7t2ih4v7za
-did:plc:cg2dfxdh5voabmdjzw2abw3s
--------------------------------------
-
-*/
-
-#[test]
-fn test_debug_did_plc() {
-    let op = CreateOp {
-        op_type: "create".to_string(),
-        signingKey: "did:key:zDnaesoxZb8mLjf16e4PWsNqLLj9uWM9TQ8nNwxqErDmKXLAN".to_string(),
-        recoveryKey:
"did:key:zDnaesoxZb8mLjf16e4PWsNqLLj9uWM9TQ8nNwxqErDmKXLAN".to_string(), - username: "carla.test".to_string(), - service: "http://localhost:2583".to_string(), - prev: None, - sig: - "VYGxmZs-D5830YdQSNrZpbxVyOPB4nCJtO-x0XElt35AE5wjvJFa2vJu8qjURG6TvEbMvfbekDo_eXEMhdPWdg" - .to_string(), - }; - op.verify_self().unwrap(); - let block = - Block::::encode(DagCborCodec, Code::Sha2_256, &op).expect("encode DAG-CBOR"); - let op_bytes = block.data(); - - let encoded_bytes = vec![ - 167, 99, 115, 105, 103, 120, 86, 86, 89, 71, 120, 109, 90, 115, 45, 68, 53, 56, 51, 48, 89, - 100, 81, 83, 78, 114, 90, 112, 98, 120, 86, 121, 79, 80, 66, 52, 110, 67, 74, 116, 79, 45, - 120, 48, 88, 69, 108, 116, 51, 53, 65, 69, 53, 119, 106, 118, 74, 70, 97, 50, 118, 74, 117, - 56, 113, 106, 85, 82, 71, 54, 84, 118, 69, 98, 77, 118, 102, 98, 101, 107, 68, 111, 95, - 101, 88, 69, 77, 104, 100, 80, 87, 100, 103, 100, 112, 114, 101, 118, 246, 100, 116, 121, - 112, 101, 102, 99, 114, 101, 97, 116, 101, 103, 115, 101, 114, 118, 105, 99, 101, 117, 104, - 116, 116, 112, 58, 47, 47, 108, 111, 99, 97, 108, 104, 111, 115, 116, 58, 50, 53, 56, 51, - 104, 117, 115, 101, 114, 110, 97, 109, 101, 106, 99, 97, 114, 108, 97, 46, 116, 101, 115, - 116, 106, 115, 105, 103, 110, 105, 110, 103, 75, 101, 121, 120, 57, 100, 105, 100, 58, 107, - 101, 121, 58, 122, 68, 110, 97, 101, 115, 111, 120, 90, 98, 56, 109, 76, 106, 102, 49, 54, - 101, 52, 80, 87, 115, 78, 113, 76, 76, 106, 57, 117, 87, 77, 57, 84, 81, 56, 110, 78, 119, - 120, 113, 69, 114, 68, 109, 75, 88, 76, 65, 78, 107, 114, 101, 99, 111, 118, 101, 114, 121, - 75, 101, 121, 120, 57, 100, 105, 100, 58, 107, 101, 121, 58, 122, 68, 110, 97, 101, 115, - 111, 120, 90, 98, 56, 109, 76, 106, 102, 49, 54, 101, 52, 80, 87, 115, 78, 113, 76, 76, - 106, 57, 117, 87, 77, 57, 84, 81, 56, 110, 78, 119, 120, 113, 69, 114, 68, 109, 75, 88, 76, - 65, 78, - ]; - assert_eq!(op_bytes, encoded_bytes); - - let sha256_str = "cg2dfxdh5voabmdjzw2abw3sgvtjymknh2bmpvtwot7t2ih4v7za"; - let _did_plc = "did:plc:cg2dfxdh5voabmdjzw2abw3s"; - - let digest_bytes: Vec = data_encoding::HEXLOWER - .decode(&sha256::digest(op_bytes).as_bytes()) - .expect("SHA-256 digest is always hex string"); - let digest_b32 = data_encoding::BASE32_NOPAD - .encode(&digest_bytes) - .to_ascii_lowercase(); - assert_eq!(digest_b32, sha256_str); -} - -#[test] -fn test_did_plc_examples() { - // https://atproto.com/specs/did-plc - let op = CreateOp { - op_type: "create".to_string(), - signingKey: "did:key:zDnaejYFhgFiVF89LhJ4UipACLKuqo6PteZf8eKDVKeExXUPk".to_string(), - recoveryKey: "did:key:zDnaeSezF2TgCD71b5DiiFyhHQwKAfsBVqTTHRMvP597Z5Ztn".to_string(), - username: "alice.example.com".to_string(), - service: "https://example.com".to_string(), - prev: None, - sig: - "vi6JAl5W4FfyViD5_BKL9p0rbI3MxTWuh0g_egTFAjtf7gwoSfSe1O3qMOEUPX6QH3H0Q9M4y7gOLGblWkEwfQ" - .to_string(), - }; - op.verify_self().unwrap(); - assert_eq!( - &op.did_plc().to_string(), - "did:plc:7iza6de2dwap2sbkpav7c6c6" - ); - - // interacting with PDS / PLC server - let op = CreateOp { - op_type: "create".to_string(), - signingKey: "did:key:zDnaekmbFffmpo7LZ4C7bEFjGKPk11N47kKN8j7jtAcGUabw3".to_string(), - recoveryKey: "did:key:zDnaekmbFffmpo7LZ4C7bEFjGKPk11N47kKN8j7jtAcGUabw3".to_string(), - username: "voltaire.test".to_string(), - service: "http://localhost:2583".to_string(), - prev: None, - sig: - "HNfQUg6SMnYKp1l3LtAIsoAblmi33mYiHE9JH1j7w3B-hd8xWpmCUBUoqKfQXmsAs0K1z8Izt19yYk6PqVFgyg" - .to_string(), - }; - op.verify_self().unwrap(); - assert_eq!( - &op.did_plc().to_string(), - 
"did:plc:bmrcg7zrxoiw2kiml3tkw2xv" - ); -} - -#[test] -fn test_self_verify() { - let keypair = KeyPair::new_random(); - let op = CreateOp::new( - "dummy-handle".to_string(), - "https://dummy.service".to_string(), - &keypair, - None, - ); - println!("{:?}", op); - op.verify_self().unwrap(); -} - -#[test] -fn test_known_key() { - let keypair = KeyPair::new_random(); - let op = CreateOp::new( - "dummy-handle".to_string(), - "https://dummy.service".to_string(), - &keypair, - None, - ); - println!("{:?}", op); - op.verify_self().unwrap(); -} diff --git a/adenosine-pds/src/lib.rs b/adenosine-pds/src/lib.rs index 3664ce0..07905f9 100644 --- a/adenosine-pds/src/lib.rs +++ b/adenosine-pds/src/lib.rs @@ -15,24 +15,18 @@ use std::str::FromStr; use std::sync::Mutex; mod bsky; -mod car; -mod crypto; mod db; -mod did; pub mod models; -pub mod mst; -mod repo; -mod ucan_p256; -mod vendored; mod web; +pub use adenosine::crypto::{KeyPair, PubKey}; +pub use adenosine::did; +pub use adenosine::did::DidDocMeta; +pub use adenosine::repo::{Mutation, RepoCommit, RepoStore}; +pub use adenosine::ucan_p256::P256KeyMaterial; use bsky::*; -pub use crypto::{KeyPair, PubKey}; pub use db::AtpDatabase; -pub use did::DidDocMeta; pub use models::*; -pub use repo::{Mutation, RepoCommit, RepoStore}; -pub use ucan_p256::P256KeyMaterial; use web::*; pub struct AtpService { diff --git a/adenosine-pds/src/mst.rs b/adenosine-pds/src/mst.rs deleted file mode 100644 index 1d75da1..0000000 --- a/adenosine-pds/src/mst.rs +++ /dev/null @@ -1,399 +0,0 @@ -/// This is a simple immutable implemenation of of a Merkle Search Tree (MST) for atproto. -/// -/// Mutations on the data structure are not implemented; instead the entire tree is read into a -/// BTreeMap, that is mutated, and then the entire tree is regenerated. This makes implementation -/// much simpler, at the obvious cost of performance in some situations. -/// -/// The MST is basically a sorted key/value store where the key is a string and the value is a CID. -/// Tree nodes are stored as DAG-CBOG IPLD blocks, with references as CIDs. -/// -/// In the atproto MST implementation, SHA-256 is the hashing algorithm, and "leading zeros" are -/// counted in blocks of 4 bits (so a leading zero byte counts as two zeros). This happens to match -/// simple hex encoding of the SHA-256 hash. -use anyhow::{anyhow, Context, Result}; -use ipfs_sqlite_block_store::BlockStore; -use libipld::cbor::DagCborCodec; -use libipld::multihash::Code; -use libipld::prelude::Codec; -use libipld::store::DefaultParams; -use libipld::Block; -use libipld::{Cid, DagCbor}; -use log::{debug, error, info}; -use std::collections::BTreeMap; -use std::path::PathBuf; - -#[derive(Debug, DagCbor, PartialEq, Eq)] -pub struct CommitNode { - pub root: Cid, - pub sig: Box<[u8]>, -} - -#[derive(Debug, DagCbor, PartialEq, Eq)] -pub struct RootNode { - pub auth_token: Option, - pub prev: Option, - // TODO: not 'metadata'? 
diff --git a/adenosine-pds/src/lib.rs b/adenosine-pds/src/lib.rs
index 3664ce0..07905f9 100644
--- a/adenosine-pds/src/lib.rs
+++ b/adenosine-pds/src/lib.rs
@@ -15,24 +15,18 @@ use std::str::FromStr;
 use std::sync::Mutex;
 
 mod bsky;
-mod car;
-mod crypto;
 mod db;
-mod did;
 pub mod models;
-pub mod mst;
-mod repo;
-mod ucan_p256;
-mod vendored;
 mod web;
 
+pub use adenosine::crypto::{KeyPair, PubKey};
+pub use adenosine::did;
+pub use adenosine::did::DidDocMeta;
+pub use adenosine::repo::{Mutation, RepoCommit, RepoStore};
+pub use adenosine::ucan_p256::P256KeyMaterial;
 use bsky::*;
-pub use crypto::{KeyPair, PubKey};
 pub use db::AtpDatabase;
-pub use did::DidDocMeta;
 pub use models::*;
-pub use repo::{Mutation, RepoCommit, RepoStore};
-pub use ucan_p256::P256KeyMaterial;
 use web::*;
 
 pub struct AtpService {

diff --git a/adenosine-pds/src/mst.rs b/adenosine-pds/src/mst.rs
deleted file mode 100644
index 1d75da1..0000000
--- a/adenosine-pds/src/mst.rs
+++ /dev/null
@@ -1,399 +0,0 @@
-/// This is a simple immutable implemenation of of a Merkle Search Tree (MST) for atproto.
-///
-/// Mutations on the data structure are not implemented; instead the entire tree is read into a
-/// BTreeMap, that is mutated, and then the entire tree is regenerated. This makes implementation
-/// much simpler, at the obvious cost of performance in some situations.
-///
-/// The MST is basically a sorted key/value store where the key is a string and the value is a CID.
-/// Tree nodes are stored as DAG-CBOG IPLD blocks, with references as CIDs.
-///
-/// In the atproto MST implementation, SHA-256 is the hashing algorithm, and "leading zeros" are
-/// counted in blocks of 4 bits (so a leading zero byte counts as two zeros). This happens to match
-/// simple hex encoding of the SHA-256 hash.
-use anyhow::{anyhow, Context, Result};
-use ipfs_sqlite_block_store::BlockStore;
-use libipld::cbor::DagCborCodec;
-use libipld::multihash::Code;
-use libipld::prelude::Codec;
-use libipld::store::DefaultParams;
-use libipld::Block;
-use libipld::{Cid, DagCbor};
-use log::{debug, error, info};
-use std::collections::BTreeMap;
-use std::path::PathBuf;
-
-#[derive(Debug, DagCbor, PartialEq, Eq)]
-pub struct CommitNode {
-    pub root: Cid,
-    pub sig: Box<[u8]>,
-}
-
-#[derive(Debug, DagCbor, PartialEq, Eq)]
-pub struct RootNode {
-    pub auth_token: Option<String>,
-    pub prev: Option<Cid>,
-    // TODO: not 'metadata'?
-    pub meta: Cid,
-    pub data: Cid,
-}
-
-#[derive(Debug, DagCbor, PartialEq, Eq)]
-pub struct MetadataNode {
-    pub datastore: String, // "mst"
-    pub did: String,
-    pub version: u8, // 1
-}
-
-#[derive(Debug, DagCbor, PartialEq, Eq)]
-struct MstEntry {
-    p: u32,
-    k: String,
-    v: Cid,
-    t: Option<Cid>,
-}
-
-#[derive(Debug, DagCbor, PartialEq)]
-struct MstNode {
-    l: Option<Cid>,
-    e: Vec<MstEntry>,
-}
-
-struct WipEntry {
-    height: u8,
-    key: String,
-    val: Cid,
-    right: Option<Box<WipNode>>,
-}
-
-struct WipNode {
-    height: u8,
-    left: Option<Box<WipNode>>,
-    entries: Vec<WipEntry>,
-}
-
-fn get_mst_node(db: &mut BlockStore<DefaultParams>, cid: &Cid) -> Result<MstNode> {
-    let block = &db
-        .get_block(cid)?
-        .ok_or(anyhow!("reading MST node from blockstore"))?;
-    //println!("{:?}", block);
-    let mst_node: MstNode = DagCborCodec
-        .decode(block)
-        .context("parsing MST DAG-CBOR IPLD node from blockstore")?;
-    Ok(mst_node)
-}
-
-pub fn print_mst_keys(db: &mut BlockStore<DefaultParams>, cid: &Cid) -> Result<()> {
-    let node = get_mst_node(db, cid)?;
-    if let Some(ref left) = node.l {
-        print_mst_keys(db, left)?;
-    }
-    let mut key: String = "".to_string();
-    for entry in node.e.iter() {
-        key = format!("{}{}", &key[0..entry.p as usize], entry.k);
-        println!("\t{}\t-> {}", key, entry.v);
-        if let Some(ref right) = entry.t {
-            print_mst_keys(db, right)?;
-        }
-    }
-    Ok(())
-}
-
-pub fn dump_mst_keys(db_path: &PathBuf) -> Result<()> {
-    let mut db: BlockStore<DefaultParams> = BlockStore::open(db_path, Default::default())?;
-
-    let all_aliases: Vec<(Vec<u8>, Cid)> = db.aliases()?;
-    if all_aliases.is_empty() {
-        error!("expected at least one alias in block store");
-        std::process::exit(-1);
-    }
-
-    // print all the aliases
-    for (alias, commit_cid) in all_aliases.iter() {
-        let did = String::from_utf8_lossy(alias);
-        println!("{} -> {}", did, commit_cid);
-    }
-
-    let (did, commit_cid) = all_aliases[0].clone();
-    let did = String::from_utf8_lossy(&did);
-    info!("starting from {} [{}]", commit_cid, did);
-
-    // NOTE: the faster way to develop would have been to decode to libipld::ipld::Ipld first? meh
-
-    debug!(
-        "raw commit: {:?}",
-        &db.get_block(&commit_cid)?
-            .ok_or(anyhow!("expected commit block in store"))?
-    );
-    let commit: CommitNode = DagCborCodec.decode(
-        &db.get_block(&commit_cid)?
-            .ok_or(anyhow!("expected commit block in store"))?,
-    )?;
-    debug!("Commit: {:?}", commit);
-    let root: RootNode = DagCborCodec.decode(
-        &db.get_block(&commit.root)?
-            .ok_or(anyhow!("expected root block in store"))?,
-    )?;
-    debug!("Root: {:?}", root);
-    let metadata: MetadataNode = DagCborCodec.decode(
-        &db.get_block(&root.meta)?
-            .ok_or(anyhow!("expected metadata block in store"))?,
-    )?;
-    debug!("Metadata: {:?}", metadata);
-    let mst_node: MstNode = DagCborCodec.decode(
-        &db.get_block(&root.data)?
- .ok_or(anyhow!("expected block in store"))?, - )?; - debug!("MST root node: {:?}", mst_node); - debug!("============"); - - println!("{}", did); - print_mst_keys(&mut db, &root.data)?; - Ok(()) -} - -pub fn collect_mst_keys( - db: &mut BlockStore, - cid: &Cid, - map: &mut BTreeMap, -) -> Result<()> { - let node = get_mst_node(db, cid)?; - if let Some(ref left) = node.l { - collect_mst_keys(db, left, map)?; - } - let mut key: String = "".to_string(); - for entry in node.e.iter() { - key = format!("{}{}", &key[0..entry.p as usize], entry.k); - map.insert(key.clone(), entry.v); - if let Some(ref right) = entry.t { - collect_mst_keys(db, right, map)?; - } - } - Ok(()) -} - -fn leading_zeros(key: &str) -> u8 { - let digest = sha256::digest(key); - let digest = digest.as_bytes(); - for (i, c) in digest.iter().enumerate() { - if *c != b'0' { - return i as u8; - } - } - digest.len() as u8 -} - -// # python code to generate test cases -// import hashlib -// seed = b"asdf" -// while True: -// out = hashlib.sha256(seed).hexdigest() -// if out.startswith("00"): -// print(f"{seed} -> {out}") -// seed = b"app.bsky.feed.post/" + out.encode('utf8')[:12] - -#[test] -fn test_leading_zeros() { - assert_eq!(leading_zeros(""), 0); - assert_eq!(leading_zeros("asdf"), 0); - assert_eq!(leading_zeros("2653ae71"), 0); - assert_eq!(leading_zeros("88bfafc7"), 1); - assert_eq!(leading_zeros("2a92d355"), 2); - assert_eq!(leading_zeros("884976f5"), 3); - assert_eq!(leading_zeros("app.bsky.feed.post/454397e440ec"), 2); - assert_eq!(leading_zeros("app.bsky.feed.post/9adeb165882c"), 4); -} - -pub fn generate_mst( - db: &mut BlockStore, - map: &BTreeMap, -) -> Result { - // construct a "WIP" tree - let mut root: Option = None; - for (key, val) in map { - let height = leading_zeros(key); - let entry = WipEntry { - height, - key: key.clone(), - val: *val, - right: None, - }; - if let Some(node) = root { - root = Some(insert_entry(node, entry)); - } else { - root = Some(WipNode { - height: entry.height, - left: None, - entries: vec![entry], - }); - } - } - let empty_node = WipNode { - height: 0, - left: None, - entries: vec![], - }; - serialize_wip_tree(db, root.unwrap_or(empty_node)) -} - -// this routine assumes that entries are added in sorted key order. 
AKA, the `entry` being added is -// "further right" in the tree than any existing entries -fn insert_entry(mut node: WipNode, entry: WipEntry) -> WipNode { - // if we are higher on tree than existing node, replace it (recursively) with new layers first - while entry.height > node.height { - node = WipNode { - height: node.height + 1, - left: Some(Box::new(node)), - entries: vec![], - } - } - // if we are lower on tree, then need to descend first - if entry.height < node.height { - // if no entries at this node, then we should insert down "left" (which is just "down", not - // "before" any entries) - if node.entries.is_empty() { - if let Some(left) = node.left { - node.left = Some(Box::new(insert_entry(*left, entry))); - return node; - } else { - panic!("hit existing totally empty MST node"); - } - } - let mut last = node.entries.pop().expect("hit empty existing entry list"); - assert!(entry.key > last.key); - if last.right.is_some() { - last.right = Some(Box::new(insert_entry(*last.right.unwrap(), entry))); - } else { - let mut new_node = WipNode { - height: entry.height, - left: None, - entries: vec![entry], - }; - // may need to (recursively) insert multiple filler layers - while new_node.height + 1 < node.height { - new_node = WipNode { - height: new_node.height + 1, - left: Some(Box::new(new_node)), - entries: vec![], - } - } - last.right = Some(Box::new(new_node)); - } - node.entries.push(last); - return node; - } - // same height, simply append to end (but verify first) - assert!(node.height == entry.height); - if !node.entries.is_empty() { - let last = &node.entries.last().unwrap(); - assert!(entry.key > last.key); - } - node.entries.push(entry); - node -} - -/// returns the length of common characters between the two strings. Strings must be simple ASCII, -/// which should hold for current ATP MST keys (collection plus TID) -fn common_prefix_len(a: &str, b: &str) -> usize { - let a = a.as_bytes(); - let b = b.as_bytes(); - for i in 0..std::cmp::min(a.len(), b.len()) { - if a[i] != b[i] { - return i; - } - } - // strings are the same, up to common length - std::cmp::min(a.len(), b.len()) -} - -#[test] -fn test_common_prefix_len() { - assert_eq!(common_prefix_len("abc", "abc"), 3); - assert_eq!(common_prefix_len("", "abc"), 0); - assert_eq!(common_prefix_len("abc", ""), 0); - assert_eq!(common_prefix_len("ab", "abc"), 2); - assert_eq!(common_prefix_len("abc", "ab"), 2); - assert_eq!(common_prefix_len("abcde", "abc"), 3); - assert_eq!(common_prefix_len("abc", "abcde"), 3); - assert_eq!(common_prefix_len("abcde", "abc1"), 3); - assert_eq!(common_prefix_len("abcde", "abb"), 2); - assert_eq!(common_prefix_len("abcde", "qbb"), 0); - assert_eq!(common_prefix_len("abc", "abc\x00"), 3); - assert_eq!(common_prefix_len("abc\x00", "abc"), 3); -} - -#[test] -fn test_common_prefix_len_wide() { - // TODO: these are not cross-language consistent! 
- assert_eq!("jalapeño".len(), 9); // 8 in javascript - assert_eq!("💩".len(), 4); // 2 in javascript - assert_eq!("👩‍👧‍👧".len(), 18); // 8 in javascript - - // many of the below are different in JS; in Rust we *must* cast down to bytes to count - assert_eq!(common_prefix_len("jalapeño", "jalapeno"), 6); - assert_eq!(common_prefix_len("jalapeñoA", "jalapeñoB"), 9); - assert_eq!(common_prefix_len("coöperative", "coüperative"), 3); - assert_eq!(common_prefix_len("abc💩abc", "abcabc"), 3); - assert_eq!(common_prefix_len("💩abc", "💩ab"), 6); - assert_eq!(common_prefix_len("abc👩‍👦‍👦de", "abc👩‍👧‍👧de"), 13); -} - -fn serialize_wip_tree( - db: &mut BlockStore, - wip_node: WipNode, -) -> Result { - let left: Option = if let Some(left) = wip_node.left { - Some(serialize_wip_tree(db, *left)?) - } else { - None - }; - - let mut entries: Vec = vec![]; - let mut last_key = "".to_string(); - for wip_entry in wip_node.entries { - let right: Option = if let Some(right) = wip_entry.right { - Some(serialize_wip_tree(db, *right)?) - } else { - None - }; - let prefix_len = common_prefix_len(&last_key, &wip_entry.key); - entries.push(MstEntry { - k: wip_entry.key[prefix_len..].to_string(), - p: prefix_len as u32, - v: wip_entry.val, - t: right, - }); - last_key = wip_entry.key; - } - let mst_node = MstNode { - l: left, - e: entries, - }; - let block = Block::::encode(DagCborCodec, Code::Sha2_256, &mst_node)?; - let cid = *block.cid(); - db.put_block(block, None)?; - Ok(cid) -} - -#[test] -fn test_mst_node_cbor() { - use std::str::FromStr; - let cid1 = - Cid::from_str("bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454").unwrap(); - let node = MstNode { - l: None, - e: vec![MstEntry { - k: "asdf".to_string(), - p: 0, - v: cid1, - t: None, - }], - }; - let block = Block::::encode(DagCborCodec, Code::Sha2_256, &node).unwrap(); - println!("{:?}", block); - //assert_eq!(1, 2); - let cid = *block.cid(); - assert_eq!( - cid.to_string(), - "bafyreidaftbr35xhh4lzmv5jcoeufqjh75ohzmz6u56v7n2ippbtxdgqqe" - ); -} diff --git a/adenosine-pds/src/repo.rs b/adenosine-pds/src/repo.rs deleted file mode 100644 index f77b459..0000000 --- a/adenosine-pds/src/repo.rs +++ /dev/null @@ -1,466 +0,0 @@ -use crate::car::{ - load_car_bytes_to_blockstore, load_car_path_to_blockstore, read_car_bytes_from_blockstore, -}; -use crate::mst::{collect_mst_keys, generate_mst, CommitNode, MetadataNode, RootNode}; -use crate::KeyPair; -use adenosine::identifiers::{Did, Nsid, Tid}; -use anyhow::{anyhow, ensure, Context, Result}; -use ipfs_sqlite_block_store::BlockStore; -use libipld::cbor::DagCborCodec; -use libipld::multihash::Code; -use libipld::prelude::Codec; -use libipld::store::DefaultParams; -use libipld::{Block, Cid, Ipld}; -use serde_json::{json, Value}; -use std::borrow::Cow; -use std::collections::BTreeMap; -use std::collections::HashSet; -use std::path::PathBuf; -use std::str::FromStr; - -#[derive(Debug, serde::Serialize)] -pub struct RepoCommit { - pub sig: Box<[u8]>, - pub commit_cid: Cid, - pub root_cid: Cid, - pub did: Did, - pub prev: Option, - pub meta_cid: Cid, - pub mst_cid: Cid, -} - -impl RepoCommit { - /// Returns a JSON object version of this struct, with CIDs and signatures in expected format - /// (aka, CID as a string, not an array of bytes). 
diff --git a/adenosine-pds/src/repo.rs b/adenosine-pds/src/repo.rs
deleted file mode 100644
index f77b459..0000000
--- a/adenosine-pds/src/repo.rs
+++ /dev/null
@@ -1,466 +0,0 @@
-use crate::car::{
-    load_car_bytes_to_blockstore, load_car_path_to_blockstore, read_car_bytes_from_blockstore,
-};
-use crate::mst::{collect_mst_keys, generate_mst, CommitNode, MetadataNode, RootNode};
-use crate::KeyPair;
-use adenosine::identifiers::{Did, Nsid, Tid};
-use anyhow::{anyhow, ensure, Context, Result};
-use ipfs_sqlite_block_store::BlockStore;
-use libipld::cbor::DagCborCodec;
-use libipld::multihash::Code;
-use libipld::prelude::Codec;
-use libipld::store::DefaultParams;
-use libipld::{Block, Cid, Ipld};
-use serde_json::{json, Value};
-use std::borrow::Cow;
-use std::collections::BTreeMap;
-use std::collections::HashSet;
-use std::path::PathBuf;
-use std::str::FromStr;
-
-#[derive(Debug, serde::Serialize)]
-pub struct RepoCommit {
-    pub sig: Box<[u8]>,
-    pub commit_cid: Cid,
-    pub root_cid: Cid,
-    pub did: Did,
-    pub prev: Option<Cid>,
-    pub meta_cid: Cid,
-    pub mst_cid: Cid,
-}
-
-impl RepoCommit {
-    /// Returns a JSON object version of this struct, with CIDs and signatures in expected format
-    /// (aka, CID as a string, not an array of bytes).
-    pub fn to_pretty_json(&self) -> Value {
-        json!({
-            "sig": data_encoding::HEXUPPER.encode(&self.sig),
-            "commit_cid": self.commit_cid.to_string(),
-            "root_cid": self.root_cid.to_string(),
-            "did": self.did.to_string(),
-            "prev": self.prev.map(|v| v.to_string()),
-            "meta_cid": self.meta_cid.to_string(),
-            "mst_cid": self.mst_cid.to_string(),
-        })
-    }
-}
-
-pub struct RepoStore {
-    // TODO: only public for test/debug; should wrap instead
-    pub db: BlockStore<DefaultParams>,
-}
-
-pub enum Mutation {
-    Create(Nsid, Tid, Ipld),
-    Update(Nsid, Tid, Ipld),
-    Delete(Nsid, Tid),
-}
-
-impl RepoStore {
-    pub fn open(db_path: &PathBuf) -> Result<Self> {
-        Ok(RepoStore {
-            db: BlockStore::open(db_path, Default::default())?,
-        })
-    }
-
-    pub fn open_ephemeral() -> Result<Self> {
-        Ok(RepoStore {
-            db: BlockStore::open_path(ipfs_sqlite_block_store::DbPath::Memory, Default::default())?,
-        })
-    }
-
-    pub fn new_connection(&mut self) -> Result<Self> {
-        Ok(RepoStore {
-            db: self.db.additional_connection()?,
-        })
-    }
-
-    pub fn get_ipld(&mut self, cid: &Cid) -> Result<Ipld> {
-        if let Some(b) = self.db.get_block(cid)? {
-            let block: Block<DefaultParams> = Block::new(*cid, b)?;
-            block.ipld()
-        } else {
-            Err(anyhow!("missing IPLD CID: {}", cid))
-        }
-    }
-
-    pub fn get_blob(&mut self, cid: &Cid) -> Result<Option<Vec<u8>>> {
-        Ok(self.db.get_block(cid)?)
-    }
-
-    /// Returns CID that was inserted
-    pub fn put_ipld<S: libipld::codec::Encode<DagCborCodec>>(&mut self, record: &S) -> Result<Cid> {
-        let block = Block::<DefaultParams>::encode(DagCborCodec, Code::Sha2_256, record)?;
-        let cid = *block.cid();
-        self.db
-            .put_block(block, None)
-            .context("writing IPLD DAG-CBOR record to blockstore")?;
-        Ok(cid)
-    }
-
-    /// Returns CID that was inserted
-    pub fn put_blob(&mut self, data: &[u8]) -> Result<Cid> {
-        let block = Block::<DefaultParams>::encode(libipld::raw::RawCodec, Code::Sha2_256, data)?;
-        let cid = *block.cid();
-        self.db
-            .put_block(block, None)
-            .context("writing non-record blob to blockstore")?;
-        Ok(cid)
-    }
-
-    /// Quick alias lookup
-    pub fn lookup_commit(&mut self, did: &Did) -> Result<Option<Cid>> {
-        Ok(self.db.resolve(Cow::from(did.as_bytes()))?)
-    }
-
-    pub fn get_commit(&mut self, commit_cid: &Cid) -> Result<RepoCommit> {
-        // read records by CID: commit, root, meta
-        let commit_node: CommitNode = DagCborCodec
-            .decode(
-                &self
-                    .db
-                    .get_block(commit_cid)?
-                    .ok_or(anyhow!("expected commit block in store"))?,
-            )
-            .context("parsing commit IPLD node from blockstore")?;
-        let root_node: RootNode = DagCborCodec
-            .decode(
-                &self
-                    .db
-                    .get_block(&commit_node.root)?
-                    .ok_or(anyhow!("expected root block in store"))?,
-            )
-            .context("parsing root IPLD node from blockstore")?;
-        let metadata_node: MetadataNode = DagCborCodec
-            .decode(
-                &self
-                    .db
-                    .get_block(&root_node.meta)?
- .ok_or(anyhow!("expected metadata block in store"))?, - ) - .context("parsing metadata IPLD node from blockstore")?; - ensure!( - metadata_node.datastore == "mst", - "unexpected repo metadata.datastore: {}", - metadata_node.datastore - ); - ensure!( - metadata_node.version == 1, - "unexpected repo metadata.version: {}", - metadata_node.version - ); - Ok(RepoCommit { - sig: commit_node.sig, - commit_cid: *commit_cid, - root_cid: commit_node.root, - meta_cid: root_node.meta, - did: Did::from_str(&metadata_node.did)?, - prev: root_node.prev, - mst_cid: root_node.data, - }) - } - - pub fn get_mst_record_by_key(&mut self, mst_cid: &Cid, key: &str) -> Result> { - let map = self.mst_to_map(mst_cid)?; - if let Some(cid) = map.get(key) { - self.get_ipld(cid).map(Some) - } else { - Ok(None) - } - } - - pub fn collections(&mut self, did: &Did) -> Result> { - let commit = if let Some(c) = self.lookup_commit(did)? { - self.get_commit(&c)? - } else { - return Err(anyhow!("DID not found in repositories: {}", did)); - }; - let map = self.mst_to_map(&commit.mst_cid)?; - let mut collections: HashSet = Default::default(); - // XXX: confirm that keys actually start with leading slash - for k in map.keys() { - let coll = k.split('/').nth(1).unwrap(); - collections.insert(coll.to_string()); - } - Ok(collections.into_iter().collect()) - } - - pub fn get_atp_record( - &mut self, - did: &Did, - collection: &Nsid, - tid: &Tid, - ) -> Result> { - let commit = if let Some(c) = self.lookup_commit(did)? { - self.get_commit(&c)? - } else { - return Ok(None); - }; - let record_key = format!("/{}/{}", collection, tid); - self.get_mst_record_by_key(&commit.mst_cid, &record_key) - } - - pub fn write_metadata(&mut self, did: &Did) -> Result { - self.put_ipld(&MetadataNode { - datastore: "mst".to_string(), - did: did.to_string(), - version: 1, - }) - } - - pub fn write_root(&mut self, meta_cid: Cid, prev: Option, mst_cid: Cid) -> Result { - self.put_ipld(&RootNode { - auth_token: None, - prev, - meta: meta_cid, - data: mst_cid, - }) - } - - pub fn write_commit(&mut self, did: &Did, root_cid: Cid, sig: &str) -> Result { - let commit_cid = self.put_ipld(&CommitNode { - root: root_cid, - sig: sig.as_bytes().to_vec().into_boxed_slice(), - })?; - self.db.alias(did.as_bytes().to_vec(), Some(&commit_cid))?; - Ok(commit_cid) - } - - pub fn mst_from_map(&mut self, map: &BTreeMap) -> Result { - let mst_cid = generate_mst(&mut self.db, map)?; - Ok(mst_cid) - } - - pub fn mst_to_map(&mut self, mst_cid: &Cid) -> Result> { - let mut cid_map: BTreeMap = Default::default(); - collect_mst_keys(&mut self.db, mst_cid, &mut cid_map) - .context("reading repo MST from blockstore")?; - Ok(cid_map) - } - - pub fn update_mst(&mut self, mst_cid: &Cid, mutations: &[Mutation]) -> Result { - let mut cid_map = self.mst_to_map(mst_cid)?; - for m in mutations.iter() { - match m { - Mutation::Create(collection, tid, val) => { - let cid = self.put_ipld(val)?; - cid_map.insert(format!("/{}/{}", collection, tid), cid); - } - Mutation::Update(collection, tid, val) => { - let cid = self.put_ipld(val)?; - cid_map.insert(format!("/{}/{}", collection, tid), cid); - } - Mutation::Delete(collection, tid) => { - cid_map.remove(&format!("/{}/{}", collection, tid)); - } - } - } - let mst_cid = generate_mst(&mut self.db, &cid_map)?; - Ok(mst_cid) - } - - /// High-level helper to write a batch of mutations to the repo corresponding to the DID, and - /// signing the resulting new root CID with the given keypair. 
-    pub fn mutate_repo(
-        &mut self,
-        did: &Did,
-        mutations: &[Mutation],
-        signing_key: &KeyPair,
-    ) -> Result<Cid> {
-        let commit_cid = self.lookup_commit(did)?.unwrap();
-        let last_commit = self.get_commit(&commit_cid)?;
-        let new_mst_cid = self
-            .update_mst(&last_commit.mst_cid, mutations)
-            .context("updating MST in repo")?;
-        let new_root_cid = self.write_root(
-            last_commit.meta_cid,
-            Some(last_commit.commit_cid),
-            new_mst_cid,
-        )?;
-        // TODO: is this how signatures are supposed to work?
-        let sig = signing_key.sign_bytes(new_root_cid.to_string().as_bytes());
-        self.write_commit(did, new_root_cid, &sig)
-    }
-
-    /// Reads in a full MST tree starting at a repo commit, then re-builds and re-writes the tree
-    /// in to the repo, and verifies that both the MST root CIDs and the repo root CIDs are identical.
-    pub fn verify_repo_mst(&mut self, commit_cid: &Cid) -> Result<()> {
-        // load existing commit and MST tree
-        let existing_commit = self.get_commit(commit_cid)?;
-        let repo_map = self.mst_to_map(&existing_commit.mst_cid)?;
-
-        // write MST tree, and verify root CID
-        let new_mst_cid = self.mst_from_map(&repo_map)?;
-        if new_mst_cid != existing_commit.mst_cid {
-            Err(anyhow!(
-                "MST root CID did not verify: {} != {}",
-                existing_commit.mst_cid,
-                new_mst_cid
-            ))?;
-        }
-
-        let new_root_cid =
-            self.write_root(existing_commit.meta_cid, existing_commit.prev, new_mst_cid)?;
-        if new_root_cid != existing_commit.root_cid {
-            Err(anyhow!(
-                "repo root CID did not verify: {} != {}",
-                existing_commit.root_cid,
-                new_root_cid
-            ))?;
-        }
-
-        Ok(())
-    }
-
-    /// Import blocks from a CAR file in memory, optionally setting an alias pointing to the input
-    /// (eg, a DID identifier).
-    ///
-    /// Does not currently do any validation of, eg, signatures. It is naive and incomplete to use
-    /// this to simply import CAR content from users, remote servers, etc.
-    ///
-    /// Returns the root commit from the CAR file, which may or may not actually be a "commit"
-    /// block.
-    pub fn import_car_bytes(&mut self, car_bytes: &[u8], alias: Option<String>) -> Result<Cid> {
-        let cid = load_car_bytes_to_blockstore(&mut self.db, car_bytes)?;
-        self.verify_repo_mst(&cid)?;
-        if let Some(alias) = alias {
-            self.db.alias(alias.as_bytes().to_vec(), Some(&cid))?;
-        }
-        Ok(cid)
-    }
-
-    /// Similar to import_car_bytes(), but reads from a local file on disk instead of from memory.
-    pub fn import_car_path(&mut self, car_path: &PathBuf, alias: Option<String>) -> Result<Cid> {
-        let cid = load_car_path_to_blockstore(&mut self.db, car_path)?;
-        self.verify_repo_mst(&cid)?;
-        if let Some(alias) = alias {
-            self.db.alias(alias.as_bytes().to_vec(), Some(&cid))?;
-        }
-        Ok(cid)
-    }
-
-    /// Exports in CAR format to a Writer
-    ///
-    /// The "from" commit CID feature is not implemented.
- pub fn export_car( - &mut self, - commit_cid: &Cid, - _from_commit_cid: Option<&Cid>, - ) -> Result> { - // TODO: from_commit_cid - read_car_bytes_from_blockstore(&mut self.db, commit_cid) - } -} - -#[test] -fn test_repo_mst() { - use libipld::ipld; - - let mut repo = RepoStore::open_ephemeral().unwrap(); - let did = Did::from_str("did:plc:dummy").unwrap(); - - // basic blob and IPLD record put/get - let blob = b"beware the swamp thing"; - let blob_cid = repo.put_blob(blob).unwrap(); - - let record = ipld!({"some-thing": 123}); - let record_cid = repo.put_ipld(&record).unwrap(); - - repo.get_blob(&blob_cid).unwrap().unwrap(); - repo.get_ipld(&record_cid).unwrap(); - - // basic MST get/put - let mut map: BTreeMap = Default::default(); - let empty_map_cid = repo.mst_from_map(&map).unwrap(); - assert_eq!(map, repo.mst_to_map(&empty_map_cid).unwrap()); - assert!(repo - .get_mst_record_by_key(&empty_map_cid, "/test.records/44444444444444") - .unwrap() - .is_none()); - - map.insert("/blobs/1".to_string(), blob_cid.clone()); - map.insert("/blobs/2".to_string(), blob_cid.clone()); - map.insert( - "/test.records/44444444444444".to_string(), - record_cid.clone(), - ); - map.insert( - "/test.records/22222222222222".to_string(), - record_cid.clone(), - ); - let simple_map_cid = repo.mst_from_map(&map).unwrap(); - assert_eq!(map, repo.mst_to_map(&simple_map_cid).unwrap()); - - // create root and commit IPLD nodes - let meta_cid = repo.write_metadata(&did).unwrap(); - let simple_root_cid = repo.write_root(meta_cid, None, simple_map_cid).unwrap(); - let simple_commit_cid = repo - .write_commit(&did, simple_root_cid, "dummy-sig") - .unwrap(); - assert_eq!( - Some(record.clone()), - repo.get_mst_record_by_key(&simple_map_cid, "/test.records/44444444444444") - .unwrap() - ); - assert_eq!( - Some(record.clone()), - repo.get_atp_record( - &did, - &Nsid::from_str("test.records").unwrap(), - &Tid::from_str("44444444444444").unwrap() - ) - .unwrap() - ); - assert!(repo - .get_mst_record_by_key(&simple_map_cid, "/test.records/33333333333333") - .unwrap() - .is_none()); - assert!(repo - .get_atp_record( - &did, - &Nsid::from_str("test.records").unwrap(), - &Tid::from_str("33333333333333").unwrap() - ) - .unwrap() - .is_none()); - assert_eq!( - Some(simple_commit_cid.clone()), - repo.lookup_commit(&did).unwrap() - ); - - map.insert( - "/test.records/33333333333333".to_string(), - record_cid.clone(), - ); - let simple3_map_cid = repo.mst_from_map(&map).unwrap(); - let simple3_root_cid = repo - .write_root(meta_cid, Some(simple_commit_cid), simple3_map_cid) - .unwrap(); - let simple3_commit_cid = repo - .write_commit(&did, simple3_root_cid, "dummy-sig3") - .unwrap(); - assert_eq!(map, repo.mst_to_map(&simple3_map_cid).unwrap()); - assert_eq!( - Some(record.clone()), - repo.get_mst_record_by_key(&simple3_map_cid, "/test.records/33333333333333") - .unwrap() - ); - assert_eq!( - Some(record.clone()), - repo.get_atp_record( - &did, - &Nsid::from_str("test.records").unwrap(), - &Tid::from_str("33333333333333").unwrap() - ) - .unwrap() - ); - let commit = repo.get_commit(&simple3_commit_cid).unwrap(); - assert_eq!(commit.sig.to_vec(), b"dummy-sig3".to_vec()); - assert_eq!(commit.did, did); - assert_eq!(commit.prev, Some(simple_commit_cid)); - assert_eq!(commit.mst_cid, simple3_map_cid); - assert_eq!( - Some(simple3_commit_cid.clone()), - repo.lookup_commit(&did).unwrap() - ); -} diff --git a/adenosine-pds/src/ucan_p256.rs b/adenosine-pds/src/ucan_p256.rs deleted file mode 100644 index b8b6cd2..0000000 --- 
a/adenosine-pds/src/ucan_p256.rs +++ /dev/null @@ -1,85 +0,0 @@ -/// Implement UCAN KeyMaterial trait for p256 -/// -/// This is needed because the 'ucan-key-support' crate does not include support for this key type. -use anyhow::{anyhow, Result}; -use async_trait::async_trait; - -use p256::ecdsa::signature::{Signer, Verifier}; -use p256::ecdsa::{Signature, SigningKey as P256PrivateKey, VerifyingKey as P256PublicKey}; - -use ucan::crypto::KeyMaterial; - -pub use ucan::crypto::{did::P256_MAGIC_BYTES, JwtSignatureAlgorithm}; - -#[derive(Clone)] -pub struct P256KeyMaterial(pub P256PublicKey, pub Option); - -#[cfg_attr(target_arch="wasm32", async_trait(?Send))] -#[cfg_attr(not(target_arch = "wasm32"), async_trait)] -impl KeyMaterial for P256KeyMaterial { - fn get_jwt_algorithm_name(&self) -> String { - JwtSignatureAlgorithm::ES256.to_string() - } - - async fn get_did(&self) -> Result { - let bytes = [P256_MAGIC_BYTES, &self.0.to_encoded_point(true).to_bytes()].concat(); - Ok(format!("did:key:z{}", bs58::encode(bytes).into_string())) - } - - async fn sign(&self, payload: &[u8]) -> Result> { - match self.1 { - Some(ref private_key) => { - let signature = private_key.sign(payload); - Ok(signature.to_vec()) - } - None => Err(anyhow!("No private key; cannot sign data")), - } - } - - async fn verify(&self, payload: &[u8], signature: &[u8]) -> Result<()> { - let signature = Signature::try_from(signature)?; - self.0 - .verify(payload, &signature) - .map_err(|error| anyhow!("Could not verify signature: {:?}", error)) - } -} - -#[cfg(test)] -mod tests { - use super::{P256KeyMaterial, Result, P256_MAGIC_BYTES}; - use p256::ecdsa::{SigningKey as P256PrivateKey, VerifyingKey as P256PublicKey}; - use ucan::{ - builder::UcanBuilder, - crypto::{did::DidParser, KeyMaterial}, - ucan::Ucan, - }; - - pub fn bytes_to_p256_key(bytes: Vec) -> Result> { - let public_key = P256PublicKey::try_from(bytes.as_slice())?; - Ok(Box::new(P256KeyMaterial(public_key, None))) - } - - #[cfg_attr(not(target_arch = "wasm32"), tokio::test)] - async fn it_can_sign_and_verify_a_ucan() { - let private_key = P256PrivateKey::random(&mut p256::elliptic_curve::rand_core::OsRng); - let public_key = P256PublicKey::from(&private_key); - - let key_material = P256KeyMaterial(public_key, Some(private_key)); - let token_string = UcanBuilder::default() - .issued_by(&key_material) - .for_audience(key_material.get_did().await.unwrap().as_str()) - .with_lifetime(60) - .build() - .unwrap() - .sign() - .await - .unwrap() - .encode() - .unwrap(); - - let mut did_parser = DidParser::new(&[(P256_MAGIC_BYTES, bytes_to_p256_key)]); - - let ucan = Ucan::try_from_token_string(&token_string).unwrap(); - ucan.check_signature(&mut did_parser).await.unwrap(); - } -} diff --git a/adenosine-pds/src/vendored.rs b/adenosine-pds/src/vendored.rs deleted file mode 100644 index 74584ad..0000000 --- a/adenosine-pds/src/vendored.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod iroh_car; diff --git a/adenosine-pds/src/vendored/iroh_car/README.md b/adenosine-pds/src/vendored/iroh_car/README.md deleted file mode 100644 index 0cad81b..0000000 --- a/adenosine-pds/src/vendored/iroh_car/README.md +++ /dev/null @@ -1,27 +0,0 @@ -# iroh-car - -[CAR file](https://ipld.io/specs/transport/car/) support for iroh. "CAR" stands -for Content Addressable aRchives. A CAR file typically contains a serialized -representation of an [IPLD -DAG](https://docs.ipfs.tech/concepts/merkle-dag/#merkle-directed-acyclic-graphs-dags), -though is general enough to contain arbitrary IPLD blocks. 
- -Currently supports only [v1](https://ipld.io/specs/transport/car/carv1/). - -It is part of [iroh](https://github.com/n0-computer/iroh). - -## License - - -Licensed under either of Apache License, Version -2.0 or MIT license at your option. - - -
- - -Unless you explicitly state otherwise, any contribution intentionally submitted -for inclusion in this crate by you, as defined in the Apache-2.0 license, shall -be dual licensed as above, without any additional terms or conditions. - - diff --git a/adenosine-pds/src/vendored/iroh_car/error.rs b/adenosine-pds/src/vendored/iroh_car/error.rs deleted file mode 100644 index 1edcefe..0000000 --- a/adenosine-pds/src/vendored/iroh_car/error.rs +++ /dev/null @@ -1,29 +0,0 @@ -use libipld::cid; -use thiserror::Error; - -/// Car utility error -#[derive(Debug, Error)] -pub enum Error { - #[error("Failed to parse CAR file: {0}")] - Parsing(String), - #[error("Invalid CAR file: {0}")] - InvalidFile(String), - #[error("Io error: {0}")] - Io(#[from] std::io::Error), - #[error("Cbor encoding error: {0}")] - Cbor(#[from] libipld::error::Error), - #[error("ld read too large {0}")] - LdReadTooLarge(usize), -} - -impl From for Error { - fn from(err: cid::Error) -> Error { - Error::Parsing(err.to_string()) - } -} - -impl From for Error { - fn from(err: cid::multihash::Error) -> Error { - Error::Parsing(err.to_string()) - } -} diff --git a/adenosine-pds/src/vendored/iroh_car/header.rs b/adenosine-pds/src/vendored/iroh_car/header.rs deleted file mode 100644 index cd0feb7..0000000 --- a/adenosine-pds/src/vendored/iroh_car/header.rs +++ /dev/null @@ -1,107 +0,0 @@ -#![allow(unused)] - -use libipld::cbor::DagCborCodec; -use libipld::codec::Codec; -use libipld::ipld; -use libipld::Cid; - -use super::error::Error; - -/// A car header. -#[derive(Debug, Clone, PartialEq, Eq)] -#[non_exhaustive] -pub enum CarHeader { - V1(CarHeaderV1), -} - -impl CarHeader { - pub fn new_v1(roots: Vec) -> Self { - Self::V1(roots.into()) - } - - pub fn decode(buffer: &[u8]) -> Result { - let header: CarHeaderV1 = DagCborCodec - .decode(buffer) - .map_err(|e| Error::Parsing(e.to_string()))?; - - if header.roots.is_empty() { - return Err(Error::Parsing("empty CAR file".to_owned())); - } - - if header.version != 1 { - return Err(Error::InvalidFile( - "Only CAR file version 1 is supported".to_string(), - )); - } - - Ok(CarHeader::V1(header)) - } - - pub fn encode(&self) -> Result, Error> { - match self { - CarHeader::V1(ref header) => { - let res = DagCborCodec.encode(header)?; - Ok(res) - } - } - } - - pub fn roots(&self) -> &[Cid] { - match self { - CarHeader::V1(header) => &header.roots, - } - } - - pub fn version(&self) -> u64 { - match self { - CarHeader::V1(_) => 1, - } - } -} - -/// CAR file header version 1. 
-#[derive(Debug, Clone, Default, libipld::DagCbor, PartialEq, Eq)] -pub struct CarHeaderV1 { - #[ipld] - pub roots: Vec, - #[ipld] - pub version: u64, -} - -impl CarHeaderV1 { - /// Creates a new CAR file header - pub fn new(roots: Vec, version: u64) -> Self { - Self { roots, version } - } -} - -impl From> for CarHeaderV1 { - fn from(roots: Vec) -> Self { - Self { roots, version: 1 } - } -} - -#[cfg(test)] -mod tests { - use libipld::cbor::DagCborCodec; - use libipld::codec::{Decode, Encode}; - use multihash::MultihashDigest; - - use super::*; - - #[test] - fn symmetric_header_v1() { - let digest = multihash::Code::Blake2b256.digest(b"test"); - let cid = Cid::new_v1(DagCborCodec.into(), digest); - - let header = CarHeaderV1::from(vec![cid]); - - let mut bytes = Vec::new(); - header.encode(DagCborCodec, &mut bytes).unwrap(); - - assert_eq!( - CarHeaderV1::decode(DagCborCodec, &mut std::io::Cursor::new(&bytes)).unwrap(), - header - ); - } -} diff --git a/adenosine-pds/src/vendored/iroh_car/lib.rs b/adenosine-pds/src/vendored/iroh_car/lib.rs deleted file mode 100644 index d4e5f66..0000000 --- a/adenosine-pds/src/vendored/iroh_car/lib.rs +++ /dev/null @@ -1,11 +0,0 @@ -//! Implementation of the [car](https://ipld.io/specs/transport/car/) format. - -mod error; -mod header; -mod reader; -mod util; -mod writer; - -pub use crate::header::CarHeader; -pub use crate::reader::CarReader; -pub use crate::writer::CarWriter; diff --git a/adenosine-pds/src/vendored/iroh_car/mod.rs b/adenosine-pds/src/vendored/iroh_car/mod.rs deleted file mode 100644 index b40e046..0000000 --- a/adenosine-pds/src/vendored/iroh_car/mod.rs +++ /dev/null @@ -1,10 +0,0 @@ -/// Module version of lib.rs -mod error; -mod header; -mod reader; -mod util; -mod writer; - -pub use header::CarHeader; -pub use reader::CarReader; -pub use writer::CarWriter; diff --git a/adenosine-pds/src/vendored/iroh_car/reader.rs b/adenosine-pds/src/vendored/iroh_car/reader.rs deleted file mode 100644 index 90313f5..0000000 --- a/adenosine-pds/src/vendored/iroh_car/reader.rs +++ /dev/null @@ -1,101 +0,0 @@ -#![allow(unused)] - -use futures::Stream; -use libipld::Cid; -use tokio::io::AsyncRead; - -use super::{ - error::Error, - header::CarHeader, - util::{ld_read, read_node}, -}; - -/// Reads CAR files that are in a BufReader -pub struct CarReader { - reader: R, - header: CarHeader, - buffer: Vec, -} - -impl CarReader -where - R: AsyncRead + Send + Unpin, -{ - /// Creates a new CarReader and parses the CarHeader - pub async fn new(mut reader: R) -> Result { - let mut buffer = Vec::new(); - - match ld_read(&mut reader, &mut buffer).await? { - Some(buf) => { - let header = CarHeader::decode(buf)?; - - Ok(CarReader { - reader, - header, - buffer, - }) - } - None => Err(Error::Parsing( - "failed to parse uvarint for header".to_string(), - )), - } - } - - /// Returns the header of this car file. 
- pub fn header(&self) -> &CarHeader { - &self.header - } - - /// Returns the next IPLD Block in the buffer - pub async fn next_block(&mut self) -> Result)>, Error> { - read_node(&mut self.reader, &mut self.buffer).await - } - - pub fn stream(self) -> impl Stream), Error>> { - futures::stream::try_unfold(self, |mut this| async move { - let maybe_block = read_node(&mut this.reader, &mut this.buffer).await?; - Ok(maybe_block.map(|b| (b, this))) - }) - } -} - -#[cfg(test)] -mod tests { - use std::io::Cursor; - - use futures::TryStreamExt; - use libipld::cbor::DagCborCodec; - use libipld::Cid; - use multihash::MultihashDigest; - - use super::super::{header::CarHeaderV1, writer::CarWriter}; - - use super::*; - - #[tokio::test] - async fn car_write_read() { - let digest_test = multihash::Code::Blake2b256.digest(b"test"); - let cid_test = Cid::new_v1(DagCborCodec.into(), digest_test); - - let digest_foo = multihash::Code::Blake2b256.digest(b"foo"); - let cid_foo = Cid::new_v1(DagCborCodec.into(), digest_foo); - - let header = CarHeader::V1(CarHeaderV1::from(vec![cid_foo])); - - let mut buffer = Vec::new(); - let mut writer = CarWriter::new(header, &mut buffer); - writer.write(cid_test, b"test").await.unwrap(); - writer.write(cid_foo, b"foo").await.unwrap(); - writer.finish().await.unwrap(); - - let reader = Cursor::new(&buffer); - let car_reader = CarReader::new(reader).await.unwrap(); - let files: Vec<_> = car_reader.stream().try_collect().await.unwrap(); - - assert_eq!(files.len(), 2); - assert_eq!(files[0].0, cid_test); - assert_eq!(files[0].1, b"test"); - assert_eq!(files[1].0, cid_foo); - assert_eq!(files[1].1, b"foo"); - } -} diff --git a/adenosine-pds/src/vendored/iroh_car/util.rs b/adenosine-pds/src/vendored/iroh_car/util.rs deleted file mode 100644 index 90435b1..0000000 --- a/adenosine-pds/src/vendored/iroh_car/util.rs +++ /dev/null @@ -1,95 +0,0 @@ -use integer_encoding::VarIntAsyncReader; -use libipld::Cid; -use tokio::io::{AsyncRead, AsyncReadExt}; - -use super::error::Error; - -/// Maximum size that is used for single node. -pub(crate) const MAX_ALLOC: usize = 4 * 1024 * 1024; - -pub(crate) async fn ld_read(mut reader: R, buf: &mut Vec) -> Result, Error> -where - R: AsyncRead + Send + Unpin, -{ - let length: usize = match VarIntAsyncReader::read_varint_async(&mut reader).await { - Ok(len) => len, - Err(e) => { - if e.kind() == std::io::ErrorKind::UnexpectedEof { - return Ok(None); - } - return Err(Error::Parsing(e.to_string())); - } - }; - - if length > MAX_ALLOC { - return Err(Error::LdReadTooLarge(length)); - } - if length > buf.len() { - buf.resize(length, 0); - } - - reader - .read_exact(&mut buf[..length]) - .await - .map_err(|e| Error::Parsing(e.to_string()))?; - - Ok(Some(&buf[..length])) -} - -pub(crate) async fn read_node( - buf_reader: &mut R, - buf: &mut Vec, -) -> Result)>, Error> -where - R: AsyncRead + Send + Unpin, -{ - if let Some(buf) = ld_read(buf_reader, buf).await? 
{ - let mut cursor = std::io::Cursor::new(buf); - let c = Cid::read_bytes(&mut cursor)?; - let pos = cursor.position() as usize; - - return Ok(Some((c, buf[pos..].to_vec()))); - } - Ok(None) -} - -#[cfg(test)] -mod tests { - use integer_encoding::VarIntAsyncWriter; - use tokio::io::{AsyncWrite, AsyncWriteExt}; - - use super::*; - - async fn ld_write<'a, W>(writer: &mut W, bytes: &[u8]) -> Result<(), Error> - where - W: AsyncWrite + Send + Unpin, - { - writer.write_varint_async(bytes.len()).await?; - writer.write_all(bytes).await?; - writer.flush().await?; - Ok(()) - } - - #[tokio::test] - async fn ld_read_write_good() { - let mut buffer = Vec::::new(); - ld_write(&mut buffer, b"test bytes").await.unwrap(); - let reader = std::io::Cursor::new(buffer); - - let mut buffer = vec![1u8; 1024]; - let read = ld_read(reader, &mut buffer).await.unwrap().unwrap(); - assert_eq!(read, b"test bytes"); - } - - #[tokio::test] - async fn ld_read_write_fail() { - let mut buffer = Vec::::new(); - let size = MAX_ALLOC + 1; - ld_write(&mut buffer, &vec![2u8; size]).await.unwrap(); - let reader = std::io::Cursor::new(buffer); - - let mut buffer = vec![1u8; 1024]; - let read = ld_read(reader, &mut buffer).await; - assert!(matches!(read, Err(Error::LdReadTooLarge(_)))); - } -} diff --git a/adenosine-pds/src/vendored/iroh_car/writer.rs b/adenosine-pds/src/vendored/iroh_car/writer.rs deleted file mode 100644 index b7e25d3..0000000 --- a/adenosine-pds/src/vendored/iroh_car/writer.rs +++ /dev/null @@ -1,73 +0,0 @@ -#![allow(unused)] - -use integer_encoding::VarIntAsyncWriter; -use libipld::Cid; -use tokio::io::{AsyncWrite, AsyncWriteExt}; - -use super::{error::Error, header::CarHeader}; - -#[derive(Debug)] -pub struct CarWriter { - header: CarHeader, - writer: W, - cid_buffer: Vec, - is_header_written: bool, -} - -impl CarWriter -where - W: AsyncWrite + Send + Unpin, -{ - pub fn new(header: CarHeader, writer: W) -> Self { - CarWriter { - header, - writer, - cid_buffer: Vec::new(), - is_header_written: false, - } - } - - /// Writes header and stream of data to writer in Car format. - pub async fn write(&mut self, cid: Cid, data: T) -> Result<(), Error> - where - T: AsRef<[u8]>, - { - if !self.is_header_written { - // Write header bytes - let header_bytes = self.header.encode()?; - self.writer.write_varint_async(header_bytes.len()).await?; - self.writer.write_all(&header_bytes).await?; - self.is_header_written = true; - } - - // Write the given block. - self.cid_buffer.clear(); - cid.write_bytes(&mut self.cid_buffer).expect("vec write"); - - let data = data.as_ref(); - let len = self.cid_buffer.len() + data.len(); - - self.writer.write_varint_async(len).await?; - self.writer.write_all(&self.cid_buffer).await?; - self.writer.write_all(data).await?; - - Ok(()) - } - - /// Finishes writing, including flushing and returns the writer. - pub async fn finish(mut self) -> Result { - self.flush().await?; - Ok(self.writer) - } - - /// Flushes the underlying writer. - pub async fn flush(&mut self) -> Result<(), Error> { - self.writer.flush().await?; - Ok(()) - } - - /// Consumes the [`CarWriter`] and returns the underlying writer. 
- pub fn into_inner(self) -> W {
- self.writer
- }
-}
diff --git a/adenosine-pds/src/web.rs b/adenosine-pds/src/web.rs
index fed9814..0d80b4e 100644
--- a/adenosine-pds/src/web.rs
+++ b/adenosine-pds/src/web.rs
@@ -1,6 +1,6 @@
 use crate::models::*;
-use crate::repo::RepoCommit;
 use adenosine::identifiers::{Did, Nsid, Tid};
+use adenosine::repo::RepoCommit;
 use askama::Template;
 
 #[derive(Template)]
diff --git a/adenosine/Cargo.toml b/adenosine/Cargo.toml
index a05aa78..456b0fe 100644
--- a/adenosine/Cargo.toml
+++ b/adenosine/Cargo.toml
@@ -24,6 +24,22 @@ lazy_static = "1"
 data-encoding = "2"
 rand = "0.8"
 time = { version = "0.3", features = ["formatting"] }
+k256 = { version = "0.11", features = ["ecdsa"] }
+p256 = { version = "0.11", features = ["ecdsa"] }
+bs58 = "0.4"
+libipld = "0.14"
+ipfs-sqlite-block-store = "0.13"
+tokio = { version = "1", features = ["full"] }
+futures = "0.3"
+sha256 = "1"
+multibase = "0.9"
+ucan = "0.7.0-alpha.1"
+async-trait = "0.1"
+
+# for vendored iroh-car
+thiserror = "1.0"
+integer-encoding = { version = "3", features = ["tokio_async"] }
+multihash = "0.16"
 
 # uncertain about these...
 anyhow = "1"
diff --git a/adenosine/src/car.rs b/adenosine/src/car.rs
new file mode 100644
index 0000000..22f83bc
--- /dev/null
+++ b/adenosine/src/car.rs
@@ -0,0 +1,100 @@
+use anyhow::Result;
+
+use crate::vendored::iroh_car::{CarHeader, CarReader, CarWriter};
+use futures::TryStreamExt;
+use ipfs_sqlite_block_store::BlockStore;
+use libipld::{Block, Cid};
+use std::path::PathBuf;
+use tokio::fs::File;
+use tokio::io::{AsyncRead, BufReader};
+
+/// Synchronous wrapper for loading in-memory CAR bytes (`&[u8]`) into a blockstore.
+///
+/// Does not do any pinning, even temporarily. Returns the root CID indicated in the CAR file
+/// header.
+pub fn load_car_bytes_to_blockstore(
+ db: &mut BlockStore<libipld::DefaultParams>,
+ car_bytes: &[u8],
+) -> Result<Cid> {
+ let rt = tokio::runtime::Builder::new_current_thread()
+ .enable_all()
+ .build()?;
+ rt.block_on(inner_car_bytes_loader(db, car_bytes))
+}
+
+/// Synchronous wrapper for loading on-disk CAR file (by path) into a blockstore.
+///
+/// Does not do any pinning, even temporarily. Returns the root CID indicated in the CAR file
+/// header.
+pub fn load_car_path_to_blockstore(
+ db: &mut BlockStore<libipld::DefaultParams>,
+ car_path: &PathBuf,
+) -> Result<Cid> {
+ let rt = tokio::runtime::Builder::new_current_thread()
+ .enable_all()
+ .build()?;
+ rt.block_on(inner_car_path_loader(db, car_path))
+}
+
+pub fn read_car_bytes_from_blockstore(
+ db: &mut BlockStore<libipld::DefaultParams>,
+ root: &Cid,
+) -> Result<Vec<u8>> {
+ let rt = tokio::runtime::Builder::new_current_thread()
+ .enable_all()
+ .build()?;
+ rt.block_on(inner_car_bytes_reader(db, root))
+}
+
+async fn inner_car_bytes_loader(
+ db: &mut BlockStore<libipld::DefaultParams>,
+ car_bytes: &[u8],
+) -> Result<Cid> {
+ let car_reader = CarReader::new(car_bytes).await?;
+ inner_car_loader(db, car_reader).await
+}
+
+async fn inner_car_path_loader(
+ db: &mut BlockStore<libipld::DefaultParams>,
+ car_path: &PathBuf,
+) -> Result<Cid> {
+ let car_reader = {
+ let file = File::open(car_path).await?;
+ let buf_reader = BufReader::new(file);
+ CarReader::new(buf_reader).await?
+ };
+ inner_car_loader(db, car_reader).await
+}
+
+async fn inner_car_loader<R: AsyncRead + Send + Unpin>(
+ db: &mut BlockStore<libipld::DefaultParams>,
+ car_reader: CarReader<R>,
+) -> Result<Cid> {
+ let car_header = car_reader.header().clone();
+ car_reader
+ .stream()
+ .try_for_each(|(cid, raw)| {
+ // TODO: error handling here instead of unwrap (?)
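+ // (both calls below can fail: `Block::new` if the raw bytes don't hash to the
+ // given CID, and `put_block` on I/O errors; as written they panic inside the
+ // closure rather than propagating through the stream's error type)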
+ let block = Block::new(cid, raw).unwrap();
+ db.put_block(block, None).unwrap();
+ futures::future::ready(Ok(()))
+ })
+ .await?;
+ Ok(car_header.roots()[0])
+}
+
+async fn inner_car_bytes_reader(
+ db: &mut BlockStore<libipld::DefaultParams>,
+ root: &Cid,
+) -> Result<Vec<u8>> {
+ let car_header = CarHeader::new_v1(vec![*root]);
+ let buf: Vec<u8> = Default::default();
+ let mut car_writer = CarWriter::new(car_header, buf);
+
+ let cid_list = db.get_descendants::<Vec<_>>(root)?;
+ for cid in cid_list {
+ let block = db.get_block(&cid)?.expect("block content");
+ car_writer.write(cid, block).await?;
+ }
+ Ok(car_writer.finish().await?)
+}
diff --git a/adenosine/src/crypto.rs b/adenosine/src/crypto.rs
new file mode 100644
index 0000000..3647928
--- /dev/null
+++ b/adenosine/src/crypto.rs
@@ -0,0 +1,280 @@
+use crate::identifiers::Did;
+use crate::ucan_p256::P256KeyMaterial;
+use anyhow::{anyhow, ensure, Result};
+use p256::ecdsa::signature::{Signer, Verifier};
+use std::str::FromStr;
+use ucan::builder::UcanBuilder;
+
+// Need to:
+//
+// - generate new random keypair
+// - generate keypair from seed
+// - read/write secret keypair (eg, for PDS config loading)
+// - sign bytes (and ipld?) using keypair
+// - verify signature bytes (and ipld?) using pubkey
+
+const MULTICODE_P256_BYTES: [u8; 2] = [0x80, 0x24];
+const MULTICODE_K256_BYTES: [u8; 2] = [0xe7, 0x01];
+
+#[derive(Clone, PartialEq, Eq)]
+pub struct KeyPair {
+ public: p256::ecdsa::VerifyingKey,
+ secret: p256::ecdsa::SigningKey,
+}
+
+#[derive(Clone, PartialEq, Eq)]
+pub enum PubKey {
+ P256(p256::ecdsa::VerifyingKey),
+ K256(k256::ecdsa::VerifyingKey),
+}
+
+impl KeyPair {
+ pub fn new_random() -> Self {
+ let signing = p256::ecdsa::SigningKey::random(&mut p256::elliptic_curve::rand_core::OsRng);
+ KeyPair {
+ public: signing.verifying_key(),
+ secret: signing,
+ }
+ }
+
+ pub fn from_bytes(bytes: &[u8]) -> Result<KeyPair> {
+ let signing = p256::ecdsa::SigningKey::from_bytes(bytes)?;
+ Ok(KeyPair {
+ public: signing.verifying_key(),
+ secret: signing,
+ })
+ }
+
+ pub fn to_bytes(&self) -> Vec<u8> {
+ self.secret.to_bytes().to_vec()
+ }
+
+ pub fn pubkey(&self) -> PubKey {
+ PubKey::P256(self.public)
+ }
+
+ pub fn sign_bytes(&self, data: &[u8]) -> String {
+ let sig = self.secret.sign(data);
+ data_encoding::BASE64URL_NOPAD.encode(&sig.to_vec())
+ }
+
+ fn ucan_keymaterial(&self) -> P256KeyMaterial {
+ P256KeyMaterial(self.public, Some(self.secret.clone()))
+ }
+
+ /// This is currently just an un-validated token; we don't actually verify these.
+ pub fn ucan(&self, did: &Did) -> Result<String> {
+ let key_material = self.ucan_keymaterial();
+ let rt = tokio::runtime::Builder::new_current_thread()
+ .enable_all()
+ .build()?;
+ rt.block_on(build_ucan(key_material, did))
+ }
+
+ pub fn to_hex(&self) -> String {
+ data_encoding::HEXUPPER.encode(&self.to_bytes())
+ }
+
+ pub fn from_hex(hex: &str) -> Result<KeyPair> {
+ Self::from_bytes(&data_encoding::HEXUPPER.decode(hex.as_bytes())?)
+ }
+}
+
+async fn build_ucan(key_material: P256KeyMaterial, did: &Did) -> Result<String> {
+ let token_string = UcanBuilder::default()
+ .issued_by(&key_material)
+ .for_audience(did)
+ .with_nonce()
+ .with_lifetime(60 * 60 * 24 * 90)
+ .build()?
+ .sign()
+ .await?
+ .encode()?;
+ Ok(token_string)
+}
+
+impl PubKey {
+ pub fn verify_bytes(&self, data: &[u8], sig: &str) -> Result<()> {
+ let sig_bytes = data_encoding::BASE64URL_NOPAD.decode(sig.as_bytes())?;
+ // TODO: better way other than this re-encoding?
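+ // (the hex round-trip just reuses the `FromStr` impls; parsing the raw bytes
+ // directly, eg with `Signature::try_from(sig_bytes.as_slice())` as done in
+ // ucan_p256.rs, would be one alternative)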
+ let sig_hex = data_encoding::HEXUPPER.encode(&sig_bytes);
+ match self {
+ PubKey::P256(key) => {
+ let sig = p256::ecdsa::Signature::from_str(&sig_hex)?;
+ Ok(key.verify(data, &sig)?)
+ }
+ PubKey::K256(key) => {
+ let sig = k256::ecdsa::Signature::from_str(&sig_hex)?;
+ Ok(key.verify(data, &sig)?)
+ }
+ }
+ }
+
+ pub fn key_type(&self) -> String {
+ match self {
+ PubKey::P256(_) => "EcdsaSecp256r1VerificationKey2019",
+ PubKey::K256(_) => "EcdsaSecp256k1VerificationKey2019",
+ }
+ .to_string()
+ }
+
+ /// This public verification key encoded as base58btc multibase string, not 'compressed', as
+ /// included in DID documents ('publicKeyMultibase').
+ ///
+ /// Note that the did:key serialization does 'compress' the key into a smaller size.
+ pub fn to_multibase(&self) -> String {
+ let mut bytes: Vec<u8> = vec![];
+ match self {
+ PubKey::P256(key) => {
+ bytes.extend_from_slice(&MULTICODE_P256_BYTES);
+ bytes.extend_from_slice(&key.to_encoded_point(false).to_bytes());
+ }
+ PubKey::K256(key) => {
+ bytes.extend_from_slice(&MULTICODE_K256_BYTES);
+ bytes.extend_from_slice(&key.to_bytes());
+ }
+ }
+ multibase::encode(multibase::Base::Base58Btc, &bytes)
+ }
+
+ /// Serializes as a 'did:key' string.
+ pub fn to_did_key(&self) -> String {
+ let mut bytes: Vec<u8> = vec![];
+ match self {
+ PubKey::P256(key) => {
+ bytes.extend_from_slice(&MULTICODE_P256_BYTES);
+ bytes.extend_from_slice(&key.to_encoded_point(true).to_bytes());
+ }
+ PubKey::K256(key) => {
+ bytes.extend_from_slice(&MULTICODE_K256_BYTES);
+ bytes.extend_from_slice(&key.to_bytes());
+ }
+ }
+ format!(
+ "did:key:{}",
+ multibase::encode(multibase::Base::Base58Btc, &bytes)
+ )
+ }
+
+ pub fn from_did_key(did_key: &str) -> Result<Self> {
+ if !did_key.starts_with("did:key:") || did_key.len() < 20 {
+ return Err(anyhow!("does not look like a did:key: {}", did_key));
+ }
+ let (key_type, bytes) = multibase::decode(&did_key[8..])?;
+ ensure!(
+ key_type == multibase::Base::Base58Btc,
+ "base58btc-encoded key"
+ );
+ // prefix bytes
+ let prefix: [u8; 2] = [bytes[0], bytes[1]];
+ match prefix {
+ MULTICODE_K256_BYTES => Ok(PubKey::K256(k256::ecdsa::VerifyingKey::from_sec1_bytes(
+ &bytes[2..],
+ )?)),
+ MULTICODE_P256_BYTES => Ok(PubKey::P256(p256::ecdsa::VerifyingKey::from_sec1_bytes(
+ &bytes[2..],
+ )?)),
+ _ => Err(anyhow!(
+ "key type (multicodec) not handled when parsing DID key: {}",
+ did_key
+ )),
+ }
+ }
+
+ pub fn to_bytes(&self) -> Vec<u8> {
+ match self {
+ PubKey::P256(key) => key.to_encoded_point(true).to_bytes().to_vec(),
+ PubKey::K256(key) => key.to_bytes().to_vec(),
+ }
+ }
+
+ pub fn ucan_keymaterial(&self) -> P256KeyMaterial {
+ match self {
+ PubKey::P256(key) => P256KeyMaterial(*key, None),
+ PubKey::K256(_key) => unimplemented!(),
+ }
+ }
+}
+
+impl std::fmt::Display for PubKey {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}", self.to_did_key())
+ }
+}
+
+#[test]
+fn test_did_secp256k1_p256() {
+ // did:key secp256k1 test vectors from W3C
+ // https://github.com/w3c-ccg/did-method-key/blob/main/test-vectors/secp256k1.json
+ // via atproto repo
+ let pairs = vec![
+ (
+ "9085d2bef69286a6cbb51623c8fa258629945cd55ca705cc4e66700396894e0c",
+ "did:key:zQ3shokFTS3brHcDQrn82RUDfCZESWL1ZdCEJwekUDPQiYBme",
+ ),
+ (
+ "f0f4df55a2b3ff13051ea814a8f24ad00f2e469af73c363ac7e9fb999a9072ed",
+ "did:key:zQ3shtxV1FrJfhqE1dvxYRcCknWNjHc3c5X1y3ZSoPDi2aur2",
+ ),
+ (
+ "6b0b91287ae3348f8c2f2552d766f30e3604867e34adc37ccbb74a8e6b893e02",
+ "did:key:zQ3shZc2QzApp2oymGvQbzP8eKheVshBHbU4ZYjeXqwSKEn6N",
+ ),
+ (
+ "c0a6a7c560d37d7ba81ecee9543721ff48fea3e0fb827d42c1868226540fac15",
+ "did:key:zQ3shadCps5JLAHcZiuX5YUtWHHL8ysBJqFLWvjZDKAWUBGzy",
+ ),
+ (
+ "175a232d440be1e0788f25488a73d9416c04b6f924bea6354bf05dd2f1a75133",
+ "did:key:zQ3shptjE6JwdkeKN4fcpnYQY3m9Cet3NiHdAfpvSUZBFoKBj",
+ ),
+ ];
+
+ // test decode/encode did:key
+ for (_hex, did) in pairs.iter() {
+ assert_eq!(did, &PubKey::from_did_key(did).unwrap().to_did_key());
+ }
+
+ let p256_dids = vec![
+ "did:key:zDnaerx9CtbPJ1q36T5Ln5wYt3MQYeGRG5ehnPAmxcf5mDZpv",
+ "did:key:zDnaerDaTF5BXEavCrfRZEk316dpbLsfPDZ3WJ5hRTPFU2169",
+ ];
+ for did in p256_dids {
+ assert_eq!(did, &PubKey::from_did_key(did).unwrap().to_did_key());
+ }
+}
+
+#[test]
+fn test_did_plc_examples() {
+ // https://atproto.com/specs/did-plc
+ let example_dids = vec![
+ "did:key:zDnaejYFhgFiVF89LhJ4UipACLKuqo6PteZf8eKDVKeExXUPk",
+ "did:key:zDnaeSezF2TgCD71b5DiiFyhHQwKAfsBVqTTHRMvP597Z5Ztn",
+ "did:key:zDnaeh9v2RmcMo13Du2d6pjUf5bZwtauYxj3n9dYjw4EZUAR7",
+ "did:key:zDnaedvvAsDE6H3BDdBejpx9ve2Tz95cymyCAKF66JbyMh1Lt",
+ ];
+
+ for did in example_dids {
+ assert_eq!(did, &PubKey::from_did_key(did).unwrap().to_did_key());
+ }
+}
+
+#[test]
+fn test_signing() {
+ let msg = b"you have found the secret message";
+ let keypair = KeyPair::new_random();
+ let sig_str = keypair.sign_bytes(msg);
+ keypair.pubkey().verify_bytes(msg, &sig_str).unwrap();
+
+ // and with pubkey that has been serialized/deserialized
+ let did_key = keypair.pubkey().to_did_key();
+ let pubkey = PubKey::from_did_key(&did_key).unwrap();
+ pubkey.verify_bytes(msg, &sig_str).unwrap();
+}
+
+#[test]
+fn test_keypair_hex() {
+ let before = KeyPair::new_random();
+ let after = KeyPair::from_hex(&before.to_hex()).unwrap();
+ assert!(before == after);
+}
diff --git a/adenosine/src/did.rs b/adenosine/src/did.rs
new file mode 100644
index 0000000..c7d7d10
--- /dev/null
+++ b/adenosine/src/did.rs
@@ -0,0 +1,368 @@
+use crate::crypto::{KeyPair, PubKey};
+/// DID and 'did:plc' stuff
+///
+/// This is currently a partial/skeleton implementation, which only generates local/testing did:plc
+/// DIDs (and DID documents) using a single 'create' genesis block. Key rotation, etc, is not
+/// supported.
+use crate::identifiers::Did;
+use anyhow::Result;
+use libipld::cbor::DagCborCodec;
+use libipld::multihash::Code;
+use libipld::{Block, Cid, DagCbor, DefaultParams};
+use serde_json::json;
+use std::str::FromStr;
+
+#[allow(non_snake_case)]
+#[derive(Debug, DagCbor, PartialEq, Eq, Clone)]
+pub struct CreateOp {
+ #[ipld(rename = "type")]
+ pub op_type: String,
+ pub signingKey: String,
+ pub recoveryKey: String,
+ pub username: String,
+ pub service: String,
+ pub prev: Option<String>,
+ pub sig: String,
+}
+
+#[allow(non_snake_case)]
+#[derive(Debug, DagCbor, PartialEq, Eq, Clone)]
+struct UnsignedCreateOp {
+ #[ipld(rename = "type")]
+ pub op_type: String,
+ pub signingKey: String,
+ pub recoveryKey: String,
+ pub username: String,
+ pub service: String,
+ pub prev: Option<String>,
+}
+
+impl UnsignedCreateOp {
+ fn into_signed(self, sig: String) -> CreateOp {
+ CreateOp {
+ op_type: self.op_type,
+ prev: self.prev,
+ sig,
+ signingKey: self.signingKey,
+ recoveryKey: self.recoveryKey,
+ username: self.username,
+ service: self.service,
+ }
+ }
+}
+
+impl CreateOp {
+ pub fn new(
+ username: String,
+ atp_pds: String,
+ keypair: &KeyPair,
+ recovery_key: Option<String>,
+ ) -> Self {
+ let signing_key = keypair.pubkey().to_did_key();
+ let recovery_key = recovery_key.unwrap_or(signing_key.clone());
+ let unsigned = UnsignedCreateOp {
+ op_type: "create".to_string(),
+ prev: None,
+ signingKey: signing_key,
+ recoveryKey: recovery_key,
+ username,
+ service: atp_pds,
+ };
+ let block = Block::<DefaultParams>::encode(DagCborCodec, Code::Sha2_256, &unsigned)
+ .expect("encode DAG-CBOR");
+ let sig = keypair.sign_bytes(block.data());
+ unsigned.into_signed(sig)
+ }
+
+ pub fn did_plc(&self) -> Did {
+ // dump DAG-CBOR
+ let block = Block::<DefaultParams>::encode(DagCborCodec, Code::Sha2_256, self)
+ .expect("encode DAG-CBOR");
+ let bin = block.data();
+ // hash SHA-256
+ let digest_bytes: Vec<u8> = data_encoding::HEXLOWER
+ .decode(sha256::digest(bin).as_bytes())
+ .expect("SHA-256 digest is always hex string");
+ // encode base32
+ let digest_b32 = data_encoding::BASE32_NOPAD
+ .encode(&digest_bytes)
+ .to_ascii_lowercase();
+ // truncate
+ Did::from_str(&format!("did:plc:{}", &digest_b32[0..24])).unwrap()
+ }
+
+ fn into_unsigned(self) -> UnsignedCreateOp {
+ UnsignedCreateOp {
+ op_type: self.op_type,
+ prev: self.prev,
+ signingKey: self.signingKey,
+ recoveryKey: self.recoveryKey,
+ username: self.username,
+ service: self.service,
+ }
+ }
+
+ pub fn did_doc(&self) -> serde_json::Value {
+ let meta = DidDocMeta {
+ did: self.did_plc(),
+ // TODO
+ user_url: format!("https://{}", self.username),
+ service_url: self.service.clone(),
+ recovery_didkey: self.recoveryKey.clone(),
+ signing_didkey: self.signingKey.clone(),
+ };
+ meta.did_doc()
+ }
+
+ /// This method only makes sense on the "genesis" create object
+ pub fn verify_self(&self) -> Result<()> {
+ let key = PubKey::from_did_key(&self.signingKey)?;
+ let unsigned = {
+ let cpy = (*self).clone();
+ cpy.into_unsigned()
+ };
+ //println!("unsigned: {:?}", unsigned);
+ let block = Block::<DefaultParams>::encode(DagCborCodec, Code::Sha2_256, &unsigned)
+ .expect("encode DAG-CBOR");
+ key.verify_bytes(block.data(), &self.sig)
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct DidDocMeta {
+ pub did: Did,
+ pub user_url: String,
+ pub service_url: String,
+ pub recovery_didkey: String,
+ pub signing_didkey: String,
+}
+
+impl DidDocMeta {
+ pub fn did_doc(&self) -> serde_json::Value {
+ let key_type = "EcdsaSecp256r1VerificationKey2019";
+ json!({
+ "@context": [
+ "https://www.w3.org/ns/did/v1",
+ "https://w3id.org/security/suites/ecdsa-2019/v1" + ], + "id": self.did.to_string(), + "alsoKnownAs": [ self.user_url ], + "verificationMethod": [ + { + "id": format!("{}#signingKey)", self.did), + "type": key_type, + "controller": self.did.to_string(), + "publicKeyMultibase": self.signing_didkey + }, + { + "id": format!("{}#recoveryKey)", self.did), + "type": key_type, + "controller": self.did.to_string(), + "publicKeyMultibase": self.recovery_didkey + } + ], + "assertionMethod": [ format!("{}#signingKey)", self.did)], + "capabilityInvocation": [ format!("{}#signingKey)", self.did) ], + "capabilityDelegation": [ format!("{}#signingKey)", self.did) ], + "service": [ + { + "id": format!("{}#atpPds)", self.did), + "type": "AtpPersonalDataServer", + "serviceEndpoint": self.service_url + } + ] + }) + } +} + +#[test] +fn test_debug_did_signing() { + let op = UnsignedCreateOp { + op_type: "create".to_string(), + signingKey: "did:key:zDnaeSWVQyW8DSF6mDwT9j8YrzDWDs8h6PPjuTcipzG84iCBE".to_string(), + recoveryKey: "did:key:zDnaeSWVQyW8DSF6mDwT9j8YrzDWDs8h6PPjuTcipzG84iCBE".to_string(), + username: "carla.test".to_string(), + service: "http://localhost:2583".to_string(), + prev: None, + }; + let block = + Block::::encode(DagCborCodec, Code::Sha2_256, &op).expect("encode DAG-CBOR"); + let op_bytes = block.data(); + + let _key_bytes = vec![ + 4, 30, 224, 8, 198, 84, 108, 1, 58, 193, 91, 176, 212, 45, 4, 36, 28, 252, 242, 95, 20, 85, + 87, 246, 79, 134, 42, 113, 5, 216, 238, 235, 21, 146, 16, 88, 239, 217, 36, 252, 148, 197, + 203, 22, 29, 2, 52, 152, 77, 208, 21, 88, 2, 85, 219, 212, 148, 139, 104, 200, 15, 119, 46, + 178, 186, + ]; + + let pub_key = + PubKey::from_did_key("did:key:zDnaeSWVQyW8DSF6mDwT9j8YrzDWDs8h6PPjuTcipzG84iCBE").unwrap(); + //let keypair = KeyPair::from_bytes(&key_bytes).unwrap(); + //assert_eq!(keypair.to_bytes(), key_bytes); + + let encoded_bytes = vec![ + 166, 100, 112, 114, 101, 118, 246, 100, 116, 121, 112, 101, 102, 99, 114, 101, 97, 116, + 101, 103, 115, 101, 114, 118, 105, 99, 101, 117, 104, 116, 116, 112, 58, 47, 47, 108, 111, + 99, 97, 108, 104, 111, 115, 116, 58, 50, 53, 56, 51, 104, 117, 115, 101, 114, 110, 97, 109, + 101, 106, 99, 97, 114, 108, 97, 46, 116, 101, 115, 116, 106, 115, 105, 103, 110, 105, 110, + 103, 75, 101, 121, 120, 57, 100, 105, 100, 58, 107, 101, 121, 58, 122, 68, 110, 97, 101, + 83, 87, 86, 81, 121, 87, 56, 68, 83, 70, 54, 109, 68, 119, 84, 57, 106, 56, 89, 114, 122, + 68, 87, 68, 115, 56, 104, 54, 80, 80, 106, 117, 84, 99, 105, 112, 122, 71, 56, 52, 105, 67, + 66, 69, 107, 114, 101, 99, 111, 118, 101, 114, 121, 75, 101, 121, 120, 57, 100, 105, 100, + 58, 107, 101, 121, 58, 122, 68, 110, 97, 101, 83, 87, 86, 81, 121, 87, 56, 68, 83, 70, 54, + 109, 68, 119, 84, 57, 106, 56, 89, 114, 122, 68, 87, 68, 115, 56, 104, 54, 80, 80, 106, + 117, 84, 99, 105, 112, 122, 71, 56, 52, 105, 67, 66, 69, + ]; + assert_eq!(encoded_bytes, op_bytes); + + let _sig_bytes = vec![ + 131, 115, 47, 143, 89, 68, 79, 73, 121, 198, 70, 76, 91, 64, 171, 25, 18, 139, 244, 94, + 123, 224, 205, 32, 241, 174, 36, 120, 199, 206, 199, 202, 216, 154, 2, 10, 247, 101, 138, + 170, 85, 95, 142, 164, 50, 203, 92, 23, 247, 218, 231, 224, 78, 68, 55, 104, 243, 145, 243, + 4, 219, 102, 44, 227, + ]; + let sig_str = + "g3Mvj1lET0l5xkZMW0CrGRKL9F574M0g8a4keMfOx8rYmgIK92WKqlVfjqQyy1wX99rn4E5EN2jzkfME22Ys4w"; + + pub_key.verify_bytes(op_bytes, sig_str).unwrap(); + + let signed = op.into_signed(sig_str.to_string()); + signed.verify_self().unwrap(); +} + +/* +------------------------------------ 
+OP: +{ + type: 'create', + signingKey: 'did:key:zDnaesoxZb8mLjf16e4PWsNqLLj9uWM9TQ8nNwxqErDmKXLAN', + recoveryKey: 'did:key:zDnaesoxZb8mLjf16e4PWsNqLLj9uWM9TQ8nNwxqErDmKXLAN', + username: 'carla.test', + service: 'http://localhost:2583', + prev: null, + sig: 'VYGxmZs-D5830YdQSNrZpbxVyOPB4nCJtO-x0XElt35AE5wjvJFa2vJu8qjURG6TvEbMvfbekDo_eXEMhdPWdg' +} +ENCODED: +{"0":167,"1":99,"2":115,"3":105,"4":103,"5":120,"6":86,"7":86,"8":89,"9":71,"10":120,"11":109,"12":90,"13":115,"14":45,"15":68,"16":53,"17":56,"18":51,"19":48,"20":89,"21":100,"22":81,"23":83,"24":78,"25":114,"26":90,"27":112,"28":98,"29":120,"30":86,"31":121,"32":79,"33":80,"34":66,"35":52,"36":110,"37":67,"38":74,"39":116,"40":79,"41":45,"42":120,"43":48,"44":88,"45":69,"46":108,"47":116,"48":51,"49":53,"50":65,"51":69,"52":53,"53":119,"54":106,"55":118,"56":74,"57":70,"58":97,"59":50,"60":118,"61":74,"62":117,"63":56,"64":113,"65":106,"66":85,"67":82,"68":71,"69":54,"70":84,"71":118,"72":69,"73":98,"74":77,"75":118,"76":102,"77":98,"78":101,"79":107,"80":68,"81":111,"82":95,"83":101,"84":88,"85":69,"86":77,"87":104,"88":100,"89":80,"90":87,"91":100,"92":103,"93":100,"94":112,"95":114,"96":101,"97":118,"98":246,"99":100,"100":116,"101":121,"102":112,"103":101,"104":102,"105":99,"106":114,"107":101,"108":97,"109":116,"110":101,"111":103,"112":115,"113":101,"114":114,"115":118,"116":105,"117":99,"118":101,"119":117,"120":104,"121":116,"122":116,"123":112,"124":58,"125":47,"126":47,"127":108,"128":111,"129":99,"130":97,"131":108,"132":104,"133":111,"134":115,"135":116,"136":58,"137":50,"138":53,"139":56,"140":51,"141":104,"142":117,"143":115,"144":101,"145":114,"146":110,"147":97,"148":109,"149":101,"150":106,"151":99,"152":97,"153":114,"154":108,"155":97,"156":46,"157":116,"158":101,"159":115,"160":116,"161":106,"162":115,"163":105,"164":103,"165":110,"166":105,"167":110,"168":103,"169":75,"170":101,"171":121,"172":120,"173":57,"174":100,"175":105,"176":100,"177":58,"178":107,"179":101,"180":121,"181":58,"182":122,"183":68,"184":110,"185":97,"186":101,"187":115,"188":111,"189":120,"190":90,"191":98,"192":56,"193":109,"194":76,"195":106,"196":102,"197":49,"198":54,"199":101,"200":52,"201":80,"202":87,"203":115,"204":78,"205":113,"206":76,"207":76,"208":106,"209":57,"210":117,"211":87,"212":77,"213":57,"214":84,"215":81,"216":56,"217":110,"218":78,"219":119,"220":120,"221":113,"222":69,"223":114,"224":68,"225":109,"226":75,"227":88,"228":76,"229":65,"230":78,"231":107,"232":114,"233":101,"234":99,"235":111,"236":118,"237":101,"238":114,"239":121,"240":75,"241":101,"242":121,"243":120,"244":57,"245":100,"246":105,"247":100,"248":58,"249":107,"250":101,"251":121,"252":58,"253":122,"254":68,"255":110,"256":97,"257":101,"258":115,"259":111,"260":120,"261":90,"262":98,"263":56,"264":109,"265":76,"266":106,"267":102,"268":49,"269":54,"270":101,"271":52,"272":80,"273":87,"274":115,"275":78,"276":113,"277":76,"278":76,"279":106,"280":57,"281":117,"282":87,"283":77,"284":57,"285":84,"286":81,"287":56,"288":110,"289":78,"290":119,"291":120,"292":113,"293":69,"294":114,"295":68,"296":109,"297":75,"298":88,"299":76,"300":65,"301":78} +SHA256 base32: +cg2dfxdh5voabmdjzw2abw3sgvtjymknh2bmpvtwot7t2ih4v7za +did:plc:cg2dfxdh5voabmdjzw2abw3s +------------------------------------ + +*/ + +#[test] +fn test_debug_did_plc() { + let op = CreateOp { + op_type: "create".to_string(), + signingKey: "did:key:zDnaesoxZb8mLjf16e4PWsNqLLj9uWM9TQ8nNwxqErDmKXLAN".to_string(), + recoveryKey: "did:key:zDnaesoxZb8mLjf16e4PWsNqLLj9uWM9TQ8nNwxqErDmKXLAN".to_string(), + 
username: "carla.test".to_string(), + service: "http://localhost:2583".to_string(), + prev: None, + sig: + "VYGxmZs-D5830YdQSNrZpbxVyOPB4nCJtO-x0XElt35AE5wjvJFa2vJu8qjURG6TvEbMvfbekDo_eXEMhdPWdg" + .to_string(), + }; + op.verify_self().unwrap(); + let block = + Block::::encode(DagCborCodec, Code::Sha2_256, &op).expect("encode DAG-CBOR"); + let op_bytes = block.data(); + + let encoded_bytes = vec![ + 167, 99, 115, 105, 103, 120, 86, 86, 89, 71, 120, 109, 90, 115, 45, 68, 53, 56, 51, 48, 89, + 100, 81, 83, 78, 114, 90, 112, 98, 120, 86, 121, 79, 80, 66, 52, 110, 67, 74, 116, 79, 45, + 120, 48, 88, 69, 108, 116, 51, 53, 65, 69, 53, 119, 106, 118, 74, 70, 97, 50, 118, 74, 117, + 56, 113, 106, 85, 82, 71, 54, 84, 118, 69, 98, 77, 118, 102, 98, 101, 107, 68, 111, 95, + 101, 88, 69, 77, 104, 100, 80, 87, 100, 103, 100, 112, 114, 101, 118, 246, 100, 116, 121, + 112, 101, 102, 99, 114, 101, 97, 116, 101, 103, 115, 101, 114, 118, 105, 99, 101, 117, 104, + 116, 116, 112, 58, 47, 47, 108, 111, 99, 97, 108, 104, 111, 115, 116, 58, 50, 53, 56, 51, + 104, 117, 115, 101, 114, 110, 97, 109, 101, 106, 99, 97, 114, 108, 97, 46, 116, 101, 115, + 116, 106, 115, 105, 103, 110, 105, 110, 103, 75, 101, 121, 120, 57, 100, 105, 100, 58, 107, + 101, 121, 58, 122, 68, 110, 97, 101, 115, 111, 120, 90, 98, 56, 109, 76, 106, 102, 49, 54, + 101, 52, 80, 87, 115, 78, 113, 76, 76, 106, 57, 117, 87, 77, 57, 84, 81, 56, 110, 78, 119, + 120, 113, 69, 114, 68, 109, 75, 88, 76, 65, 78, 107, 114, 101, 99, 111, 118, 101, 114, 121, + 75, 101, 121, 120, 57, 100, 105, 100, 58, 107, 101, 121, 58, 122, 68, 110, 97, 101, 115, + 111, 120, 90, 98, 56, 109, 76, 106, 102, 49, 54, 101, 52, 80, 87, 115, 78, 113, 76, 76, + 106, 57, 117, 87, 77, 57, 84, 81, 56, 110, 78, 119, 120, 113, 69, 114, 68, 109, 75, 88, 76, + 65, 78, + ]; + assert_eq!(op_bytes, encoded_bytes); + + let sha256_str = "cg2dfxdh5voabmdjzw2abw3sgvtjymknh2bmpvtwot7t2ih4v7za"; + let _did_plc = "did:plc:cg2dfxdh5voabmdjzw2abw3s"; + + let digest_bytes: Vec = data_encoding::HEXLOWER + .decode(&sha256::digest(op_bytes).as_bytes()) + .expect("SHA-256 digest is always hex string"); + let digest_b32 = data_encoding::BASE32_NOPAD + .encode(&digest_bytes) + .to_ascii_lowercase(); + assert_eq!(digest_b32, sha256_str); +} + +#[test] +fn test_did_plc_examples() { + // https://atproto.com/specs/did-plc + let op = CreateOp { + op_type: "create".to_string(), + signingKey: "did:key:zDnaejYFhgFiVF89LhJ4UipACLKuqo6PteZf8eKDVKeExXUPk".to_string(), + recoveryKey: "did:key:zDnaeSezF2TgCD71b5DiiFyhHQwKAfsBVqTTHRMvP597Z5Ztn".to_string(), + username: "alice.example.com".to_string(), + service: "https://example.com".to_string(), + prev: None, + sig: + "vi6JAl5W4FfyViD5_BKL9p0rbI3MxTWuh0g_egTFAjtf7gwoSfSe1O3qMOEUPX6QH3H0Q9M4y7gOLGblWkEwfQ" + .to_string(), + }; + op.verify_self().unwrap(); + assert_eq!( + &op.did_plc().to_string(), + "did:plc:7iza6de2dwap2sbkpav7c6c6" + ); + + // interacting with PDS / PLC server + let op = CreateOp { + op_type: "create".to_string(), + signingKey: "did:key:zDnaekmbFffmpo7LZ4C7bEFjGKPk11N47kKN8j7jtAcGUabw3".to_string(), + recoveryKey: "did:key:zDnaekmbFffmpo7LZ4C7bEFjGKPk11N47kKN8j7jtAcGUabw3".to_string(), + username: "voltaire.test".to_string(), + service: "http://localhost:2583".to_string(), + prev: None, + sig: + "HNfQUg6SMnYKp1l3LtAIsoAblmi33mYiHE9JH1j7w3B-hd8xWpmCUBUoqKfQXmsAs0K1z8Izt19yYk6PqVFgyg" + .to_string(), + }; + op.verify_self().unwrap(); + assert_eq!( + &op.did_plc().to_string(), + "did:plc:bmrcg7zrxoiw2kiml3tkw2xv" + ); +} + +#[test] +fn test_self_verify() 
{ + let keypair = KeyPair::new_random(); + let op = CreateOp::new( + "dummy-handle".to_string(), + "https://dummy.service".to_string(), + &keypair, + None, + ); + println!("{:?}", op); + op.verify_self().unwrap(); +} + +#[test] +fn test_known_key() { + let keypair = KeyPair::new_random(); + let op = CreateOp::new( + "dummy-handle".to_string(), + "https://dummy.service".to_string(), + &keypair, + None, + ); + println!("{:?}", op); + op.verify_self().unwrap(); +} diff --git a/adenosine/src/lib.rs b/adenosine/src/lib.rs index 461930f..dc4a1b9 100644 --- a/adenosine/src/lib.rs +++ b/adenosine/src/lib.rs @@ -6,7 +6,14 @@ use std::collections::HashMap; use std::str::FromStr; use std::time::Duration; +pub mod car; +pub mod crypto; +pub mod did; pub mod identifiers; +pub mod mst; +pub mod repo; +pub mod ucan_p256; +pub mod vendored; use identifiers::Nsid; static APP_USER_AGENT: &str = concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"),); diff --git a/adenosine/src/mst.rs b/adenosine/src/mst.rs new file mode 100644 index 0000000..1d75da1 --- /dev/null +++ b/adenosine/src/mst.rs @@ -0,0 +1,399 @@ +/// This is a simple immutable implemenation of of a Merkle Search Tree (MST) for atproto. +/// +/// Mutations on the data structure are not implemented; instead the entire tree is read into a +/// BTreeMap, that is mutated, and then the entire tree is regenerated. This makes implementation +/// much simpler, at the obvious cost of performance in some situations. +/// +/// The MST is basically a sorted key/value store where the key is a string and the value is a CID. +/// Tree nodes are stored as DAG-CBOG IPLD blocks, with references as CIDs. +/// +/// In the atproto MST implementation, SHA-256 is the hashing algorithm, and "leading zeros" are +/// counted in blocks of 4 bits (so a leading zero byte counts as two zeros). This happens to match +/// simple hex encoding of the SHA-256 hash. +use anyhow::{anyhow, Context, Result}; +use ipfs_sqlite_block_store::BlockStore; +use libipld::cbor::DagCborCodec; +use libipld::multihash::Code; +use libipld::prelude::Codec; +use libipld::store::DefaultParams; +use libipld::Block; +use libipld::{Cid, DagCbor}; +use log::{debug, error, info}; +use std::collections::BTreeMap; +use std::path::PathBuf; + +#[derive(Debug, DagCbor, PartialEq, Eq)] +pub struct CommitNode { + pub root: Cid, + pub sig: Box<[u8]>, +} + +#[derive(Debug, DagCbor, PartialEq, Eq)] +pub struct RootNode { + pub auth_token: Option, + pub prev: Option, + // TODO: not 'metadata'? + pub meta: Cid, + pub data: Cid, +} + +#[derive(Debug, DagCbor, PartialEq, Eq)] +pub struct MetadataNode { + pub datastore: String, // "mst" + pub did: String, + pub version: u8, // 1 +} + +#[derive(Debug, DagCbor, PartialEq, Eq)] +struct MstEntry { + p: u32, + k: String, + v: Cid, + t: Option, +} + +#[derive(Debug, DagCbor, PartialEq)] +struct MstNode { + l: Option, + e: Vec, +} + +struct WipEntry { + height: u8, + key: String, + val: Cid, + right: Option>, +} + +struct WipNode { + height: u8, + left: Option>, + entries: Vec, +} + +fn get_mst_node(db: &mut BlockStore, cid: &Cid) -> Result { + let block = &db + .get_block(cid)? 
+ .ok_or(anyhow!("reading MST node from blockstore"))?; + //println!("{:?}", block); + let mst_node: MstNode = DagCborCodec + .decode(block) + .context("parsing MST DAG-CBOR IPLD node from blockstore")?; + Ok(mst_node) +} + +pub fn print_mst_keys(db: &mut BlockStore, cid: &Cid) -> Result<()> { + let node = get_mst_node(db, cid)?; + if let Some(ref left) = node.l { + print_mst_keys(db, left)?; + } + let mut key: String = "".to_string(); + for entry in node.e.iter() { + key = format!("{}{}", &key[0..entry.p as usize], entry.k); + println!("\t{}\t-> {}", key, entry.v); + if let Some(ref right) = entry.t { + print_mst_keys(db, right)?; + } + } + Ok(()) +} + +pub fn dump_mst_keys(db_path: &PathBuf) -> Result<()> { + let mut db: BlockStore = BlockStore::open(db_path, Default::default())?; + + let all_aliases: Vec<(Vec, Cid)> = db.aliases()?; + if all_aliases.is_empty() { + error!("expected at least one alias in block store"); + std::process::exit(-1); + } + + // print all the aliases + for (alias, commit_cid) in all_aliases.iter() { + let did = String::from_utf8_lossy(alias); + println!("{} -> {}", did, commit_cid); + } + + let (did, commit_cid) = all_aliases[0].clone(); + let did = String::from_utf8_lossy(&did); + info!("starting from {} [{}]", commit_cid, did); + + // NOTE: the faster way to develop would have been to decode to libipld::ipld::Ipld first? meh + + debug!( + "raw commit: {:?}", + &db.get_block(&commit_cid)? + .ok_or(anyhow!("expected commit block in store"))? + ); + let commit: CommitNode = DagCborCodec.decode( + &db.get_block(&commit_cid)? + .ok_or(anyhow!("expected commit block in store"))?, + )?; + debug!("Commit: {:?}", commit); + let root: RootNode = DagCborCodec.decode( + &db.get_block(&commit.root)? + .ok_or(anyhow!("expected root block in store"))?, + )?; + debug!("Root: {:?}", root); + let metadata: MetadataNode = DagCborCodec.decode( + &db.get_block(&root.meta)? + .ok_or(anyhow!("expected metadata block in store"))?, + )?; + debug!("Metadata: {:?}", metadata); + let mst_node: MstNode = DagCborCodec.decode( + &db.get_block(&root.data)? 
+ .ok_or(anyhow!("expected block in store"))?, + )?; + debug!("MST root node: {:?}", mst_node); + debug!("============"); + + println!("{}", did); + print_mst_keys(&mut db, &root.data)?; + Ok(()) +} + +pub fn collect_mst_keys( + db: &mut BlockStore, + cid: &Cid, + map: &mut BTreeMap, +) -> Result<()> { + let node = get_mst_node(db, cid)?; + if let Some(ref left) = node.l { + collect_mst_keys(db, left, map)?; + } + let mut key: String = "".to_string(); + for entry in node.e.iter() { + key = format!("{}{}", &key[0..entry.p as usize], entry.k); + map.insert(key.clone(), entry.v); + if let Some(ref right) = entry.t { + collect_mst_keys(db, right, map)?; + } + } + Ok(()) +} + +fn leading_zeros(key: &str) -> u8 { + let digest = sha256::digest(key); + let digest = digest.as_bytes(); + for (i, c) in digest.iter().enumerate() { + if *c != b'0' { + return i as u8; + } + } + digest.len() as u8 +} + +// # python code to generate test cases +// import hashlib +// seed = b"asdf" +// while True: +// out = hashlib.sha256(seed).hexdigest() +// if out.startswith("00"): +// print(f"{seed} -> {out}") +// seed = b"app.bsky.feed.post/" + out.encode('utf8')[:12] + +#[test] +fn test_leading_zeros() { + assert_eq!(leading_zeros(""), 0); + assert_eq!(leading_zeros("asdf"), 0); + assert_eq!(leading_zeros("2653ae71"), 0); + assert_eq!(leading_zeros("88bfafc7"), 1); + assert_eq!(leading_zeros("2a92d355"), 2); + assert_eq!(leading_zeros("884976f5"), 3); + assert_eq!(leading_zeros("app.bsky.feed.post/454397e440ec"), 2); + assert_eq!(leading_zeros("app.bsky.feed.post/9adeb165882c"), 4); +} + +pub fn generate_mst( + db: &mut BlockStore, + map: &BTreeMap, +) -> Result { + // construct a "WIP" tree + let mut root: Option = None; + for (key, val) in map { + let height = leading_zeros(key); + let entry = WipEntry { + height, + key: key.clone(), + val: *val, + right: None, + }; + if let Some(node) = root { + root = Some(insert_entry(node, entry)); + } else { + root = Some(WipNode { + height: entry.height, + left: None, + entries: vec![entry], + }); + } + } + let empty_node = WipNode { + height: 0, + left: None, + entries: vec![], + }; + serialize_wip_tree(db, root.unwrap_or(empty_node)) +} + +// this routine assumes that entries are added in sorted key order. 
AKA, the `entry` being added is +// "further right" in the tree than any existing entries +fn insert_entry(mut node: WipNode, entry: WipEntry) -> WipNode { + // if we are higher on tree than existing node, replace it (recursively) with new layers first + while entry.height > node.height { + node = WipNode { + height: node.height + 1, + left: Some(Box::new(node)), + entries: vec![], + } + } + // if we are lower on tree, then need to descend first + if entry.height < node.height { + // if no entries at this node, then we should insert down "left" (which is just "down", not + // "before" any entries) + if node.entries.is_empty() { + if let Some(left) = node.left { + node.left = Some(Box::new(insert_entry(*left, entry))); + return node; + } else { + panic!("hit existing totally empty MST node"); + } + } + let mut last = node.entries.pop().expect("hit empty existing entry list"); + assert!(entry.key > last.key); + if last.right.is_some() { + last.right = Some(Box::new(insert_entry(*last.right.unwrap(), entry))); + } else { + let mut new_node = WipNode { + height: entry.height, + left: None, + entries: vec![entry], + }; + // may need to (recursively) insert multiple filler layers + while new_node.height + 1 < node.height { + new_node = WipNode { + height: new_node.height + 1, + left: Some(Box::new(new_node)), + entries: vec![], + } + } + last.right = Some(Box::new(new_node)); + } + node.entries.push(last); + return node; + } + // same height, simply append to end (but verify first) + assert!(node.height == entry.height); + if !node.entries.is_empty() { + let last = &node.entries.last().unwrap(); + assert!(entry.key > last.key); + } + node.entries.push(entry); + node +} + +/// returns the length of common characters between the two strings. Strings must be simple ASCII, +/// which should hold for current ATP MST keys (collection plus TID) +fn common_prefix_len(a: &str, b: &str) -> usize { + let a = a.as_bytes(); + let b = b.as_bytes(); + for i in 0..std::cmp::min(a.len(), b.len()) { + if a[i] != b[i] { + return i; + } + } + // strings are the same, up to common length + std::cmp::min(a.len(), b.len()) +} + +#[test] +fn test_common_prefix_len() { + assert_eq!(common_prefix_len("abc", "abc"), 3); + assert_eq!(common_prefix_len("", "abc"), 0); + assert_eq!(common_prefix_len("abc", ""), 0); + assert_eq!(common_prefix_len("ab", "abc"), 2); + assert_eq!(common_prefix_len("abc", "ab"), 2); + assert_eq!(common_prefix_len("abcde", "abc"), 3); + assert_eq!(common_prefix_len("abc", "abcde"), 3); + assert_eq!(common_prefix_len("abcde", "abc1"), 3); + assert_eq!(common_prefix_len("abcde", "abb"), 2); + assert_eq!(common_prefix_len("abcde", "qbb"), 0); + assert_eq!(common_prefix_len("abc", "abc\x00"), 3); + assert_eq!(common_prefix_len("abc\x00", "abc"), 3); +} + +#[test] +fn test_common_prefix_len_wide() { + // TODO: these are not cross-language consistent! 
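+ // (Rust `str::len` counts UTF-8 bytes, while JavaScript `String.length` counts
+ // UTF-16 code units, hence the diverging values noted below)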
+ assert_eq!("jalapeño".len(), 9); // 8 in javascript + assert_eq!("💩".len(), 4); // 2 in javascript + assert_eq!("👩‍👧‍👧".len(), 18); // 8 in javascript + + // many of the below are different in JS; in Rust we *must* cast down to bytes to count + assert_eq!(common_prefix_len("jalapeño", "jalapeno"), 6); + assert_eq!(common_prefix_len("jalapeñoA", "jalapeñoB"), 9); + assert_eq!(common_prefix_len("coöperative", "coüperative"), 3); + assert_eq!(common_prefix_len("abc💩abc", "abcabc"), 3); + assert_eq!(common_prefix_len("💩abc", "💩ab"), 6); + assert_eq!(common_prefix_len("abc👩‍👦‍👦de", "abc👩‍👧‍👧de"), 13); +} + +fn serialize_wip_tree( + db: &mut BlockStore, + wip_node: WipNode, +) -> Result { + let left: Option = if let Some(left) = wip_node.left { + Some(serialize_wip_tree(db, *left)?) + } else { + None + }; + + let mut entries: Vec = vec![]; + let mut last_key = "".to_string(); + for wip_entry in wip_node.entries { + let right: Option = if let Some(right) = wip_entry.right { + Some(serialize_wip_tree(db, *right)?) + } else { + None + }; + let prefix_len = common_prefix_len(&last_key, &wip_entry.key); + entries.push(MstEntry { + k: wip_entry.key[prefix_len..].to_string(), + p: prefix_len as u32, + v: wip_entry.val, + t: right, + }); + last_key = wip_entry.key; + } + let mst_node = MstNode { + l: left, + e: entries, + }; + let block = Block::::encode(DagCborCodec, Code::Sha2_256, &mst_node)?; + let cid = *block.cid(); + db.put_block(block, None)?; + Ok(cid) +} + +#[test] +fn test_mst_node_cbor() { + use std::str::FromStr; + let cid1 = + Cid::from_str("bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454").unwrap(); + let node = MstNode { + l: None, + e: vec![MstEntry { + k: "asdf".to_string(), + p: 0, + v: cid1, + t: None, + }], + }; + let block = Block::::encode(DagCborCodec, Code::Sha2_256, &node).unwrap(); + println!("{:?}", block); + //assert_eq!(1, 2); + let cid = *block.cid(); + assert_eq!( + cid.to_string(), + "bafyreidaftbr35xhh4lzmv5jcoeufqjh75ohzmz6u56v7n2ippbtxdgqqe" + ); +} diff --git a/adenosine/src/repo.rs b/adenosine/src/repo.rs new file mode 100644 index 0000000..2383e52 --- /dev/null +++ b/adenosine/src/repo.rs @@ -0,0 +1,466 @@ +use crate::car::{ + load_car_bytes_to_blockstore, load_car_path_to_blockstore, read_car_bytes_from_blockstore, +}; +use crate::crypto::KeyPair; +use crate::identifiers::{Did, Nsid, Tid}; +use crate::mst::{collect_mst_keys, generate_mst, CommitNode, MetadataNode, RootNode}; +use anyhow::{anyhow, ensure, Context, Result}; +use ipfs_sqlite_block_store::BlockStore; +use libipld::cbor::DagCborCodec; +use libipld::multihash::Code; +use libipld::prelude::Codec; +use libipld::store::DefaultParams; +use libipld::{Block, Cid, Ipld}; +use serde_json::{json, Value}; +use std::borrow::Cow; +use std::collections::BTreeMap; +use std::collections::HashSet; +use std::path::PathBuf; +use std::str::FromStr; + +#[derive(Debug, serde::Serialize)] +pub struct RepoCommit { + pub sig: Box<[u8]>, + pub commit_cid: Cid, + pub root_cid: Cid, + pub did: Did, + pub prev: Option, + pub meta_cid: Cid, + pub mst_cid: Cid, +} + +impl RepoCommit { + /// Returns a JSON object version of this struct, with CIDs and signatures in expected format + /// (aka, CID as a string, not an array of bytes). 
+    pub fn to_pretty_json(&self) -> Value {
+        json!({
+            "sig": data_encoding::HEXUPPER.encode(&self.sig),
+            "commit_cid": self.commit_cid.to_string(),
+            "root_cid": self.root_cid.to_string(),
+            "did": self.did.to_string(),
+            "prev": self.prev.map(|v| v.to_string()),
+            "meta_cid": self.meta_cid.to_string(),
+            "mst_cid": self.mst_cid.to_string(),
+        })
+    }
+}
+
+pub struct RepoStore {
+    // TODO: only public for test/debug; should wrap instead
+    pub db: BlockStore<DefaultParams>,
+}
+
+pub enum Mutation {
+    Create(Nsid, Tid, Ipld),
+    Update(Nsid, Tid, Ipld),
+    Delete(Nsid, Tid),
+}
+
+impl RepoStore {
+    pub fn open(db_path: &PathBuf) -> Result<Self> {
+        Ok(RepoStore {
+            db: BlockStore::open(db_path, Default::default())?,
+        })
+    }
+
+    pub fn open_ephemeral() -> Result<Self> {
+        Ok(RepoStore {
+            db: BlockStore::open_path(ipfs_sqlite_block_store::DbPath::Memory, Default::default())?,
+        })
+    }
+
+    pub fn new_connection(&mut self) -> Result<Self> {
+        Ok(RepoStore {
+            db: self.db.additional_connection()?,
+        })
+    }
+
+    pub fn get_ipld(&mut self, cid: &Cid) -> Result<Ipld> {
+        if let Some(b) = self.db.get_block(cid)? {
+            let block: Block<DefaultParams> = Block::new(*cid, b)?;
+            block.ipld()
+        } else {
+            Err(anyhow!("missing IPLD CID: {}", cid))
+        }
+    }
+
+    pub fn get_blob(&mut self, cid: &Cid) -> Result<Option<Vec<u8>>> {
+        Ok(self.db.get_block(cid)?)
+    }
+
+    /// Returns CID that was inserted
+    pub fn put_ipld<S: libipld::codec::Encode<DagCborCodec>>(&mut self, record: &S) -> Result<Cid> {
+        let block = Block::<DefaultParams>::encode(DagCborCodec, Code::Sha2_256, record)?;
+        let cid = *block.cid();
+        self.db
+            .put_block(block, None)
+            .context("writing IPLD DAG-CBOR record to blockstore")?;
+        Ok(cid)
+    }
+
+    /// Returns CID that was inserted
+    pub fn put_blob(&mut self, data: &[u8]) -> Result<Cid> {
+        let block = Block::<DefaultParams>::encode(libipld::raw::RawCodec, Code::Sha2_256, data)?;
+        let cid = *block.cid();
+        self.db
+            .put_block(block, None)
+            .context("writing non-record blob to blockstore")?;
+        Ok(cid)
+    }
+
+    /// Quick alias lookup
+    pub fn lookup_commit(&mut self, did: &Did) -> Result<Option<Cid>> {
+        Ok(self.db.resolve(Cow::from(did.as_bytes()))?)
+    }
+
+    pub fn get_commit(&mut self, commit_cid: &Cid) -> Result<RepoCommit> {
+        // read records by CID: commit, root, meta
+        let commit_node: CommitNode = DagCborCodec
+            .decode(
+                &self
+                    .db
+                    .get_block(commit_cid)?
+                    .ok_or(anyhow!("expected commit block in store"))?,
+            )
+            .context("parsing commit IPLD node from blockstore")?;
+        let root_node: RootNode = DagCborCodec
+            .decode(
+                &self
+                    .db
+                    .get_block(&commit_node.root)?
+                    .ok_or(anyhow!("expected root block in store"))?,
+            )
+            .context("parsing root IPLD node from blockstore")?;
+        let metadata_node: MetadataNode = DagCborCodec
+            .decode(
+                &self
+                    .db
+                    .get_block(&root_node.meta)?
+ .ok_or(anyhow!("expected metadata block in store"))?, + ) + .context("parsing metadata IPLD node from blockstore")?; + ensure!( + metadata_node.datastore == "mst", + "unexpected repo metadata.datastore: {}", + metadata_node.datastore + ); + ensure!( + metadata_node.version == 1, + "unexpected repo metadata.version: {}", + metadata_node.version + ); + Ok(RepoCommit { + sig: commit_node.sig, + commit_cid: *commit_cid, + root_cid: commit_node.root, + meta_cid: root_node.meta, + did: Did::from_str(&metadata_node.did)?, + prev: root_node.prev, + mst_cid: root_node.data, + }) + } + + pub fn get_mst_record_by_key(&mut self, mst_cid: &Cid, key: &str) -> Result> { + let map = self.mst_to_map(mst_cid)?; + if let Some(cid) = map.get(key) { + self.get_ipld(cid).map(Some) + } else { + Ok(None) + } + } + + pub fn collections(&mut self, did: &Did) -> Result> { + let commit = if let Some(c) = self.lookup_commit(did)? { + self.get_commit(&c)? + } else { + return Err(anyhow!("DID not found in repositories: {}", did)); + }; + let map = self.mst_to_map(&commit.mst_cid)?; + let mut collections: HashSet = Default::default(); + // XXX: confirm that keys actually start with leading slash + for k in map.keys() { + let coll = k.split('/').nth(1).unwrap(); + collections.insert(coll.to_string()); + } + Ok(collections.into_iter().collect()) + } + + pub fn get_atp_record( + &mut self, + did: &Did, + collection: &Nsid, + tid: &Tid, + ) -> Result> { + let commit = if let Some(c) = self.lookup_commit(did)? { + self.get_commit(&c)? + } else { + return Ok(None); + }; + let record_key = format!("/{}/{}", collection, tid); + self.get_mst_record_by_key(&commit.mst_cid, &record_key) + } + + pub fn write_metadata(&mut self, did: &Did) -> Result { + self.put_ipld(&MetadataNode { + datastore: "mst".to_string(), + did: did.to_string(), + version: 1, + }) + } + + pub fn write_root(&mut self, meta_cid: Cid, prev: Option, mst_cid: Cid) -> Result { + self.put_ipld(&RootNode { + auth_token: None, + prev, + meta: meta_cid, + data: mst_cid, + }) + } + + pub fn write_commit(&mut self, did: &Did, root_cid: Cid, sig: &str) -> Result { + let commit_cid = self.put_ipld(&CommitNode { + root: root_cid, + sig: sig.as_bytes().to_vec().into_boxed_slice(), + })?; + self.db.alias(did.as_bytes().to_vec(), Some(&commit_cid))?; + Ok(commit_cid) + } + + pub fn mst_from_map(&mut self, map: &BTreeMap) -> Result { + let mst_cid = generate_mst(&mut self.db, map)?; + Ok(mst_cid) + } + + pub fn mst_to_map(&mut self, mst_cid: &Cid) -> Result> { + let mut cid_map: BTreeMap = Default::default(); + collect_mst_keys(&mut self.db, mst_cid, &mut cid_map) + .context("reading repo MST from blockstore")?; + Ok(cid_map) + } + + pub fn update_mst(&mut self, mst_cid: &Cid, mutations: &[Mutation]) -> Result { + let mut cid_map = self.mst_to_map(mst_cid)?; + for m in mutations.iter() { + match m { + Mutation::Create(collection, tid, val) => { + let cid = self.put_ipld(val)?; + cid_map.insert(format!("/{}/{}", collection, tid), cid); + } + Mutation::Update(collection, tid, val) => { + let cid = self.put_ipld(val)?; + cid_map.insert(format!("/{}/{}", collection, tid), cid); + } + Mutation::Delete(collection, tid) => { + cid_map.remove(&format!("/{}/{}", collection, tid)); + } + } + } + let mst_cid = generate_mst(&mut self.db, &cid_map)?; + Ok(mst_cid) + } + + /// High-level helper to write a batch of mutations to the repo corresponding to the DID, and + /// signing the resulting new root CID with the given keypair. 
+    pub fn mutate_repo(
+        &mut self,
+        did: &Did,
+        mutations: &[Mutation],
+        signing_key: &KeyPair,
+    ) -> Result<Cid> {
+        let commit_cid = self.lookup_commit(did)?.unwrap();
+        let last_commit = self.get_commit(&commit_cid)?;
+        let new_mst_cid = self
+            .update_mst(&last_commit.mst_cid, mutations)
+            .context("updating MST in repo")?;
+        let new_root_cid = self.write_root(
+            last_commit.meta_cid,
+            Some(last_commit.commit_cid),
+            new_mst_cid,
+        )?;
+        // TODO: is this how signatures are supposed to work?
+        let sig = signing_key.sign_bytes(new_root_cid.to_string().as_bytes());
+        self.write_commit(did, new_root_cid, &sig)
+    }
+
+    /// Reads in a full MST tree starting at a repo commit, then re-builds and re-writes the tree
+    /// into the repo, and verifies that both the MST root CIDs and the repo root CIDs are
+    /// identical.
+    pub fn verify_repo_mst(&mut self, commit_cid: &Cid) -> Result<()> {
+        // load existing commit and MST tree
+        let existing_commit = self.get_commit(commit_cid)?;
+        let repo_map = self.mst_to_map(&existing_commit.mst_cid)?;
+
+        // write MST tree, and verify root CID
+        let new_mst_cid = self.mst_from_map(&repo_map)?;
+        if new_mst_cid != existing_commit.mst_cid {
+            Err(anyhow!(
+                "MST root CID did not verify: {} != {}",
+                existing_commit.mst_cid,
+                new_mst_cid
+            ))?;
+        }
+
+        let new_root_cid =
+            self.write_root(existing_commit.meta_cid, existing_commit.prev, new_mst_cid)?;
+        if new_root_cid != existing_commit.root_cid {
+            Err(anyhow!(
+                "repo root CID did not verify: {} != {}",
+                existing_commit.root_cid,
+                new_root_cid
+            ))?;
+        }
+
+        Ok(())
+    }
+
+    /// Import blocks from a CAR file in memory, optionally setting an alias pointing to the input
+    /// (eg, a DID identifier).
+    ///
+    /// Does not currently do any validation of, eg, signatures. It is naive and incomplete to use
+    /// this to simply import CAR content from users, remote servers, etc.
+    ///
+    /// Returns the root commit from the CAR file, which may or may not actually be a "commit"
+    /// block.
+    pub fn import_car_bytes(&mut self, car_bytes: &[u8], alias: Option<String>) -> Result<Cid> {
+        let cid = load_car_bytes_to_blockstore(&mut self.db, car_bytes)?;
+        self.verify_repo_mst(&cid)?;
+        if let Some(alias) = alias {
+            self.db.alias(alias.as_bytes().to_vec(), Some(&cid))?;
+        }
+        Ok(cid)
+    }
+
+    /// Similar to import_car_bytes(), but reads from a local file on disk instead of from memory.
+    pub fn import_car_path(&mut self, car_path: &PathBuf, alias: Option<String>) -> Result<Cid> {
+        let cid = load_car_path_to_blockstore(&mut self.db, car_path)?;
+        self.verify_repo_mst(&cid)?;
+        if let Some(alias) = alias {
+            self.db.alias(alias.as_bytes().to_vec(), Some(&cid))?;
+        }
+        Ok(cid)
+    }
+
+    /// Exports the repo in CAR format, returning the serialized bytes.
+    ///
+    /// The "from" commit CID feature is not implemented.
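+    ///
+    /// Illustrative usage sketch (assumes `repo` and `did` exist, and that the repo has at least
+    /// one commit):
+    ///
+    /// ```ignore
+    /// let commit_cid = repo.lookup_commit(&did)?.unwrap();
+    /// let car_bytes = repo.export_car(&commit_cid, None)?;
+    /// std::fs::write("repo-export.car", &car_bytes)?;
+    /// ```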
+    pub fn export_car(
+        &mut self,
+        commit_cid: &Cid,
+        _from_commit_cid: Option<&Cid>,
+    ) -> Result<Vec<u8>> {
+        // TODO: from_commit_cid
+        read_car_bytes_from_blockstore(&mut self.db, commit_cid)
+    }
+}
+
+#[test]
+fn test_repo_mst() {
+    use libipld::ipld;
+
+    let mut repo = RepoStore::open_ephemeral().unwrap();
+    let did = Did::from_str("did:plc:dummy").unwrap();
+
+    // basic blob and IPLD record put/get
+    let blob = b"beware the swamp thing";
+    let blob_cid = repo.put_blob(blob).unwrap();
+
+    let record = ipld!({"some-thing": 123});
+    let record_cid = repo.put_ipld(&record).unwrap();
+
+    repo.get_blob(&blob_cid).unwrap().unwrap();
+    repo.get_ipld(&record_cid).unwrap();
+
+    // basic MST get/put
+    let mut map: BTreeMap<String, Cid> = Default::default();
+    let empty_map_cid = repo.mst_from_map(&map).unwrap();
+    assert_eq!(map, repo.mst_to_map(&empty_map_cid).unwrap());
+    assert!(repo
+        .get_mst_record_by_key(&empty_map_cid, "/test.records/44444444444444")
+        .unwrap()
+        .is_none());
+
+    map.insert("/blobs/1".to_string(), blob_cid.clone());
+    map.insert("/blobs/2".to_string(), blob_cid.clone());
+    map.insert(
+        "/test.records/44444444444444".to_string(),
+        record_cid.clone(),
+    );
+    map.insert(
+        "/test.records/22222222222222".to_string(),
+        record_cid.clone(),
+    );
+    let simple_map_cid = repo.mst_from_map(&map).unwrap();
+    assert_eq!(map, repo.mst_to_map(&simple_map_cid).unwrap());
+
+    // create root and commit IPLD nodes
+    let meta_cid = repo.write_metadata(&did).unwrap();
+    let simple_root_cid = repo.write_root(meta_cid, None, simple_map_cid).unwrap();
+    let simple_commit_cid = repo
+        .write_commit(&did, simple_root_cid, "dummy-sig")
+        .unwrap();
+    assert_eq!(
+        Some(record.clone()),
+        repo.get_mst_record_by_key(&simple_map_cid, "/test.records/44444444444444")
+            .unwrap()
+    );
+    assert_eq!(
+        Some(record.clone()),
+        repo.get_atp_record(
+            &did,
+            &Nsid::from_str("test.records").unwrap(),
+            &Tid::from_str("44444444444444").unwrap()
+        )
+        .unwrap()
+    );
+    assert!(repo
+        .get_mst_record_by_key(&simple_map_cid, "/test.records/33333333333333")
+        .unwrap()
+        .is_none());
+    assert!(repo
+        .get_atp_record(
+            &did,
+            &Nsid::from_str("test.records").unwrap(),
+            &Tid::from_str("33333333333333").unwrap()
+        )
+        .unwrap()
+        .is_none());
+    assert_eq!(
+        Some(simple_commit_cid.clone()),
+        repo.lookup_commit(&did).unwrap()
+    );
+
+    map.insert(
+        "/test.records/33333333333333".to_string(),
+        record_cid.clone(),
+    );
+    let simple3_map_cid = repo.mst_from_map(&map).unwrap();
+    let simple3_root_cid = repo
+        .write_root(meta_cid, Some(simple_commit_cid), simple3_map_cid)
+        .unwrap();
+    let simple3_commit_cid = repo
+        .write_commit(&did, simple3_root_cid, "dummy-sig3")
+        .unwrap();
+    assert_eq!(map, repo.mst_to_map(&simple3_map_cid).unwrap());
+    assert_eq!(
+        Some(record.clone()),
+        repo.get_mst_record_by_key(&simple3_map_cid, "/test.records/33333333333333")
+            .unwrap()
+    );
+    assert_eq!(
+        Some(record.clone()),
+        repo.get_atp_record(
+            &did,
+            &Nsid::from_str("test.records").unwrap(),
+            &Tid::from_str("33333333333333").unwrap()
+        )
+        .unwrap()
+    );
+    let commit = repo.get_commit(&simple3_commit_cid).unwrap();
+    assert_eq!(commit.sig.to_vec(), b"dummy-sig3".to_vec());
+    assert_eq!(commit.did, did);
+    assert_eq!(commit.prev, Some(simple_commit_cid));
+    assert_eq!(commit.mst_cid, simple3_map_cid);
+    assert_eq!(
+        Some(simple3_commit_cid.clone()),
+        repo.lookup_commit(&did).unwrap()
+    );
+}
diff --git a/adenosine/src/ucan_p256.rs b/adenosine/src/ucan_p256.rs
new file mode 100644
index 0000000..b8b6cd2
--- /dev/null
+++ b/adenosine/src/ucan_p256.rs
@@ -0,0 +1,85 @@
+//! Implements the UCAN KeyMaterial trait for p256.
+//!
+//! This is needed because the 'ucan-key-support' crate does not include support for this key type.
+
+use anyhow::{anyhow, Result};
+use async_trait::async_trait;
+
+use p256::ecdsa::signature::{Signer, Verifier};
+use p256::ecdsa::{Signature, SigningKey as P256PrivateKey, VerifyingKey as P256PublicKey};
+
+use ucan::crypto::KeyMaterial;
+
+pub use ucan::crypto::{did::P256_MAGIC_BYTES, JwtSignatureAlgorithm};
+
+#[derive(Clone)]
+pub struct P256KeyMaterial(pub P256PublicKey, pub Option<P256PrivateKey>);
+
+#[cfg_attr(target_arch = "wasm32", async_trait(?Send))]
+#[cfg_attr(not(target_arch = "wasm32"), async_trait)]
+impl KeyMaterial for P256KeyMaterial {
+    fn get_jwt_algorithm_name(&self) -> String {
+        JwtSignatureAlgorithm::ES256.to_string()
+    }
+
+    async fn get_did(&self) -> Result<String> {
+        let bytes = [P256_MAGIC_BYTES, &self.0.to_encoded_point(true).to_bytes()].concat();
+        Ok(format!("did:key:z{}", bs58::encode(bytes).into_string()))
+    }
+
+    async fn sign(&self, payload: &[u8]) -> Result<Vec<u8>> {
+        match self.1 {
+            Some(ref private_key) => {
+                let signature = private_key.sign(payload);
+                Ok(signature.to_vec())
+            }
+            None => Err(anyhow!("No private key; cannot sign data")),
+        }
+    }
+
+    async fn verify(&self, payload: &[u8], signature: &[u8]) -> Result<()> {
+        let signature = Signature::try_from(signature)?;
+        self.0
+            .verify(payload, &signature)
+            .map_err(|error| anyhow!("Could not verify signature: {:?}", error))
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::{P256KeyMaterial, Result, P256_MAGIC_BYTES};
+    use p256::ecdsa::{SigningKey as P256PrivateKey, VerifyingKey as P256PublicKey};
+    use ucan::{
+        builder::UcanBuilder,
+        crypto::{did::DidParser, KeyMaterial},
+        ucan::Ucan,
+    };
+
+    pub fn bytes_to_p256_key(bytes: Vec<u8>) -> Result<Box<dyn KeyMaterial>> {
+        let public_key = P256PublicKey::try_from(bytes.as_slice())?;
+        Ok(Box::new(P256KeyMaterial(public_key, None)))
+    }
+
+    #[cfg_attr(not(target_arch = "wasm32"), tokio::test)]
+    async fn it_can_sign_and_verify_a_ucan() {
+        let private_key = P256PrivateKey::random(&mut p256::elliptic_curve::rand_core::OsRng);
+        let public_key = P256PublicKey::from(&private_key);
+
+        let key_material = P256KeyMaterial(public_key, Some(private_key));
+        let token_string = UcanBuilder::default()
+            .issued_by(&key_material)
+            .for_audience(key_material.get_did().await.unwrap().as_str())
+            .with_lifetime(60)
+            .build()
+            .unwrap()
+            .sign()
+            .await
+            .unwrap()
+            .encode()
+            .unwrap();
+
+        let mut did_parser = DidParser::new(&[(P256_MAGIC_BYTES, bytes_to_p256_key)]);
+
+        let ucan = Ucan::try_from_token_string(&token_string).unwrap();
+        ucan.check_signature(&mut did_parser).await.unwrap();
+    }
+}
diff --git a/adenosine/src/vendored.rs b/adenosine/src/vendored.rs
new file mode 100644
index 0000000..74584ad
--- /dev/null
+++ b/adenosine/src/vendored.rs
@@ -0,0 +1 @@
+pub mod iroh_car;
diff --git a/adenosine/src/vendored/iroh_car/README.md b/adenosine/src/vendored/iroh_car/README.md
new file mode 100644
index 0000000..0cad81b
--- /dev/null
+++ b/adenosine/src/vendored/iroh_car/README.md
@@ -0,0 +1,27 @@
+# iroh-car
+
+[CAR file](https://ipld.io/specs/transport/car/) support for iroh. "CAR" stands
+for Content Addressable aRchives. A CAR file typically contains a serialized
+representation of an [IPLD
+DAG](https://docs.ipfs.tech/concepts/merkle-dag/#merkle-directed-acyclic-graphs-dags),
+though it is general enough to contain arbitrary IPLD blocks.
+
+Currently supports only [v1](https://ipld.io/specs/transport/car/carv1/).
+
+It is part of [iroh](https://github.com/n0-computer/iroh).
+
+## License
+
+Licensed under either of Apache License, Version
+2.0 or MIT license at your option.
+
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
+be dual licensed as above, without any additional terms or conditions.
diff --git a/adenosine/src/vendored/iroh_car/error.rs b/adenosine/src/vendored/iroh_car/error.rs
new file mode 100644
index 0000000..1edcefe
--- /dev/null
+++ b/adenosine/src/vendored/iroh_car/error.rs
@@ -0,0 +1,29 @@
+use libipld::cid;
+use thiserror::Error;
+
+/// Car utility error
+#[derive(Debug, Error)]
+pub enum Error {
+    #[error("Failed to parse CAR file: {0}")]
+    Parsing(String),
+    #[error("Invalid CAR file: {0}")]
+    InvalidFile(String),
+    #[error("Io error: {0}")]
+    Io(#[from] std::io::Error),
+    #[error("Cbor encoding error: {0}")]
+    Cbor(#[from] libipld::error::Error),
+    #[error("ld read too large {0}")]
+    LdReadTooLarge(usize),
+}
+
+impl From<cid::Error> for Error {
+    fn from(err: cid::Error) -> Error {
+        Error::Parsing(err.to_string())
+    }
+}
+
+impl From<cid::multihash::Error> for Error {
+    fn from(err: cid::multihash::Error) -> Error {
+        Error::Parsing(err.to_string())
+    }
+}
diff --git a/adenosine/src/vendored/iroh_car/header.rs b/adenosine/src/vendored/iroh_car/header.rs
new file mode 100644
index 0000000..cd0feb7
--- /dev/null
+++ b/adenosine/src/vendored/iroh_car/header.rs
@@ -0,0 +1,107 @@
+#![allow(unused)]
+
+use libipld::cbor::DagCborCodec;
+use libipld::codec::Codec;
+use libipld::ipld;
+use libipld::Cid;
+
+use super::error::Error;
+
+/// A car header.
+#[derive(Debug, Clone, PartialEq, Eq)]
+#[non_exhaustive]
+pub enum CarHeader {
+    V1(CarHeaderV1),
+}
+
+impl CarHeader {
+    pub fn new_v1(roots: Vec<Cid>) -> Self {
+        Self::V1(roots.into())
+    }
+
+    pub fn decode(buffer: &[u8]) -> Result<Self, Error> {
+        let header: CarHeaderV1 = DagCborCodec
+            .decode(buffer)
+            .map_err(|e| Error::Parsing(e.to_string()))?;
+
+        if header.roots.is_empty() {
+            return Err(Error::Parsing("empty CAR file".to_owned()));
+        }
+
+        if header.version != 1 {
+            return Err(Error::InvalidFile(
+                "Only CAR file version 1 is supported".to_string(),
+            ));
+        }
+
+        Ok(CarHeader::V1(header))
+    }
+
+    pub fn encode(&self) -> Result<Vec<u8>, Error> {
+        match self {
+            CarHeader::V1(ref header) => {
+                let res = DagCborCodec.encode(header)?;
+                Ok(res)
+            }
+        }
+    }
+
+    pub fn roots(&self) -> &[Cid] {
+        match self {
+            CarHeader::V1(header) => &header.roots,
+        }
+    }
+
+    pub fn version(&self) -> u64 {
+        match self {
+            CarHeader::V1(_) => 1,
+        }
+    }
+}
+
+/// CAR file header version 1.
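+///
+/// Per the CARv1 spec, this header serializes as a DAG-CBOR map of the form
+/// `{"roots": [...], "version": 1}`.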
+#[derive(Debug, Clone, Default, libipld::DagCbor, PartialEq, Eq)]
+pub struct CarHeaderV1 {
+    #[ipld]
+    pub roots: Vec<Cid>,
+    #[ipld]
+    pub version: u64,
+}
+
+impl CarHeaderV1 {
+    /// Creates a new CAR file header
+    pub fn new(roots: Vec<Cid>, version: u64) -> Self {
+        Self { roots, version }
+    }
+}
+
+impl From<Vec<Cid>> for CarHeaderV1 {
+    fn from(roots: Vec<Cid>) -> Self {
+        Self { roots, version: 1 }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use libipld::cbor::DagCborCodec;
+    use libipld::codec::{Decode, Encode};
+    use multihash::MultihashDigest;
+
+    use super::*;
+
+    #[test]
+    fn symmetric_header_v1() {
+        let digest = multihash::Code::Blake2b256.digest(b"test");
+        let cid = Cid::new_v1(DagCborCodec.into(), digest);
+
+        let header = CarHeaderV1::from(vec![cid]);
+
+        let mut bytes = Vec::new();
+        header.encode(DagCborCodec, &mut bytes).unwrap();
+
+        assert_eq!(
+            CarHeaderV1::decode(DagCborCodec, &mut std::io::Cursor::new(&bytes)).unwrap(),
+            header
+        );
+    }
+}
diff --git a/adenosine/src/vendored/iroh_car/lib.rs b/adenosine/src/vendored/iroh_car/lib.rs
new file mode 100644
index 0000000..d4e5f66
--- /dev/null
+++ b/adenosine/src/vendored/iroh_car/lib.rs
@@ -0,0 +1,11 @@
+//! Implementation of the [car](https://ipld.io/specs/transport/car/) format.
+
+mod error;
+mod header;
+mod reader;
+mod util;
+mod writer;
+
+pub use crate::header::CarHeader;
+pub use crate::reader::CarReader;
+pub use crate::writer::CarWriter;
diff --git a/adenosine/src/vendored/iroh_car/mod.rs b/adenosine/src/vendored/iroh_car/mod.rs
new file mode 100644
index 0000000..b40e046
--- /dev/null
+++ b/adenosine/src/vendored/iroh_car/mod.rs
@@ -0,0 +1,10 @@
+/// Module version of lib.rs
+mod error;
+mod header;
+mod reader;
+mod util;
+mod writer;
+
+pub use header::CarHeader;
+pub use reader::CarReader;
+pub use writer::CarWriter;
diff --git a/adenosine/src/vendored/iroh_car/reader.rs b/adenosine/src/vendored/iroh_car/reader.rs
new file mode 100644
index 0000000..90313f5
--- /dev/null
+++ b/adenosine/src/vendored/iroh_car/reader.rs
@@ -0,0 +1,101 @@
+#![allow(unused)]
+
+use futures::Stream;
+use libipld::Cid;
+use tokio::io::AsyncRead;
+
+use super::{
+    error::Error,
+    header::CarHeader,
+    util::{ld_read, read_node},
+};
+
+/// Reads CAR files that are in a BufReader
+pub struct CarReader<R> {
+    reader: R,
+    header: CarHeader,
+    buffer: Vec<u8>,
+}
+
+impl<R> CarReader<R>
+where
+    R: AsyncRead + Send + Unpin,
+{
+    /// Creates a new CarReader and parses the CarHeader
+    pub async fn new(mut reader: R) -> Result<Self, Error> {
+        let mut buffer = Vec::new();
+
+        match ld_read(&mut reader, &mut buffer).await? {
+            Some(buf) => {
+                let header = CarHeader::decode(buf)?;
+
+                Ok(CarReader {
+                    reader,
+                    header,
+                    buffer,
+                })
+            }
+            None => Err(Error::Parsing(
+                "failed to parse uvarint for header".to_string(),
+            )),
+        }
+    }
+
+    /// Returns the header of this car file.
+    pub fn header(&self) -> &CarHeader {
+        &self.header
+    }
+
+    /// Returns the next IPLD Block in the buffer
+    pub async fn next_block(&mut self) -> Result<Option<(Cid, Vec<u8>)>, Error> {
+        read_node(&mut self.reader, &mut self.buffer).await
+    }
+
+    pub fn stream(self) -> impl Stream<Item = Result<(Cid, Vec<u8>), Error>> {
+        futures::stream::try_unfold(self, |mut this| async move {
+            let maybe_block = read_node(&mut this.reader, &mut this.buffer).await?;
+            Ok(maybe_block.map(|b| (b, this)))
+        })
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use std::io::Cursor;
+
+    use futures::TryStreamExt;
+    use libipld::cbor::DagCborCodec;
+    use libipld::Cid;
+    use multihash::MultihashDigest;
+
+    use super::super::{header::CarHeaderV1, writer::CarWriter};
+
+    use super::*;
+
+    #[tokio::test]
+    async fn car_write_read() {
+        let digest_test = multihash::Code::Blake2b256.digest(b"test");
+        let cid_test = Cid::new_v1(DagCborCodec.into(), digest_test);
+
+        let digest_foo = multihash::Code::Blake2b256.digest(b"foo");
+        let cid_foo = Cid::new_v1(DagCborCodec.into(), digest_foo);
+
+        let header = CarHeader::V1(CarHeaderV1::from(vec![cid_foo]));
+
+        let mut buffer = Vec::new();
+        let mut writer = CarWriter::new(header, &mut buffer);
+        writer.write(cid_test, b"test").await.unwrap();
+        writer.write(cid_foo, b"foo").await.unwrap();
+        writer.finish().await.unwrap();
+
+        let reader = Cursor::new(&buffer);
+        let car_reader = CarReader::new(reader).await.unwrap();
+        let files: Vec<_> = car_reader.stream().try_collect().await.unwrap();
+
+        assert_eq!(files.len(), 2);
+        assert_eq!(files[0].0, cid_test);
+        assert_eq!(files[0].1, b"test");
+        assert_eq!(files[1].0, cid_foo);
+        assert_eq!(files[1].1, b"foo");
+    }
+}
diff --git a/adenosine/src/vendored/iroh_car/util.rs b/adenosine/src/vendored/iroh_car/util.rs
new file mode 100644
index 0000000..90435b1
--- /dev/null
+++ b/adenosine/src/vendored/iroh_car/util.rs
@@ -0,0 +1,95 @@
+use integer_encoding::VarIntAsyncReader;
+use libipld::Cid;
+use tokio::io::{AsyncRead, AsyncReadExt};
+
+use super::error::Error;
+
+/// Maximum size that is used for single node.
+pub(crate) const MAX_ALLOC: usize = 4 * 1024 * 1024;
+
+pub(crate) async fn ld_read<R>(mut reader: R, buf: &mut Vec<u8>) -> Result<Option<&[u8]>, Error>
+where
+    R: AsyncRead + Send + Unpin,
+{
+    let length: usize = match VarIntAsyncReader::read_varint_async(&mut reader).await {
+        Ok(len) => len,
+        Err(e) => {
+            if e.kind() == std::io::ErrorKind::UnexpectedEof {
+                return Ok(None);
+            }
+            return Err(Error::Parsing(e.to_string()));
+        }
+    };
+
+    if length > MAX_ALLOC {
+        return Err(Error::LdReadTooLarge(length));
+    }
+    if length > buf.len() {
+        buf.resize(length, 0);
+    }
+
+    reader
+        .read_exact(&mut buf[..length])
+        .await
+        .map_err(|e| Error::Parsing(e.to_string()))?;
+
+    Ok(Some(&buf[..length]))
+}
+
+pub(crate) async fn read_node<R>(
+    buf_reader: &mut R,
+    buf: &mut Vec<u8>,
+) -> Result<Option<(Cid, Vec<u8>)>, Error>
+where
+    R: AsyncRead + Send + Unpin,
+{
+    if let Some(buf) = ld_read(buf_reader, buf).await? {
+        let mut cursor = std::io::Cursor::new(buf);
+        let c = Cid::read_bytes(&mut cursor)?;
+        let pos = cursor.position() as usize;
+
+        return Ok(Some((c, buf[pos..].to_vec())));
+    }
+    Ok(None)
+}
+
+#[cfg(test)]
+mod tests {
+    use integer_encoding::VarIntAsyncWriter;
+    use tokio::io::{AsyncWrite, AsyncWriteExt};
+
+    use super::*;
+
+    async fn ld_write<'a, W>(writer: &mut W, bytes: &[u8]) -> Result<(), Error>
+    where
+        W: AsyncWrite + Send + Unpin,
+    {
+        writer.write_varint_async(bytes.len()).await?;
+        writer.write_all(bytes).await?;
+        writer.flush().await?;
+        Ok(())
+    }
+
+    #[tokio::test]
+    async fn ld_read_write_good() {
+        let mut buffer = Vec::<u8>::new();
+        ld_write(&mut buffer, b"test bytes").await.unwrap();
+        let reader = std::io::Cursor::new(buffer);
+
+        let mut buffer = vec![1u8; 1024];
+        let read = ld_read(reader, &mut buffer).await.unwrap().unwrap();
+        assert_eq!(read, b"test bytes");
+    }
+
+    #[tokio::test]
+    async fn ld_read_write_fail() {
+        let mut buffer = Vec::<u8>::new();
+        let size = MAX_ALLOC + 1;
+        ld_write(&mut buffer, &vec![2u8; size]).await.unwrap();
+        let reader = std::io::Cursor::new(buffer);
+
+        let mut buffer = vec![1u8; 1024];
+        let read = ld_read(reader, &mut buffer).await;
+        assert!(matches!(read, Err(Error::LdReadTooLarge(_))));
+    }
+}
diff --git a/adenosine/src/vendored/iroh_car/writer.rs b/adenosine/src/vendored/iroh_car/writer.rs
new file mode 100644
index 0000000..b7e25d3
--- /dev/null
+++ b/adenosine/src/vendored/iroh_car/writer.rs
@@ -0,0 +1,73 @@
+#![allow(unused)]
+
+use integer_encoding::VarIntAsyncWriter;
+use libipld::Cid;
+use tokio::io::{AsyncWrite, AsyncWriteExt};
+
+use super::{error::Error, header::CarHeader};
+
+#[derive(Debug)]
+pub struct CarWriter<W> {
+    header: CarHeader,
+    writer: W,
+    cid_buffer: Vec<u8>,
+    is_header_written: bool,
+}
+
+impl<W> CarWriter<W>
+where
+    W: AsyncWrite + Send + Unpin,
+{
+    pub fn new(header: CarHeader, writer: W) -> Self {
+        CarWriter {
+            header,
+            writer,
+            cid_buffer: Vec::new(),
+            is_header_written: false,
+        }
+    }
+
+    /// Writes header and stream of data to writer in Car format.
+    pub async fn write<T>(&mut self, cid: Cid, data: T) -> Result<(), Error>
+    where
+        T: AsRef<[u8]>,
+    {
+        if !self.is_header_written {
+            // Write header bytes
+            let header_bytes = self.header.encode()?;
+            self.writer.write_varint_async(header_bytes.len()).await?;
+            self.writer.write_all(&header_bytes).await?;
+            self.is_header_written = true;
+        }
+
+        // Write the given block.
+        self.cid_buffer.clear();
+        cid.write_bytes(&mut self.cid_buffer).expect("vec write");
+
+        let data = data.as_ref();
+        let len = self.cid_buffer.len() + data.len();
+
+        self.writer.write_varint_async(len).await?;
+        self.writer.write_all(&self.cid_buffer).await?;
+        self.writer.write_all(data).await?;
+
+        Ok(())
+    }
+
+    /// Finishes writing, including flushing, and returns the writer.
+    pub async fn finish(mut self) -> Result<W, Error> {
+        self.flush().await?;
+        Ok(self.writer)
+    }
+
+    /// Flushes the underlying writer.
+    pub async fn flush(&mut self) -> Result<(), Error> {
+        self.writer.flush().await?;
+        Ok(())
+    }
+
+    /// Consumes the [`CarWriter`] and returns the underlying writer.
+    pub fn into_inner(self) -> W {
+        self.writer
+    }
+}
-- 
cgit v1.2.3