aboutsummaryrefslogtreecommitdiffstats
path: root/adenosine
diff options
context:
space:
mode:
authorbryan newbold <bnewbold@robocracy.org>2023-02-19 15:37:20 -0800
committerbryan newbold <bnewbold@robocracy.org>2023-02-19 15:37:20 -0800
commitb8ba815b4cafdff48694d14c994e862738d342ef (patch)
tree5719721c9f32ad242622d44eea7ec7212ce49312 /adenosine
parent2c3977da3ee2229721d4551982537235066e22c8 (diff)
downloadadenosine-b8ba815b4cafdff48694d14c994e862738d342ef.tar.gz
adenosine-b8ba815b4cafdff48694d14c994e862738d342ef.zip
move a bunch of code from pds to common
Diffstat (limited to 'adenosine')
-rw-r--r--adenosine/Cargo.toml16
-rw-r--r--adenosine/src/car.rs100
-rw-r--r--adenosine/src/crypto.rs280
-rw-r--r--adenosine/src/did.rs368
-rw-r--r--adenosine/src/lib.rs7
-rw-r--r--adenosine/src/mst.rs399
-rw-r--r--adenosine/src/repo.rs466
-rw-r--r--adenosine/src/ucan_p256.rs85
-rw-r--r--adenosine/src/vendored.rs1
-rw-r--r--adenosine/src/vendored/iroh_car/README.md27
-rw-r--r--adenosine/src/vendored/iroh_car/error.rs29
-rw-r--r--adenosine/src/vendored/iroh_car/header.rs107
-rw-r--r--adenosine/src/vendored/iroh_car/lib.rs11
-rw-r--r--adenosine/src/vendored/iroh_car/mod.rs10
-rw-r--r--adenosine/src/vendored/iroh_car/reader.rs101
-rw-r--r--adenosine/src/vendored/iroh_car/util.rs95
-rw-r--r--adenosine/src/vendored/iroh_car/writer.rs73
17 files changed, 2175 insertions, 0 deletions
diff --git a/adenosine/Cargo.toml b/adenosine/Cargo.toml
index a05aa78..456b0fe 100644
--- a/adenosine/Cargo.toml
+++ b/adenosine/Cargo.toml
@@ -24,6 +24,22 @@ lazy_static = "1"
data-encoding = "2"
rand = "0.8"
time = { version = "0.3", features = ["formatting"] }
+k256 = { version = "0.11", features = ["ecdsa"] }
+p256 = { version = "0.11", features = ["ecdsa"] }
+bs58 = "0.4"
+libipld = "0.14"
+ipfs-sqlite-block-store = "0.13"
+tokio = { version = "1", features = ["full"] }
+futures = "0.3"
+sha256 = "1"
+multibase = "0.9"
+ucan = "0.7.0-alpha.1"
+async-trait = "0.1"
+
+# for vendored iroh-car
+thiserror = "1.0"
+integer-encoding = { version = "3", features = ["tokio_async"] }
+multihash = "0.16"
# uncertain about these...
anyhow = "1"
diff --git a/adenosine/src/car.rs b/adenosine/src/car.rs
new file mode 100644
index 0000000..22f83bc
--- /dev/null
+++ b/adenosine/src/car.rs
@@ -0,0 +1,100 @@
+use anyhow::Result;
+
+use crate::vendored::iroh_car::{CarHeader, CarReader, CarWriter};
+use futures::TryStreamExt;
+use ipfs_sqlite_block_store::BlockStore;
+use libipld::{Block, Cid};
+use std::path::PathBuf;
+use tokio::fs::File;
+use tokio::io::{AsyncRead, BufReader};
+
+/// Synchronous wrapper for loading in-memory CAR bytes (`&[u8]`) into a blockstore.
+///
+/// Does not do any pinning, even temporarily. Returns the root CID indicated in the CAR file
+/// header.
+pub fn load_car_bytes_to_blockstore(
+ db: &mut BlockStore<libipld::DefaultParams>,
+ car_bytes: &[u8],
+) -> Result<Cid> {
+ let rt = tokio::runtime::Builder::new_current_thread()
+ .enable_all()
+ .build()?;
+ rt.block_on(inner_car_bytes_loader(db, car_bytes))
+}
+
+/// Synchronous wrapper for loading on-disk CAR file (by path) into a blockstore.
+///
+/// Does not do any pinning, even temporarily. Returns the root CID indicated in the CAR file
+/// header.
+pub fn load_car_path_to_blockstore(
+ db: &mut BlockStore<libipld::DefaultParams>,
+ car_path: &PathBuf,
+) -> Result<Cid> {
+ let rt = tokio::runtime::Builder::new_current_thread()
+ .enable_all()
+ .build()?;
+ rt.block_on(inner_car_path_loader(db, car_path))
+}
+
+pub fn read_car_bytes_from_blockstore(
+ db: &mut BlockStore<libipld::DefaultParams>,
+ root: &Cid,
+) -> Result<Vec<u8>> {
+ let rt = tokio::runtime::Builder::new_current_thread()
+ .enable_all()
+ .build()?;
+ rt.block_on(inner_car_bytes_reader(db, root))
+}
+
+async fn inner_car_bytes_loader(
+ db: &mut BlockStore<libipld::DefaultParams>,
+ car_bytes: &[u8],
+) -> Result<Cid> {
+ let car_reader = CarReader::new(car_bytes).await?;
+ inner_car_loader(db, car_reader).await
+}
+
+async fn inner_car_path_loader(
+ db: &mut BlockStore<libipld::DefaultParams>,
+ car_path: &PathBuf,
+) -> Result<Cid> {
+ let car_reader = {
+ let file = File::open(car_path).await?;
+ let buf_reader = BufReader::new(file);
+ CarReader::new(buf_reader).await?
+ };
+ inner_car_loader(db, car_reader).await
+}
+
+async fn inner_car_loader<R: AsyncRead + Send + Unpin>(
+ db: &mut BlockStore<libipld::DefaultParams>,
+ car_reader: CarReader<R>,
+) -> Result<Cid> {
+ let car_header = car_reader.header().clone();
+ car_reader
+ .stream()
+ .try_for_each(|(cid, raw)| {
+ // TODO: error handling here instead of unwrap (?)
+ let block = Block::new(cid, raw).unwrap();
+ db.put_block(block, None).unwrap();
+ futures::future::ready(Ok(()))
+ })
+ .await?;
+ Ok(car_header.roots()[0])
+}
+
+async fn inner_car_bytes_reader(
+ db: &mut BlockStore<libipld::DefaultParams>,
+ root: &Cid,
+) -> Result<Vec<u8>> {
+ let car_header = CarHeader::new_v1(vec![*root]);
+ let buf: Vec<u8> = Default::default();
+ let mut car_writer = CarWriter::new(car_header, buf);
+
+ let cid_list = db.get_descendants::<Vec<_>>(root)?;
+ for cid in cid_list {
+ let block = db.get_block(&cid)?.expect("block content");
+ car_writer.write(cid, block).await?;
+ }
+ Ok(car_writer.finish().await?)
+}
diff --git a/adenosine/src/crypto.rs b/adenosine/src/crypto.rs
new file mode 100644
index 0000000..3647928
--- /dev/null
+++ b/adenosine/src/crypto.rs
@@ -0,0 +1,280 @@
+use crate::identifiers::Did;
+use crate::ucan_p256::P256KeyMaterial;
+use anyhow::{anyhow, ensure, Result};
+use p256::ecdsa::signature::{Signer, Verifier};
+use std::str::FromStr;
+use ucan::builder::UcanBuilder;
+
+// Need to:
+//
+// - generate new random keypair
+// - generate keypair from seed
+// - read/write secret keypair (eg, for PDS config loading)
+// - sign bytes (and ipld?) using keypair
+// - verify signature bytes (and ipld?) using pubkey
+
+const MULTICODE_P256_BYTES: [u8; 2] = [0x80, 0x24];
+const MULTICODE_K256_BYTES: [u8; 2] = [0xe7, 0x01];
+
+#[derive(Clone, PartialEq, Eq)]
+pub struct KeyPair {
+ public: p256::ecdsa::VerifyingKey,
+ secret: p256::ecdsa::SigningKey,
+}
+
+#[derive(Clone, PartialEq, Eq)]
+pub enum PubKey {
+ P256(p256::ecdsa::VerifyingKey),
+ K256(k256::ecdsa::VerifyingKey),
+}
+
+impl KeyPair {
+ pub fn new_random() -> Self {
+ let signing = p256::ecdsa::SigningKey::random(&mut p256::elliptic_curve::rand_core::OsRng);
+ KeyPair {
+ public: signing.verifying_key(),
+ secret: signing,
+ }
+ }
+
+ pub fn from_bytes(bytes: &[u8]) -> Result<KeyPair> {
+ let signing = p256::ecdsa::SigningKey::from_bytes(bytes)?;
+ Ok(KeyPair {
+ public: signing.verifying_key(),
+ secret: signing,
+ })
+ }
+
+ pub fn to_bytes(&self) -> Vec<u8> {
+ self.secret.to_bytes().to_vec()
+ }
+
+ pub fn pubkey(&self) -> PubKey {
+ PubKey::P256(self.public)
+ }
+
+ pub fn sign_bytes(&self, data: &[u8]) -> String {
+ let sig = self.secret.sign(data);
+ data_encoding::BASE64URL_NOPAD.encode(&sig.to_vec())
+ }
+
+ fn ucan_keymaterial(&self) -> P256KeyMaterial {
+ P256KeyMaterial(self.public, Some(self.secret.clone()))
+ }
+
+ /// This is currently just an un-validated token; we don't actually verify these.
+ pub fn ucan(&self, did: &Did) -> Result<String> {
+ let key_material = self.ucan_keymaterial();
+ let rt = tokio::runtime::Builder::new_current_thread()
+ .enable_all()
+ .build()?;
+ rt.block_on(build_ucan(key_material, did))
+ }
+
+ pub fn to_hex(&self) -> String {
+ data_encoding::HEXUPPER.encode(&self.to_bytes())
+ }
+
+ pub fn from_hex(hex: &str) -> Result<Self> {
+ Self::from_bytes(&data_encoding::HEXUPPER.decode(hex.as_bytes())?)
+ }
+}
+
+async fn build_ucan(key_material: P256KeyMaterial, did: &Did) -> Result<String> {
+ let token_string = UcanBuilder::default()
+ .issued_by(&key_material)
+ .for_audience(did)
+ .with_nonce()
+ .with_lifetime(60 * 60 * 24 * 90)
+ .build()?
+ .sign()
+ .await?
+ .encode()?;
+ Ok(token_string)
+}
+
+impl PubKey {
+ pub fn verify_bytes(&self, data: &[u8], sig: &str) -> Result<()> {
+ let sig_bytes = data_encoding::BASE64URL_NOPAD.decode(sig.as_bytes())?;
+ // TODO: better way other than this re-encoding?
+ let sig_hex = data_encoding::HEXUPPER.encode(&sig_bytes);
+ match self {
+ PubKey::P256(key) => {
+ let sig = p256::ecdsa::Signature::from_str(&sig_hex)?;
+ Ok(key.verify(data, &sig)?)
+ }
+ PubKey::K256(key) => {
+ let sig = k256::ecdsa::Signature::from_str(&sig_hex)?;
+ Ok(key.verify(data, &sig)?)
+ }
+ }
+ }
+
+ pub fn key_type(&self) -> String {
+ match self {
+ PubKey::P256(_) => "EcdsaSecp256r1VerificationKey2019",
+ PubKey::K256(_) => "EcdsaSecp256k1VerificationKey2019",
+ }
+ .to_string()
+ }
+
+ /// This public verification key encoded as base58btc multibase string, not 'compressed', as
+ /// included in DID documents ('publicKeyMultibase').
+ ///
+ /// Note that the did:key serialization does 'compress' the key into a smaller size.
+ pub fn to_multibase(&self) -> String {
+ let mut bytes: Vec<u8> = vec![];
+ match self {
+ PubKey::P256(key) => {
+ bytes.extend_from_slice(&MULTICODE_P256_BYTES);
+ bytes.extend_from_slice(&key.to_encoded_point(false).to_bytes());
+ }
+ PubKey::K256(key) => {
+ bytes.extend_from_slice(&MULTICODE_K256_BYTES);
+ bytes.extend_from_slice(&key.to_bytes());
+ }
+ }
+ multibase::encode(multibase::Base::Base58Btc, &bytes)
+ }
+
+ /// Serializes as a 'did:key' string.
+ pub fn to_did_key(&self) -> String {
+ let mut bytes: Vec<u8> = vec![];
+ match self {
+ PubKey::P256(key) => {
+ bytes.extend_from_slice(&MULTICODE_P256_BYTES);
+ bytes.extend_from_slice(&key.to_encoded_point(true).to_bytes());
+ }
+ PubKey::K256(key) => {
+ bytes.extend_from_slice(&MULTICODE_K256_BYTES);
+ bytes.extend_from_slice(&key.to_bytes());
+ }
+ }
+ format!(
+ "did:key:{}",
+ multibase::encode(multibase::Base::Base58Btc, &bytes)
+ )
+ }
+
+ pub fn from_did_key(did_key: &str) -> Result<Self> {
+ if !did_key.starts_with("did:key:") || did_key.len() < 20 {
+ return Err(anyhow!("does not look like a did:key: {}", did_key));
+ }
+ let (key_type, bytes) = multibase::decode(&did_key[8..])?;
+ ensure!(
+ key_type == multibase::Base::Base58Btc,
+ "base58btc-encoded key"
+ );
+ // prefix bytes
+ let prefix: [u8; 2] = [bytes[0], bytes[1]];
+ match prefix {
+ MULTICODE_K256_BYTES => Ok(PubKey::K256(k256::ecdsa::VerifyingKey::from_sec1_bytes(
+ &bytes[2..],
+ )?)),
+ MULTICODE_P256_BYTES => Ok(PubKey::P256(p256::ecdsa::VerifyingKey::from_sec1_bytes(
+ &bytes[2..],
+ )?)),
+ _ => Err(anyhow!(
+ "key type (multicodec) not handled when parsing DID key: {}",
+ did_key
+ )),
+ }
+ }
+
+ pub fn to_bytes(&self) -> Vec<u8> {
+ match self {
+ PubKey::P256(key) => key.to_encoded_point(true).to_bytes().to_vec(),
+ PubKey::K256(key) => key.to_bytes().to_vec(),
+ }
+ }
+
+ pub fn ucan_keymaterial(&self) -> P256KeyMaterial {
+ match self {
+ PubKey::P256(key) => P256KeyMaterial(*key, None),
+ PubKey::K256(_key) => unimplemented!(),
+ }
+ }
+}
+
+impl std::fmt::Display for PubKey {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}", self.to_did_key())
+ }
+}
+
+#[test]
+fn test_did_secp256k1_p256() {
+ // did:key secp256k1 test vectors from W3C
+ // https://github.com/w3c-ccg/did-method-key/blob/main/test-vectors/secp256k1.json
+ // via atproto repo
+ let pairs = vec![
+ (
+ "9085d2bef69286a6cbb51623c8fa258629945cd55ca705cc4e66700396894e0c",
+ "did:key:zQ3shokFTS3brHcDQrn82RUDfCZESWL1ZdCEJwekUDPQiYBme",
+ ),
+ (
+ "f0f4df55a2b3ff13051ea814a8f24ad00f2e469af73c363ac7e9fb999a9072ed",
+ "did:key:zQ3shtxV1FrJfhqE1dvxYRcCknWNjHc3c5X1y3ZSoPDi2aur2",
+ ),
+ (
+ "6b0b91287ae3348f8c2f2552d766f30e3604867e34adc37ccbb74a8e6b893e02",
+ "did:key:zQ3shZc2QzApp2oymGvQbzP8eKheVshBHbU4ZYjeXqwSKEn6N",
+ ),
+ (
+ "c0a6a7c560d37d7ba81ecee9543721ff48fea3e0fb827d42c1868226540fac15",
+ "did:key:zQ3shadCps5JLAHcZiuX5YUtWHHL8ysBJqFLWvjZDKAWUBGzy",
+ ),
+ (
+ "175a232d440be1e0788f25488a73d9416c04b6f924bea6354bf05dd2f1a75133",
+ "did:key:zQ3shptjE6JwdkeKN4fcpnYQY3m9Cet3NiHdAfpvSUZBFoKBj",
+ ),
+ ];
+
+ // test decode/encode did:key
+ for (_hex, did) in pairs.iter() {
+ assert_eq!(did, &PubKey::from_did_key(did).unwrap().to_did_key());
+ }
+
+ let p256_dids = vec![
+ "did:key:zDnaerx9CtbPJ1q36T5Ln5wYt3MQYeGRG5ehnPAmxcf5mDZpv",
+ "did:key:zDnaerDaTF5BXEavCrfRZEk316dpbLsfPDZ3WJ5hRTPFU2169",
+ ];
+ for did in p256_dids {
+ assert_eq!(did, &PubKey::from_did_key(did).unwrap().to_did_key());
+ }
+}
+
+#[test]
+fn test_did_plc_examples() {
+ // https://atproto.com/specs/did-plc
+ let example_dids = vec![
+ "did:key:zDnaejYFhgFiVF89LhJ4UipACLKuqo6PteZf8eKDVKeExXUPk",
+ "did:key:zDnaeSezF2TgCD71b5DiiFyhHQwKAfsBVqTTHRMvP597Z5Ztn",
+ "did:key:zDnaeh9v2RmcMo13Du2d6pjUf5bZwtauYxj3n9dYjw4EZUAR7",
+ "did:key:zDnaedvvAsDE6H3BDdBejpx9ve2Tz95cymyCAKF66JbyMh1Lt",
+ ];
+
+ for did in example_dids {
+ assert_eq!(did, &PubKey::from_did_key(did).unwrap().to_did_key());
+ }
+}
+
+#[test]
+fn test_signing() {
+ let msg = b"you have found the secret message";
+ let keypair = KeyPair::new_random();
+ let sig_str = keypair.sign_bytes(msg);
+ keypair.pubkey().verify_bytes(msg, &sig_str).unwrap();
+
+ // and with pubkey that has been serialized/deserialized
+ let did_key = keypair.pubkey().to_did_key();
+ let pubkey = PubKey::from_did_key(&did_key).unwrap();
+ pubkey.verify_bytes(msg, &sig_str).unwrap();
+}
+
+#[test]
+fn test_keypair_hex() {
+ let before = KeyPair::new_random();
+ let after = KeyPair::from_hex(&before.to_hex()).unwrap();
+ assert!(before == after);
+}
diff --git a/adenosine/src/did.rs b/adenosine/src/did.rs
new file mode 100644
index 0000000..c7d7d10
--- /dev/null
+++ b/adenosine/src/did.rs
@@ -0,0 +1,368 @@
+use crate::crypto::{KeyPair, PubKey};
+/// DID and 'did:plc' stuff
+///
+/// This is currently a partial/skeleton implementation, which only generates local/testing did:plc
+/// DIDs (and DID documents) using a single 'create' genesis block. Key rotation, etc, is not
+/// supported.
+use crate::identifiers::Did;
+use anyhow::Result;
+use libipld::cbor::DagCborCodec;
+use libipld::multihash::Code;
+use libipld::{Block, Cid, DagCbor, DefaultParams};
+use serde_json::json;
+use std::str::FromStr;
+
+#[allow(non_snake_case)]
+#[derive(Debug, DagCbor, PartialEq, Eq, Clone)]
+pub struct CreateOp {
+ #[ipld(rename = "type")]
+ pub op_type: String,
+ pub signingKey: String,
+ pub recoveryKey: String,
+ pub username: String,
+ pub service: String,
+ pub prev: Option<Cid>,
+ pub sig: String,
+}
+
+#[allow(non_snake_case)]
+#[derive(Debug, DagCbor, PartialEq, Eq, Clone)]
+struct UnsignedCreateOp {
+ #[ipld(rename = "type")]
+ pub op_type: String,
+ pub signingKey: String,
+ pub recoveryKey: String,
+ pub username: String,
+ pub service: String,
+ pub prev: Option<Cid>,
+}
+
+impl UnsignedCreateOp {
+ fn into_signed(self, sig: String) -> CreateOp {
+ CreateOp {
+ op_type: self.op_type,
+ prev: self.prev,
+ sig,
+ signingKey: self.signingKey,
+ recoveryKey: self.recoveryKey,
+ username: self.username,
+ service: self.service,
+ }
+ }
+}
+
+impl CreateOp {
+ pub fn new(
+ username: String,
+ atp_pds: String,
+ keypair: &KeyPair,
+ recovery_key: Option<String>,
+ ) -> Self {
+ let signing_key = keypair.pubkey().to_did_key();
+ let recovery_key = recovery_key.unwrap_or(signing_key.clone());
+ let unsigned = UnsignedCreateOp {
+ op_type: "create".to_string(),
+ prev: None,
+ signingKey: signing_key,
+ recoveryKey: recovery_key,
+ username,
+ service: atp_pds,
+ };
+ let block = Block::<DefaultParams>::encode(DagCborCodec, Code::Sha2_256, &unsigned)
+ .expect("encode DAG-CBOR");
+ let sig = keypair.sign_bytes(block.data());
+ unsigned.into_signed(sig)
+ }
+
+ pub fn did_plc(&self) -> Did {
+ // dump DAG-CBOR
+ let block = Block::<DefaultParams>::encode(DagCborCodec, Code::Sha2_256, self)
+ .expect("encode DAG-CBOR");
+ let bin = block.data();
+ // hash SHA-256
+ let digest_bytes: Vec<u8> = data_encoding::HEXLOWER
+ .decode(sha256::digest(bin).as_bytes())
+ .expect("SHA-256 digest is always hex string");
+ // encode base32
+ let digest_b32 = data_encoding::BASE32_NOPAD
+ .encode(&digest_bytes)
+ .to_ascii_lowercase();
+ // truncate
+ Did::from_str(&format!("did:plc:{}", &digest_b32[0..24])).unwrap()
+ }
+
+ fn into_unsigned(self) -> UnsignedCreateOp {
+ UnsignedCreateOp {
+ op_type: self.op_type,
+ prev: self.prev,
+ signingKey: self.signingKey,
+ recoveryKey: self.recoveryKey,
+ username: self.username,
+ service: self.service,
+ }
+ }
+
+ pub fn did_doc(&self) -> serde_json::Value {
+ let meta = DidDocMeta {
+ did: self.did_plc(),
+ // TODO
+ user_url: format!("https://{}", self.username),
+ service_url: self.service.clone(),
+ recovery_didkey: self.recoveryKey.clone(),
+ signing_didkey: self.signingKey.clone(),
+ };
+ meta.did_doc()
+ }
+
+ /// This method only makes sense on the "genesis" create object
+ pub fn verify_self(&self) -> Result<()> {
+ let key = PubKey::from_did_key(&self.signingKey)?;
+ let unsigned = {
+ let cpy = (*self).clone();
+ cpy.into_unsigned()
+ };
+ //println!("unsigned: {:?}", unsigned);
+ let block = Block::<DefaultParams>::encode(DagCborCodec, Code::Sha2_256, &unsigned)
+ .expect("encode DAG-CBOR");
+ key.verify_bytes(block.data(), &self.sig)
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct DidDocMeta {
+ pub did: Did,
+ pub user_url: String,
+ pub service_url: String,
+ pub recovery_didkey: String,
+ pub signing_didkey: String,
+}
+
+impl DidDocMeta {
+ pub fn did_doc(&self) -> serde_json::Value {
+ let key_type = "EcdsaSecp256r1VerificationKey2019";
+ json!({
+ "@context": [
+ "https://www.w3.org/ns/did/v1",
+ "https://w3id.org/security/suites/ecdsa-2019/v1"
+ ],
+ "id": self.did.to_string(),
+ "alsoKnownAs": [ self.user_url ],
+ "verificationMethod": [
+ {
+ "id": format!("{}#signingKey)", self.did),
+ "type": key_type,
+ "controller": self.did.to_string(),
+ "publicKeyMultibase": self.signing_didkey
+ },
+ {
+ "id": format!("{}#recoveryKey)", self.did),
+ "type": key_type,
+ "controller": self.did.to_string(),
+ "publicKeyMultibase": self.recovery_didkey
+ }
+ ],
+ "assertionMethod": [ format!("{}#signingKey)", self.did)],
+ "capabilityInvocation": [ format!("{}#signingKey)", self.did) ],
+ "capabilityDelegation": [ format!("{}#signingKey)", self.did) ],
+ "service": [
+ {
+ "id": format!("{}#atpPds)", self.did),
+ "type": "AtpPersonalDataServer",
+ "serviceEndpoint": self.service_url
+ }
+ ]
+ })
+ }
+}
+
+#[test]
+fn test_debug_did_signing() {
+ let op = UnsignedCreateOp {
+ op_type: "create".to_string(),
+ signingKey: "did:key:zDnaeSWVQyW8DSF6mDwT9j8YrzDWDs8h6PPjuTcipzG84iCBE".to_string(),
+ recoveryKey: "did:key:zDnaeSWVQyW8DSF6mDwT9j8YrzDWDs8h6PPjuTcipzG84iCBE".to_string(),
+ username: "carla.test".to_string(),
+ service: "http://localhost:2583".to_string(),
+ prev: None,
+ };
+ let block =
+ Block::<DefaultParams>::encode(DagCborCodec, Code::Sha2_256, &op).expect("encode DAG-CBOR");
+ let op_bytes = block.data();
+
+ let _key_bytes = vec![
+ 4, 30, 224, 8, 198, 84, 108, 1, 58, 193, 91, 176, 212, 45, 4, 36, 28, 252, 242, 95, 20, 85,
+ 87, 246, 79, 134, 42, 113, 5, 216, 238, 235, 21, 146, 16, 88, 239, 217, 36, 252, 148, 197,
+ 203, 22, 29, 2, 52, 152, 77, 208, 21, 88, 2, 85, 219, 212, 148, 139, 104, 200, 15, 119, 46,
+ 178, 186,
+ ];
+
+ let pub_key =
+ PubKey::from_did_key("did:key:zDnaeSWVQyW8DSF6mDwT9j8YrzDWDs8h6PPjuTcipzG84iCBE").unwrap();
+ //let keypair = KeyPair::from_bytes(&key_bytes).unwrap();
+ //assert_eq!(keypair.to_bytes(), key_bytes);
+
+ let encoded_bytes = vec![
+ 166, 100, 112, 114, 101, 118, 246, 100, 116, 121, 112, 101, 102, 99, 114, 101, 97, 116,
+ 101, 103, 115, 101, 114, 118, 105, 99, 101, 117, 104, 116, 116, 112, 58, 47, 47, 108, 111,
+ 99, 97, 108, 104, 111, 115, 116, 58, 50, 53, 56, 51, 104, 117, 115, 101, 114, 110, 97, 109,
+ 101, 106, 99, 97, 114, 108, 97, 46, 116, 101, 115, 116, 106, 115, 105, 103, 110, 105, 110,
+ 103, 75, 101, 121, 120, 57, 100, 105, 100, 58, 107, 101, 121, 58, 122, 68, 110, 97, 101,
+ 83, 87, 86, 81, 121, 87, 56, 68, 83, 70, 54, 109, 68, 119, 84, 57, 106, 56, 89, 114, 122,
+ 68, 87, 68, 115, 56, 104, 54, 80, 80, 106, 117, 84, 99, 105, 112, 122, 71, 56, 52, 105, 67,
+ 66, 69, 107, 114, 101, 99, 111, 118, 101, 114, 121, 75, 101, 121, 120, 57, 100, 105, 100,
+ 58, 107, 101, 121, 58, 122, 68, 110, 97, 101, 83, 87, 86, 81, 121, 87, 56, 68, 83, 70, 54,
+ 109, 68, 119, 84, 57, 106, 56, 89, 114, 122, 68, 87, 68, 115, 56, 104, 54, 80, 80, 106,
+ 117, 84, 99, 105, 112, 122, 71, 56, 52, 105, 67, 66, 69,
+ ];
+ assert_eq!(encoded_bytes, op_bytes);
+
+ let _sig_bytes = vec![
+ 131, 115, 47, 143, 89, 68, 79, 73, 121, 198, 70, 76, 91, 64, 171, 25, 18, 139, 244, 94,
+ 123, 224, 205, 32, 241, 174, 36, 120, 199, 206, 199, 202, 216, 154, 2, 10, 247, 101, 138,
+ 170, 85, 95, 142, 164, 50, 203, 92, 23, 247, 218, 231, 224, 78, 68, 55, 104, 243, 145, 243,
+ 4, 219, 102, 44, 227,
+ ];
+ let sig_str =
+ "g3Mvj1lET0l5xkZMW0CrGRKL9F574M0g8a4keMfOx8rYmgIK92WKqlVfjqQyy1wX99rn4E5EN2jzkfME22Ys4w";
+
+ pub_key.verify_bytes(op_bytes, sig_str).unwrap();
+
+ let signed = op.into_signed(sig_str.to_string());
+ signed.verify_self().unwrap();
+}
+
+/*
+------------------------------------
+OP:
+{
+ type: 'create',
+ signingKey: 'did:key:zDnaesoxZb8mLjf16e4PWsNqLLj9uWM9TQ8nNwxqErDmKXLAN',
+ recoveryKey: 'did:key:zDnaesoxZb8mLjf16e4PWsNqLLj9uWM9TQ8nNwxqErDmKXLAN',
+ username: 'carla.test',
+ service: 'http://localhost:2583',
+ prev: null,
+ sig: 'VYGxmZs-D5830YdQSNrZpbxVyOPB4nCJtO-x0XElt35AE5wjvJFa2vJu8qjURG6TvEbMvfbekDo_eXEMhdPWdg'
+}
+ENCODED:
+{"0":167,"1":99,"2":115,"3":105,"4":103,"5":120,"6":86,"7":86,"8":89,"9":71,"10":120,"11":109,"12":90,"13":115,"14":45,"15":68,"16":53,"17":56,"18":51,"19":48,"20":89,"21":100,"22":81,"23":83,"24":78,"25":114,"26":90,"27":112,"28":98,"29":120,"30":86,"31":121,"32":79,"33":80,"34":66,"35":52,"36":110,"37":67,"38":74,"39":116,"40":79,"41":45,"42":120,"43":48,"44":88,"45":69,"46":108,"47":116,"48":51,"49":53,"50":65,"51":69,"52":53,"53":119,"54":106,"55":118,"56":74,"57":70,"58":97,"59":50,"60":118,"61":74,"62":117,"63":56,"64":113,"65":106,"66":85,"67":82,"68":71,"69":54,"70":84,"71":118,"72":69,"73":98,"74":77,"75":118,"76":102,"77":98,"78":101,"79":107,"80":68,"81":111,"82":95,"83":101,"84":88,"85":69,"86":77,"87":104,"88":100,"89":80,"90":87,"91":100,"92":103,"93":100,"94":112,"95":114,"96":101,"97":118,"98":246,"99":100,"100":116,"101":121,"102":112,"103":101,"104":102,"105":99,"106":114,"107":101,"108":97,"109":116,"110":101,"111":103,"112":115,"113":101,"114":114,"115":118,"116":105,"117":99,"118":101,"119":117,"120":104,"121":116,"122":116,"123":112,"124":58,"125":47,"126":47,"127":108,"128":111,"129":99,"130":97,"131":108,"132":104,"133":111,"134":115,"135":116,"136":58,"137":50,"138":53,"139":56,"140":51,"141":104,"142":117,"143":115,"144":101,"145":114,"146":110,"147":97,"148":109,"149":101,"150":106,"151":99,"152":97,"153":114,"154":108,"155":97,"156":46,"157":116,"158":101,"159":115,"160":116,"161":106,"162":115,"163":105,"164":103,"165":110,"166":105,"167":110,"168":103,"169":75,"170":101,"171":121,"172":120,"173":57,"174":100,"175":105,"176":100,"177":58,"178":107,"179":101,"180":121,"181":58,"182":122,"183":68,"184":110,"185":97,"186":101,"187":115,"188":111,"189":120,"190":90,"191":98,"192":56,"193":109,"194":76,"195":106,"196":102,"197":49,"198":54,"199":101,"200":52,"201":80,"202":87,"203":115,"204":78,"205":113,"206":76,"207":76,"208":106,"209":57,"210":117,"211":87,"212":77,"213":57,"214":84,"215":81,"216":56,"217":110,"218":78,"219":119,"220":120
,"221":113,"222":69,"223":114,"224":68,"225":109,"226":75,"227":88,"228":76,"229":65,"230":78,"231":107,"232":114,"233":101,"234":99,"235":111,"236":118,"237":101,"238":114,"239":121,"240":75,"241":101,"242":121,"243":120,"244":57,"245":100,"246":105,"247":100,"248":58,"249":107,"250":101,"251":121,"252":58,"253":122,"254":68,"255":110,"256":97,"257":101,"258":115,"259":111,"260":120,"261":90,"262":98,"263":56,"264":109,"265":76,"266":106,"267":102,"268":49,"269":54,"270":101,"271":52,"272":80,"273":87,"274":115,"275":78,"276":113,"277":76,"278":76,"279":106,"280":57,"281":117,"282":87,"283":77,"284":57,"285":84,"286":81,"287":56,"288":110,"289":78,"290":119,"291":120,"292":113,"293":69,"294":114,"295":68,"296":109,"297":75,"298":88,"299":76,"300":65,"301":78}
+SHA256 base32:
+cg2dfxdh5voabmdjzw2abw3sgvtjymknh2bmpvtwot7t2ih4v7za
+did:plc:cg2dfxdh5voabmdjzw2abw3s
+------------------------------------
+
+*/
+
+#[test]
+fn test_debug_did_plc() {
+ let op = CreateOp {
+ op_type: "create".to_string(),
+ signingKey: "did:key:zDnaesoxZb8mLjf16e4PWsNqLLj9uWM9TQ8nNwxqErDmKXLAN".to_string(),
+ recoveryKey: "did:key:zDnaesoxZb8mLjf16e4PWsNqLLj9uWM9TQ8nNwxqErDmKXLAN".to_string(),
+ username: "carla.test".to_string(),
+ service: "http://localhost:2583".to_string(),
+ prev: None,
+ sig:
+ "VYGxmZs-D5830YdQSNrZpbxVyOPB4nCJtO-x0XElt35AE5wjvJFa2vJu8qjURG6TvEbMvfbekDo_eXEMhdPWdg"
+ .to_string(),
+ };
+ op.verify_self().unwrap();
+ let block =
+ Block::<DefaultParams>::encode(DagCborCodec, Code::Sha2_256, &op).expect("encode DAG-CBOR");
+ let op_bytes = block.data();
+
+ let encoded_bytes = vec![
+ 167, 99, 115, 105, 103, 120, 86, 86, 89, 71, 120, 109, 90, 115, 45, 68, 53, 56, 51, 48, 89,
+ 100, 81, 83, 78, 114, 90, 112, 98, 120, 86, 121, 79, 80, 66, 52, 110, 67, 74, 116, 79, 45,
+ 120, 48, 88, 69, 108, 116, 51, 53, 65, 69, 53, 119, 106, 118, 74, 70, 97, 50, 118, 74, 117,
+ 56, 113, 106, 85, 82, 71, 54, 84, 118, 69, 98, 77, 118, 102, 98, 101, 107, 68, 111, 95,
+ 101, 88, 69, 77, 104, 100, 80, 87, 100, 103, 100, 112, 114, 101, 118, 246, 100, 116, 121,
+ 112, 101, 102, 99, 114, 101, 97, 116, 101, 103, 115, 101, 114, 118, 105, 99, 101, 117, 104,
+ 116, 116, 112, 58, 47, 47, 108, 111, 99, 97, 108, 104, 111, 115, 116, 58, 50, 53, 56, 51,
+ 104, 117, 115, 101, 114, 110, 97, 109, 101, 106, 99, 97, 114, 108, 97, 46, 116, 101, 115,
+ 116, 106, 115, 105, 103, 110, 105, 110, 103, 75, 101, 121, 120, 57, 100, 105, 100, 58, 107,
+ 101, 121, 58, 122, 68, 110, 97, 101, 115, 111, 120, 90, 98, 56, 109, 76, 106, 102, 49, 54,
+ 101, 52, 80, 87, 115, 78, 113, 76, 76, 106, 57, 117, 87, 77, 57, 84, 81, 56, 110, 78, 119,
+ 120, 113, 69, 114, 68, 109, 75, 88, 76, 65, 78, 107, 114, 101, 99, 111, 118, 101, 114, 121,
+ 75, 101, 121, 120, 57, 100, 105, 100, 58, 107, 101, 121, 58, 122, 68, 110, 97, 101, 115,
+ 111, 120, 90, 98, 56, 109, 76, 106, 102, 49, 54, 101, 52, 80, 87, 115, 78, 113, 76, 76,
+ 106, 57, 117, 87, 77, 57, 84, 81, 56, 110, 78, 119, 120, 113, 69, 114, 68, 109, 75, 88, 76,
+ 65, 78,
+ ];
+ assert_eq!(op_bytes, encoded_bytes);
+
+ let sha256_str = "cg2dfxdh5voabmdjzw2abw3sgvtjymknh2bmpvtwot7t2ih4v7za";
+ let _did_plc = "did:plc:cg2dfxdh5voabmdjzw2abw3s";
+
+ let digest_bytes: Vec<u8> = data_encoding::HEXLOWER
+ .decode(&sha256::digest(op_bytes).as_bytes())
+ .expect("SHA-256 digest is always hex string");
+ let digest_b32 = data_encoding::BASE32_NOPAD
+ .encode(&digest_bytes)
+ .to_ascii_lowercase();
+ assert_eq!(digest_b32, sha256_str);
+}
+
+#[test]
+fn test_did_plc_examples() {
+ // https://atproto.com/specs/did-plc
+ let op = CreateOp {
+ op_type: "create".to_string(),
+ signingKey: "did:key:zDnaejYFhgFiVF89LhJ4UipACLKuqo6PteZf8eKDVKeExXUPk".to_string(),
+ recoveryKey: "did:key:zDnaeSezF2TgCD71b5DiiFyhHQwKAfsBVqTTHRMvP597Z5Ztn".to_string(),
+ username: "alice.example.com".to_string(),
+ service: "https://example.com".to_string(),
+ prev: None,
+ sig:
+ "vi6JAl5W4FfyViD5_BKL9p0rbI3MxTWuh0g_egTFAjtf7gwoSfSe1O3qMOEUPX6QH3H0Q9M4y7gOLGblWkEwfQ"
+ .to_string(),
+ };
+ op.verify_self().unwrap();
+ assert_eq!(
+ &op.did_plc().to_string(),
+ "did:plc:7iza6de2dwap2sbkpav7c6c6"
+ );
+
+ // interacting with PDS / PLC server
+ let op = CreateOp {
+ op_type: "create".to_string(),
+ signingKey: "did:key:zDnaekmbFffmpo7LZ4C7bEFjGKPk11N47kKN8j7jtAcGUabw3".to_string(),
+ recoveryKey: "did:key:zDnaekmbFffmpo7LZ4C7bEFjGKPk11N47kKN8j7jtAcGUabw3".to_string(),
+ username: "voltaire.test".to_string(),
+ service: "http://localhost:2583".to_string(),
+ prev: None,
+ sig:
+ "HNfQUg6SMnYKp1l3LtAIsoAblmi33mYiHE9JH1j7w3B-hd8xWpmCUBUoqKfQXmsAs0K1z8Izt19yYk6PqVFgyg"
+ .to_string(),
+ };
+ op.verify_self().unwrap();
+ assert_eq!(
+ &op.did_plc().to_string(),
+ "did:plc:bmrcg7zrxoiw2kiml3tkw2xv"
+ );
+}
+
+#[test]
+fn test_self_verify() {
+ let keypair = KeyPair::new_random();
+ let op = CreateOp::new(
+ "dummy-handle".to_string(),
+ "https://dummy.service".to_string(),
+ &keypair,
+ None,
+ );
+ println!("{:?}", op);
+ op.verify_self().unwrap();
+}
+
+#[test]
+fn test_known_key() {
+ let keypair = KeyPair::new_random();
+ let op = CreateOp::new(
+ "dummy-handle".to_string(),
+ "https://dummy.service".to_string(),
+ &keypair,
+ None,
+ );
+ println!("{:?}", op);
+ op.verify_self().unwrap();
+}
diff --git a/adenosine/src/lib.rs b/adenosine/src/lib.rs
index 461930f..dc4a1b9 100644
--- a/adenosine/src/lib.rs
+++ b/adenosine/src/lib.rs
@@ -6,7 +6,14 @@ use std::collections::HashMap;
use std::str::FromStr;
use std::time::Duration;
+pub mod car;
+pub mod crypto;
+pub mod did;
pub mod identifiers;
+pub mod mst;
+pub mod repo;
+pub mod ucan_p256;
+pub mod vendored;
use identifiers::Nsid;
static APP_USER_AGENT: &str = concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"),);
diff --git a/adenosine/src/mst.rs b/adenosine/src/mst.rs
new file mode 100644
index 0000000..1d75da1
--- /dev/null
+++ b/adenosine/src/mst.rs
@@ -0,0 +1,399 @@
+//! This is a simple immutable implementation of a Merkle Search Tree (MST) for atproto.
+//!
+//! Mutations on the data structure are not implemented; instead the entire tree is read into a
+//! BTreeMap, that is mutated, and then the entire tree is regenerated. This makes implementation
+//! much simpler, at the obvious cost of performance in some situations.
+//!
+//! The MST is basically a sorted key/value store where the key is a string and the value is a CID.
+//! Tree nodes are stored as DAG-CBOR IPLD blocks, with references as CIDs.
+//!
+//! In the atproto MST implementation, SHA-256 is the hashing algorithm, and "leading zeros" are
+//! counted in blocks of 4 bits (so a leading zero byte counts as two zeros). This happens to match
+//! simple hex encoding of the SHA-256 hash.
+use anyhow::{anyhow, Context, Result};
+use ipfs_sqlite_block_store::BlockStore;
+use libipld::cbor::DagCborCodec;
+use libipld::multihash::Code;
+use libipld::prelude::Codec;
+use libipld::store::DefaultParams;
+use libipld::Block;
+use libipld::{Cid, DagCbor};
+use log::{debug, error, info};
+use std::collections::BTreeMap;
+use std::path::PathBuf;
+
+/// Signed commit node; the signature covers the `root` CID.
+#[derive(Debug, DagCbor, PartialEq, Eq)]
+pub struct CommitNode {
+    pub root: Cid,
+    pub sig: Box<[u8]>,
+}
+
+/// Repo root node: points at the metadata node, the previous commit (if any),
+/// and the MST `data` tree.
+#[derive(Debug, DagCbor, PartialEq, Eq)]
+pub struct RootNode {
+    pub auth_token: Option<String>,
+    pub prev: Option<Cid>,
+    // TODO: not 'metadata'?
+    pub meta: Cid,
+    pub data: Cid,
+}
+
+/// Repo metadata node; `datastore` is expected to be "mst" and `version` 1
+/// (enforced by `RepoStore::get_commit`).
+#[derive(Debug, DagCbor, PartialEq, Eq)]
+pub struct MetadataNode {
+    pub datastore: String, // "mst"
+    pub did: String,
+    pub version: u8, // 1
+}
+
+/// Single on-disk MST entry; short field names match the atproto wire format:
+/// `p` = number of key bytes shared with the previous entry's key,
+/// `k` = key suffix, `v` = value CID, `t` = optional right subtree CID.
+#[derive(Debug, DagCbor, PartialEq, Eq)]
+struct MstEntry {
+    p: u32,
+    k: String,
+    v: Cid,
+    t: Option<Cid>,
+}
+
+/// On-disk MST node: `l` is the optional left subtree, `e` the sorted entries.
+#[derive(Debug, DagCbor, PartialEq)]
+struct MstNode {
+    l: Option<Cid>,
+    e: Vec<MstEntry>,
+}
+
+/// In-memory "work in progress" entry used while (re)generating a tree;
+/// unlike `MstEntry`, holds the full uncompressed key.
+struct WipEntry {
+    height: u8,
+    key: String,
+    val: Cid,
+    right: Option<Box<WipNode>>,
+}
+
+/// In-memory "work in progress" node used while (re)generating a tree.
+struct WipNode {
+    height: u8,
+    left: Option<Box<WipNode>>,
+    entries: Vec<WipEntry>,
+}
+
+/// Reads a single MST node from the blockstore by CID and decodes it from
+/// DAG-CBOR. Errors if the block is missing or fails to parse.
+fn get_mst_node(db: &mut BlockStore<libipld::DefaultParams>, cid: &Cid) -> Result<MstNode> {
+    let block = &db
+        .get_block(cid)?
+        .ok_or(anyhow!("reading MST node from blockstore"))?;
+    //println!("{:?}", block);
+    let mst_node: MstNode = DagCborCodec
+        .decode(block)
+        .context("parsing MST DAG-CBOR IPLD node from blockstore")?;
+    Ok(mst_node)
+}
+
+/// Recursively prints all key/CID pairs under the given MST node in sorted
+/// order, reconstructing full keys from the prefix-compressed entries.
+pub fn print_mst_keys(db: &mut BlockStore<libipld::DefaultParams>, cid: &Cid) -> Result<()> {
+    let node = get_mst_node(db, cid)?;
+    if let Some(ref left) = node.l {
+        print_mst_keys(db, left)?;
+    }
+    let mut key: String = "".to_string();
+    for entry in node.e.iter() {
+        // each entry stores only a suffix; the first `p` bytes are copied
+        // from the previous entry's (reconstructed) key
+        key = format!("{}{}", &key[0..entry.p as usize], entry.k);
+        println!("\t{}\t-> {}", key, entry.v);
+        if let Some(ref right) = entry.t {
+            print_mst_keys(db, right)?;
+        }
+    }
+    Ok(())
+}
+
+/// Debug helper: opens the blockstore at `db_path`, prints every alias
+/// (DID -> commit CID), then walks commit -> root -> metadata -> MST for the
+/// first alias and prints all MST keys.
+///
+/// NOTE(review): calls `std::process::exit(-1)` from library code when no
+/// aliases exist; returning an `Err` would be friendlier to callers — confirm
+/// this is only used from CLI paths.
+pub fn dump_mst_keys(db_path: &PathBuf) -> Result<()> {
+    let mut db: BlockStore<libipld::DefaultParams> = BlockStore::open(db_path, Default::default())?;
+
+    let all_aliases: Vec<(Vec<u8>, Cid)> = db.aliases()?;
+    if all_aliases.is_empty() {
+        error!("expected at least one alias in block store");
+        std::process::exit(-1);
+    }
+
+    // print all the aliases
+    for (alias, commit_cid) in all_aliases.iter() {
+        let did = String::from_utf8_lossy(alias);
+        println!("{} -> {}", did, commit_cid);
+    }
+
+    let (did, commit_cid) = all_aliases[0].clone();
+    let did = String::from_utf8_lossy(&did);
+    info!("starting from {} [{}]", commit_cid, did);
+
+    // NOTE: the faster way to develop would have been to decode to libipld::ipld::Ipld first? meh
+
+    debug!(
+        "raw commit: {:?}",
+        &db.get_block(&commit_cid)?
+            .ok_or(anyhow!("expected commit block in store"))?
+    );
+    let commit: CommitNode = DagCborCodec.decode(
+        &db.get_block(&commit_cid)?
+            .ok_or(anyhow!("expected commit block in store"))?,
+    )?;
+    debug!("Commit: {:?}", commit);
+    let root: RootNode = DagCborCodec.decode(
+        &db.get_block(&commit.root)?
+            .ok_or(anyhow!("expected root block in store"))?,
+    )?;
+    debug!("Root: {:?}", root);
+    let metadata: MetadataNode = DagCborCodec.decode(
+        &db.get_block(&root.meta)?
+            .ok_or(anyhow!("expected metadata block in store"))?,
+    )?;
+    debug!("Metadata: {:?}", metadata);
+    let mst_node: MstNode = DagCborCodec.decode(
+        &db.get_block(&root.data)?
+            .ok_or(anyhow!("expected block in store"))?,
+    )?;
+    debug!("MST root node: {:?}", mst_node);
+    debug!("============");
+
+    println!("{}", did);
+    print_mst_keys(&mut db, &root.data)?;
+    Ok(())
+}
+
+/// Recursively reads the entire MST rooted at `cid` into `map` (full key ->
+/// value CID). In-order traversal, so `map` iteration yields sorted keys.
+pub fn collect_mst_keys(
+    db: &mut BlockStore<libipld::DefaultParams>,
+    cid: &Cid,
+    map: &mut BTreeMap<String, Cid>,
+) -> Result<()> {
+    let node = get_mst_node(db, cid)?;
+    if let Some(ref left) = node.l {
+        collect_mst_keys(db, left, map)?;
+    }
+    let mut key: String = "".to_string();
+    for entry in node.e.iter() {
+        // reconstruct the full key from the shared prefix of the previous key
+        key = format!("{}{}", &key[0..entry.p as usize], entry.k);
+        map.insert(key.clone(), entry.v);
+        if let Some(ref right) = entry.t {
+            collect_mst_keys(db, right, map)?;
+        }
+    }
+    Ok(())
+}
+
+/// Computes the MST "height" of a key: the number of leading 4-bit zero
+/// blocks in the SHA-256 hash of the key. Since `sha256::digest` returns a
+/// lowercase hex string, this is just the count of leading '0' characters.
+fn leading_zeros(key: &str) -> u8 {
+    let digest = sha256::digest(key);
+    let digest = digest.as_bytes();
+    for (i, c) in digest.iter().enumerate() {
+        if *c != b'0' {
+            return i as u8;
+        }
+    }
+    // all-zero digest: practically unreachable, but return full length (64)
+    digest.len() as u8
+}
+
+// # python code to generate test cases
+// import hashlib
+// seed = b"asdf"
+// while True:
+//     out = hashlib.sha256(seed).hexdigest()
+//     if out.startswith("00"):
+//         print(f"{seed} -> {out}")
+//     seed = b"app.bsky.feed.post/" + out.encode('utf8')[:12]
+
+#[test]
+fn test_leading_zeros() {
+    assert_eq!(leading_zeros(""), 0);
+    assert_eq!(leading_zeros("asdf"), 0);
+    assert_eq!(leading_zeros("2653ae71"), 0);
+    assert_eq!(leading_zeros("88bfafc7"), 1);
+    assert_eq!(leading_zeros("2a92d355"), 2);
+    assert_eq!(leading_zeros("884976f5"), 3);
+    assert_eq!(leading_zeros("app.bsky.feed.post/454397e440ec"), 2);
+    assert_eq!(leading_zeros("app.bsky.feed.post/9adeb165882c"), 4);
+}
+
+/// Builds a complete MST from a sorted key -> CID map, writes all nodes to
+/// the blockstore, and returns the root node's CID. An empty map produces a
+/// single empty root node.
+pub fn generate_mst(
+    db: &mut BlockStore<libipld::DefaultParams>,
+    map: &BTreeMap<String, Cid>,
+) -> Result<Cid> {
+    // construct a "WIP" tree
+    let mut root: Option<WipNode> = None;
+    // BTreeMap iteration is in sorted key order, which insert_entry relies on
+    for (key, val) in map {
+        let height = leading_zeros(key);
+        let entry = WipEntry {
+            height,
+            key: key.clone(),
+            val: *val,
+            right: None,
+        };
+        if let Some(node) = root {
+            root = Some(insert_entry(node, entry));
+        } else {
+            root = Some(WipNode {
+                height: entry.height,
+                left: None,
+                entries: vec![entry],
+            });
+        }
+    }
+    let empty_node = WipNode {
+        height: 0,
+        left: None,
+        entries: vec![],
+    };
+    serialize_wip_tree(db, root.unwrap_or(empty_node))
+}
+
+// this routine assumes that entries are added in sorted key order. AKA, the `entry` being added is
+// "further right" in the tree than any existing entries
+//
+// Panics (via assert!) if keys arrive out of order, and if an entirely empty
+// node with no left child is encountered while descending.
+fn insert_entry(mut node: WipNode, entry: WipEntry) -> WipNode {
+    // if we are higher on tree than existing node, replace it (recursively) with new layers first
+    while entry.height > node.height {
+        node = WipNode {
+            height: node.height + 1,
+            left: Some(Box::new(node)),
+            entries: vec![],
+        }
+    }
+    // if we are lower on tree, then need to descend first
+    if entry.height < node.height {
+        // if no entries at this node, then we should insert down "left" (which is just "down", not
+        // "before" any entries)
+        if node.entries.is_empty() {
+            if let Some(left) = node.left {
+                node.left = Some(Box::new(insert_entry(*left, entry)));
+                return node;
+            } else {
+                panic!("hit existing totally empty MST node");
+            }
+        }
+        // otherwise descend into (or create) the right subtree of the last entry
+        let mut last = node.entries.pop().expect("hit empty existing entry list");
+        assert!(entry.key > last.key);
+        if last.right.is_some() {
+            last.right = Some(Box::new(insert_entry(*last.right.unwrap(), entry)));
+        } else {
+            let mut new_node = WipNode {
+                height: entry.height,
+                left: None,
+                entries: vec![entry],
+            };
+            // may need to (recursively) insert multiple filler layers
+            while new_node.height + 1 < node.height {
+                new_node = WipNode {
+                    height: new_node.height + 1,
+                    left: Some(Box::new(new_node)),
+                    entries: vec![],
+                }
+            }
+            last.right = Some(Box::new(new_node));
+        }
+        node.entries.push(last);
+        return node;
+    }
+    // same height, simply append to end (but verify first)
+    assert!(node.height == entry.height);
+    if !node.entries.is_empty() {
+        let last = &node.entries.last().unwrap();
+        assert!(entry.key > last.key);
+    }
+    node.entries.push(entry);
+    node
+}
+
+/// returns the length of common characters between the two strings. Strings must be simple ASCII,
+/// which should hold for current ATP MST keys (collection plus TID)
+///
+/// Comparison is byte-wise, so for non-ASCII input the result counts common
+/// UTF-8 *bytes*, not characters (see test_common_prefix_len_wide below).
+fn common_prefix_len(a: &str, b: &str) -> usize {
+    let a = a.as_bytes();
+    let b = b.as_bytes();
+    for i in 0..std::cmp::min(a.len(), b.len()) {
+        if a[i] != b[i] {
+            return i;
+        }
+    }
+    // strings are the same, up to common length
+    std::cmp::min(a.len(), b.len())
+}
+
+#[test]
+fn test_common_prefix_len() {
+    assert_eq!(common_prefix_len("abc", "abc"), 3);
+    assert_eq!(common_prefix_len("", "abc"), 0);
+    assert_eq!(common_prefix_len("abc", ""), 0);
+    assert_eq!(common_prefix_len("ab", "abc"), 2);
+    assert_eq!(common_prefix_len("abc", "ab"), 2);
+    assert_eq!(common_prefix_len("abcde", "abc"), 3);
+    assert_eq!(common_prefix_len("abc", "abcde"), 3);
+    assert_eq!(common_prefix_len("abcde", "abc1"), 3);
+    assert_eq!(common_prefix_len("abcde", "abb"), 2);
+    assert_eq!(common_prefix_len("abcde", "qbb"), 0);
+    assert_eq!(common_prefix_len("abc", "abc\x00"), 3);
+    assert_eq!(common_prefix_len("abc\x00", "abc"), 3);
+}
+
+#[test]
+fn test_common_prefix_len_wide() {
+    // TODO: these are not cross-language consistent!
+    assert_eq!("jalapeño".len(), 9); // 8 in javascript
+    assert_eq!("💩".len(), 4); // 2 in javascript
+    assert_eq!("👩‍👧‍👧".len(), 18); // 8 in javascript
+
+    // many of the below are different in JS; in Rust we *must* cast down to bytes to count
+    assert_eq!(common_prefix_len("jalapeño", "jalapeno"), 6);
+    assert_eq!(common_prefix_len("jalapeñoA", "jalapeñoB"), 9);
+    assert_eq!(common_prefix_len("coöperative", "coüperative"), 3);
+    assert_eq!(common_prefix_len("abc💩abc", "abcabc"), 3);
+    assert_eq!(common_prefix_len("💩abc", "💩ab"), 6);
+    assert_eq!(common_prefix_len("abc👩‍👦‍👦de", "abc👩‍👧‍👧de"), 13);
+}
+
+/// Recursively serializes a WIP tree into DAG-CBOR `MstNode` blocks (children
+/// first, so child CIDs are known), writing each block to the blockstore and
+/// returning the CID of this node. Applies key prefix compression per entry.
+fn serialize_wip_tree(
+    db: &mut BlockStore<libipld::DefaultParams>,
+    wip_node: WipNode,
+) -> Result<Cid> {
+    let left: Option<Cid> = if let Some(left) = wip_node.left {
+        Some(serialize_wip_tree(db, *left)?)
+    } else {
+        None
+    };
+
+    let mut entries: Vec<MstEntry> = vec![];
+    let mut last_key = "".to_string();
+    for wip_entry in wip_node.entries {
+        let right: Option<Cid> = if let Some(right) = wip_entry.right {
+            Some(serialize_wip_tree(db, *right)?)
+        } else {
+            None
+        };
+        // store only the suffix beyond the prefix shared with the previous key
+        let prefix_len = common_prefix_len(&last_key, &wip_entry.key);
+        entries.push(MstEntry {
+            k: wip_entry.key[prefix_len..].to_string(),
+            p: prefix_len as u32,
+            v: wip_entry.val,
+            t: right,
+        });
+        last_key = wip_entry.key;
+    }
+    let mst_node = MstNode {
+        l: left,
+        e: entries,
+    };
+    let block = Block::<DefaultParams>::encode(DagCborCodec, Code::Sha2_256, &mst_node)?;
+    let cid = *block.cid();
+    db.put_block(block, None)?;
+    Ok(cid)
+}
+
+// pins the DAG-CBOR encoding of a single-entry MstNode to a fixed CID, so any
+// change to field names/order or codec behavior is caught
+#[test]
+fn test_mst_node_cbor() {
+    use std::str::FromStr;
+    let cid1 =
+        Cid::from_str("bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454").unwrap();
+    let node = MstNode {
+        l: None,
+        e: vec![MstEntry {
+            k: "asdf".to_string(),
+            p: 0,
+            v: cid1,
+            t: None,
+        }],
+    };
+    let block = Block::<DefaultParams>::encode(DagCborCodec, Code::Sha2_256, &node).unwrap();
+    println!("{:?}", block);
+    //assert_eq!(1, 2);
+    let cid = *block.cid();
+    assert_eq!(
+        cid.to_string(),
+        "bafyreidaftbr35xhh4lzmv5jcoeufqjh75ohzmz6u56v7n2ippbtxdgqqe"
+    );
+}
diff --git a/adenosine/src/repo.rs b/adenosine/src/repo.rs
new file mode 100644
index 0000000..2383e52
--- /dev/null
+++ b/adenosine/src/repo.rs
@@ -0,0 +1,466 @@
+use crate::car::{
+ load_car_bytes_to_blockstore, load_car_path_to_blockstore, read_car_bytes_from_blockstore,
+};
+use crate::crypto::KeyPair;
+use crate::identifiers::{Did, Nsid, Tid};
+use crate::mst::{collect_mst_keys, generate_mst, CommitNode, MetadataNode, RootNode};
+use anyhow::{anyhow, ensure, Context, Result};
+use ipfs_sqlite_block_store::BlockStore;
+use libipld::cbor::DagCborCodec;
+use libipld::multihash::Code;
+use libipld::prelude::Codec;
+use libipld::store::DefaultParams;
+use libipld::{Block, Cid, Ipld};
+use serde_json::{json, Value};
+use std::borrow::Cow;
+use std::collections::BTreeMap;
+use std::collections::HashSet;
+use std::path::PathBuf;
+use std::str::FromStr;
+
+/// Fully-resolved view of one repo commit: the commit/root/metadata chain
+/// flattened into a single struct for convenient access.
+#[derive(Debug, serde::Serialize)]
+pub struct RepoCommit {
+    pub sig: Box<[u8]>,
+    pub commit_cid: Cid,
+    pub root_cid: Cid,
+    pub did: Did,
+    pub prev: Option<Cid>,
+    pub meta_cid: Cid,
+    pub mst_cid: Cid,
+}
+
+impl RepoCommit {
+    /// Returns a JSON object version of this struct, with CIDs and signatures in expected format
+    /// (aka, CID as a string, not an array of bytes).
+    pub fn to_pretty_json(&self) -> Value {
+        json!({
+            "sig": data_encoding::HEXUPPER.encode(&self.sig),
+            "commit_cid": self.commit_cid.to_string(),
+            "root_cid": self.root_cid.to_string(),
+            "did": self.did.to_string(),
+            "prev": self.prev.map(|v| v.to_string()),
+            "meta_cid": self.meta_cid.to_string(),
+            "mst_cid": self.mst_cid.to_string(),
+        })
+    }
+}
+
+/// Storage for one or more atproto repositories, backed by an SQLite IPLD
+/// blockstore. DID -> latest-commit lookups use blockstore aliases.
+pub struct RepoStore {
+    // TODO: only public for test/debug; should wrap instead
+    pub db: BlockStore<libipld::DefaultParams>,
+}
+
+/// A single record-level change to apply to a repo MST
+/// (key is "/{collection}/{tid}").
+pub enum Mutation {
+    Create(Nsid, Tid, Ipld),
+    Update(Nsid, Tid, Ipld),
+    Delete(Nsid, Tid),
+}
+
+impl RepoStore {
+    /// Opens (creating if needed) a blockstore at the given filesystem path.
+    pub fn open(db_path: &PathBuf) -> Result<Self> {
+        Ok(RepoStore {
+            db: BlockStore::open(db_path, Default::default())?,
+        })
+    }
+
+    /// Opens an in-memory blockstore (useful for tests).
+    pub fn open_ephemeral() -> Result<Self> {
+        Ok(RepoStore {
+            db: BlockStore::open_path(ipfs_sqlite_block_store::DbPath::Memory, Default::default())?,
+        })
+    }
+
+    /// Returns a new RepoStore sharing the same underlying database.
+    pub fn new_connection(&mut self) -> Result<Self> {
+        Ok(RepoStore {
+            db: self.db.additional_connection()?,
+        })
+    }
+
+    /// Fetches a block by CID and decodes it to an IPLD value; errors if the
+    /// CID is not present in the store.
+    pub fn get_ipld(&mut self, cid: &Cid) -> Result<Ipld> {
+        if let Some(b) = self.db.get_block(cid)? {
+            let block: Block<DefaultParams> = Block::new(*cid, b)?;
+            block.ipld()
+        } else {
+            Err(anyhow!("missing IPLD CID: {}", cid))
+        }
+    }
+
+    /// Fetches raw block bytes by CID; Ok(None) if not present.
+    pub fn get_blob(&mut self, cid: &Cid) -> Result<Option<Vec<u8>>> {
+        Ok(self.db.get_block(cid)?)
+    }
+
+    /// Returns CID that was inserted
+    pub fn put_ipld<S: libipld::codec::Encode<DagCborCodec>>(&mut self, record: &S) -> Result<Cid> {
+        let block = Block::<DefaultParams>::encode(DagCborCodec, Code::Sha2_256, record)?;
+        let cid = *block.cid();
+        self.db
+            .put_block(block, None)
+            .context("writing IPLD DAG-CBOR record to blockstore")?;
+        Ok(cid)
+    }
+
+    /// Returns CID that was inserted
+    pub fn put_blob(&mut self, data: &[u8]) -> Result<Cid> {
+        let block = Block::<DefaultParams>::encode(libipld::raw::RawCodec, Code::Sha2_256, data)?;
+        let cid = *block.cid();
+        self.db
+            .put_block(block, None)
+            .context("writing non-record blob to blockstore")?;
+        Ok(cid)
+    }
+
+    /// Quick alias lookup
+    pub fn lookup_commit(&mut self, did: &Did) -> Result<Option<Cid>> {
+        Ok(self.db.resolve(Cow::from(did.as_bytes()))?)
+    }
+
+    /// Loads and validates the commit -> root -> metadata chain for the given
+    /// commit CID; checks metadata datastore=="mst" and version==1.
+    pub fn get_commit(&mut self, commit_cid: &Cid) -> Result<RepoCommit> {
+        // read records by CID: commit, root, meta
+        let commit_node: CommitNode = DagCborCodec
+            .decode(
+                &self
+                    .db
+                    .get_block(commit_cid)?
+                    .ok_or(anyhow!("expected commit block in store"))?,
+            )
+            .context("parsing commit IPLD node from blockstore")?;
+        let root_node: RootNode = DagCborCodec
+            .decode(
+                &self
+                    .db
+                    .get_block(&commit_node.root)?
+                    .ok_or(anyhow!("expected root block in store"))?,
+            )
+            .context("parsing root IPLD node from blockstore")?;
+        let metadata_node: MetadataNode = DagCborCodec
+            .decode(
+                &self
+                    .db
+                    .get_block(&root_node.meta)?
+                    .ok_or(anyhow!("expected metadata block in store"))?,
+            )
+            .context("parsing metadata IPLD node from blockstore")?;
+        ensure!(
+            metadata_node.datastore == "mst",
+            "unexpected repo metadata.datastore: {}",
+            metadata_node.datastore
+        );
+        ensure!(
+            metadata_node.version == 1,
+            "unexpected repo metadata.version: {}",
+            metadata_node.version
+        );
+        Ok(RepoCommit {
+            sig: commit_node.sig,
+            commit_cid: *commit_cid,
+            root_cid: commit_node.root,
+            meta_cid: root_node.meta,
+            did: Did::from_str(&metadata_node.did)?,
+            prev: root_node.prev,
+            mst_cid: root_node.data,
+        })
+    }
+
+    /// Looks up a single record by full MST key ("/{collection}/{tid}");
+    /// reads the entire MST into memory to do so.
+    pub fn get_mst_record_by_key(&mut self, mst_cid: &Cid, key: &str) -> Result<Option<Ipld>> {
+        let map = self.mst_to_map(mst_cid)?;
+        if let Some(cid) = map.get(key) {
+            self.get_ipld(cid).map(Some)
+        } else {
+            Ok(None)
+        }
+    }
+
+    /// Returns the distinct collection names (NSIDs) present in the repo for
+    /// the given DID; errors if the DID has no repo here.
+    pub fn collections(&mut self, did: &Did) -> Result<Vec<String>> {
+        let commit = if let Some(c) = self.lookup_commit(did)? {
+            self.get_commit(&c)?
+        } else {
+            return Err(anyhow!("DID not found in repositories: {}", did));
+        };
+        let map = self.mst_to_map(&commit.mst_cid)?;
+        let mut collections: HashSet<String> = Default::default();
+        // XXX: confirm that keys actually start with leading slash
+        // NOTE(review): nth(1).unwrap() panics on a key with no '/'; only safe
+        // if all writers use the "/{collection}/{tid}" convention — verify
+        for k in map.keys() {
+            let coll = k.split('/').nth(1).unwrap();
+            collections.insert(coll.to_string());
+        }
+        Ok(collections.into_iter().collect())
+    }
+
+    /// Fetches a record by DID + collection + TID; Ok(None) if the DID has no
+    /// repo or the record does not exist.
+    pub fn get_atp_record(
+        &mut self,
+        did: &Did,
+        collection: &Nsid,
+        tid: &Tid,
+    ) -> Result<Option<Ipld>> {
+        let commit = if let Some(c) = self.lookup_commit(did)? {
+            self.get_commit(&c)?
+        } else {
+            return Ok(None);
+        };
+        let record_key = format!("/{}/{}", collection, tid);
+        self.get_mst_record_by_key(&commit.mst_cid, &record_key)
+    }
+
+    /// Writes a standard v1 "mst" metadata node for the DID; returns its CID.
+    pub fn write_metadata(&mut self, did: &Did) -> Result<Cid> {
+        self.put_ipld(&MetadataNode {
+            datastore: "mst".to_string(),
+            did: did.to_string(),
+            version: 1,
+        })
+    }
+
+    /// Writes a root node tying together metadata, previous commit, and MST.
+    pub fn write_root(&mut self, meta_cid: Cid, prev: Option<Cid>, mst_cid: Cid) -> Result<Cid> {
+        self.put_ipld(&RootNode {
+            auth_token: None,
+            prev,
+            meta: meta_cid,
+            data: mst_cid,
+        })
+    }
+
+    /// Writes a commit node over the given root and updates the DID alias to
+    /// point at the new commit. `sig` is stored as raw bytes of the string.
+    pub fn write_commit(&mut self, did: &Did, root_cid: Cid, sig: &str) -> Result<Cid> {
+        let commit_cid = self.put_ipld(&CommitNode {
+            root: root_cid,
+            sig: sig.as_bytes().to_vec().into_boxed_slice(),
+        })?;
+        self.db.alias(did.as_bytes().to_vec(), Some(&commit_cid))?;
+        Ok(commit_cid)
+    }
+
+    /// Generates and stores an MST from a key -> CID map; returns root CID.
+    pub fn mst_from_map(&mut self, map: &BTreeMap<String, Cid>) -> Result<Cid> {
+        let mst_cid = generate_mst(&mut self.db, map)?;
+        Ok(mst_cid)
+    }
+
+    /// Reads an entire stored MST into a key -> CID map.
+    pub fn mst_to_map(&mut self, mst_cid: &Cid) -> Result<BTreeMap<String, Cid>> {
+        let mut cid_map: BTreeMap<String, Cid> = Default::default();
+        collect_mst_keys(&mut self.db, mst_cid, &mut cid_map)
+            .context("reading repo MST from blockstore")?;
+        Ok(cid_map)
+    }
+
+    /// Applies mutations by reading the whole MST to a map, editing the map,
+    /// and regenerating the tree; returns the new MST root CID.
+    pub fn update_mst(&mut self, mst_cid: &Cid, mutations: &[Mutation]) -> Result<Cid> {
+        let mut cid_map = self.mst_to_map(mst_cid)?;
+        for m in mutations.iter() {
+            match m {
+                Mutation::Create(collection, tid, val) => {
+                    let cid = self.put_ipld(val)?;
+                    cid_map.insert(format!("/{}/{}", collection, tid), cid);
+                }
+                Mutation::Update(collection, tid, val) => {
+                    let cid = self.put_ipld(val)?;
+                    cid_map.insert(format!("/{}/{}", collection, tid), cid);
+                }
+                Mutation::Delete(collection, tid) => {
+                    cid_map.remove(&format!("/{}/{}", collection, tid));
+                }
+            }
+        }
+        let mst_cid = generate_mst(&mut self.db, &cid_map)?;
+        Ok(mst_cid)
+    }
+
+    /// High-level helper to write a batch of mutations to the repo corresponding to the DID, and
+    /// signing the resulting new root CID with the given keypair.
+    ///
+    /// NOTE(review): panics (unwrap) if the DID has no existing commit; a
+    /// descriptive Err would be safer — confirm callers always check first.
+    pub fn mutate_repo(
+        &mut self,
+        did: &Did,
+        mutations: &[Mutation],
+        signing_key: &KeyPair,
+    ) -> Result<Cid> {
+        let commit_cid = self.lookup_commit(did)?.unwrap();
+        let last_commit = self.get_commit(&commit_cid)?;
+        let new_mst_cid = self
+            .update_mst(&last_commit.mst_cid, mutations)
+            .context("updating MST in repo")?;
+        let new_root_cid = self.write_root(
+            last_commit.meta_cid,
+            Some(last_commit.commit_cid),
+            new_mst_cid,
+        )?;
+        // TODO: is this how signatures are supposed to work?
+        let sig = signing_key.sign_bytes(new_root_cid.to_string().as_bytes());
+        self.write_commit(did, new_root_cid, &sig)
+    }
+
+    /// Reads in a full MST tree starting at a repo commit, then re-builds and re-writes the tree
+    /// in to the repo, and verifies that both the MST root CIDs and the repo root CIDs are identical.
+    pub fn verify_repo_mst(&mut self, commit_cid: &Cid) -> Result<()> {
+        // load existing commit and MST tree
+        let existing_commit = self.get_commit(commit_cid)?;
+        let repo_map = self.mst_to_map(&existing_commit.mst_cid)?;
+
+        // write MST tree, and verify root CID
+        let new_mst_cid = self.mst_from_map(&repo_map)?;
+        if new_mst_cid != existing_commit.mst_cid {
+            Err(anyhow!(
+                "MST root CID did not verify: {} != {}",
+                existing_commit.mst_cid,
+                new_mst_cid
+            ))?;
+        }
+
+        let new_root_cid =
+            self.write_root(existing_commit.meta_cid, existing_commit.prev, new_mst_cid)?;
+        if new_root_cid != existing_commit.root_cid {
+            Err(anyhow!(
+                "repo root CID did not verify: {} != {}",
+                existing_commit.root_cid,
+                new_root_cid
+            ))?;
+        }
+
+        Ok(())
+    }
+
+    /// Import blocks from a CAR file in memory, optionally setting an alias pointing to the input
+    /// (eg, a DID identifier).
+    ///
+    /// Does not currently do any validation of, eg, signatures. It is naive and incomplete to use
+    /// this to simply import CAR content from users, remote servers, etc.
+    ///
+    /// Returns the root commit from the CAR file, which may or may not actually be a "commit"
+    /// block.
+    pub fn import_car_bytes(&mut self, car_bytes: &[u8], alias: Option<String>) -> Result<Cid> {
+        let cid = load_car_bytes_to_blockstore(&mut self.db, car_bytes)?;
+        self.verify_repo_mst(&cid)?;
+        if let Some(alias) = alias {
+            self.db.alias(alias.as_bytes().to_vec(), Some(&cid))?;
+        }
+        Ok(cid)
+    }
+
+    /// Similar to import_car_bytes(), but reads from a local file on disk instead of from memory.
+    pub fn import_car_path(&mut self, car_path: &PathBuf, alias: Option<String>) -> Result<Cid> {
+        let cid = load_car_path_to_blockstore(&mut self.db, car_path)?;
+        self.verify_repo_mst(&cid)?;
+        if let Some(alias) = alias {
+            self.db.alias(alias.as_bytes().to_vec(), Some(&cid))?;
+        }
+        Ok(cid)
+    }
+
+    /// Exports in CAR format to a Writer
+    ///
+    /// The "from" commit CID feature is not implemented.
+    pub fn export_car(
+        &mut self,
+        commit_cid: &Cid,
+        _from_commit_cid: Option<&Cid>,
+    ) -> Result<Vec<u8>> {
+        // TODO: from_commit_cid
+        read_car_bytes_from_blockstore(&mut self.db, commit_cid)
+    }
+}
+
+// end-to-end smoke test: blob/record put+get, MST round-tripping through a
+// map, and a two-commit chain with record lookups via DID alias
+#[test]
+fn test_repo_mst() {
+    use libipld::ipld;
+
+    let mut repo = RepoStore::open_ephemeral().unwrap();
+    let did = Did::from_str("did:plc:dummy").unwrap();
+
+    // basic blob and IPLD record put/get
+    let blob = b"beware the swamp thing";
+    let blob_cid = repo.put_blob(blob).unwrap();
+
+    let record = ipld!({"some-thing": 123});
+    let record_cid = repo.put_ipld(&record).unwrap();
+
+    repo.get_blob(&blob_cid).unwrap().unwrap();
+    repo.get_ipld(&record_cid).unwrap();
+
+    // basic MST get/put
+    let mut map: BTreeMap<String, Cid> = Default::default();
+    let empty_map_cid = repo.mst_from_map(&map).unwrap();
+    assert_eq!(map, repo.mst_to_map(&empty_map_cid).unwrap());
+    assert!(repo
+        .get_mst_record_by_key(&empty_map_cid, "/test.records/44444444444444")
+        .unwrap()
+        .is_none());
+
+    map.insert("/blobs/1".to_string(), blob_cid.clone());
+    map.insert("/blobs/2".to_string(), blob_cid.clone());
+    map.insert(
+        "/test.records/44444444444444".to_string(),
+        record_cid.clone(),
+    );
+    map.insert(
+        "/test.records/22222222222222".to_string(),
+        record_cid.clone(),
+    );
+    let simple_map_cid = repo.mst_from_map(&map).unwrap();
+    assert_eq!(map, repo.mst_to_map(&simple_map_cid).unwrap());
+
+    // create root and commit IPLD nodes
+    let meta_cid = repo.write_metadata(&did).unwrap();
+    let simple_root_cid = repo.write_root(meta_cid, None, simple_map_cid).unwrap();
+    let simple_commit_cid = repo
+        .write_commit(&did, simple_root_cid, "dummy-sig")
+        .unwrap();
+    assert_eq!(
+        Some(record.clone()),
+        repo.get_mst_record_by_key(&simple_map_cid, "/test.records/44444444444444")
+            .unwrap()
+    );
+    assert_eq!(
+        Some(record.clone()),
+        repo.get_atp_record(
+            &did,
+            &Nsid::from_str("test.records").unwrap(),
+            &Tid::from_str("44444444444444").unwrap()
+        )
+        .unwrap()
+    );
+    assert!(repo
+        .get_mst_record_by_key(&simple_map_cid, "/test.records/33333333333333")
+        .unwrap()
+        .is_none());
+    assert!(repo
+        .get_atp_record(
+            &did,
+            &Nsid::from_str("test.records").unwrap(),
+            &Tid::from_str("33333333333333").unwrap()
+        )
+        .unwrap()
+        .is_none());
+    assert_eq!(
+        Some(simple_commit_cid.clone()),
+        repo.lookup_commit(&did).unwrap()
+    );
+
+    // second commit: add one more record, chained via prev
+    map.insert(
+        "/test.records/33333333333333".to_string(),
+        record_cid.clone(),
+    );
+    let simple3_map_cid = repo.mst_from_map(&map).unwrap();
+    let simple3_root_cid = repo
+        .write_root(meta_cid, Some(simple_commit_cid), simple3_map_cid)
+        .unwrap();
+    let simple3_commit_cid = repo
+        .write_commit(&did, simple3_root_cid, "dummy-sig3")
+        .unwrap();
+    assert_eq!(map, repo.mst_to_map(&simple3_map_cid).unwrap());
+    assert_eq!(
+        Some(record.clone()),
+        repo.get_mst_record_by_key(&simple3_map_cid, "/test.records/33333333333333")
+            .unwrap()
+    );
+    assert_eq!(
+        Some(record.clone()),
+        repo.get_atp_record(
+            &did,
+            &Nsid::from_str("test.records").unwrap(),
+            &Tid::from_str("33333333333333").unwrap()
+        )
+        .unwrap()
+    );
+    let commit = repo.get_commit(&simple3_commit_cid).unwrap();
+    assert_eq!(commit.sig.to_vec(), b"dummy-sig3".to_vec());
+    assert_eq!(commit.did, did);
+    assert_eq!(commit.prev, Some(simple_commit_cid));
+    assert_eq!(commit.mst_cid, simple3_map_cid);
+    assert_eq!(
+        Some(simple3_commit_cid.clone()),
+        repo.lookup_commit(&did).unwrap()
+    );
+}
diff --git a/adenosine/src/ucan_p256.rs b/adenosine/src/ucan_p256.rs
new file mode 100644
index 0000000..b8b6cd2
--- /dev/null
+++ b/adenosine/src/ucan_p256.rs
@@ -0,0 +1,85 @@
+//! Implement UCAN KeyMaterial trait for p256.
+//!
+//! This is needed because the 'ucan-key-support' crate does not include support for this key type.
+use anyhow::{anyhow, Result};
+use async_trait::async_trait;
+
+use p256::ecdsa::signature::{Signer, Verifier};
+use p256::ecdsa::{Signature, SigningKey as P256PrivateKey, VerifyingKey as P256PublicKey};
+
+use ucan::crypto::KeyMaterial;
+
+pub use ucan::crypto::{did::P256_MAGIC_BYTES, JwtSignatureAlgorithm};
+
+/// UCAN key material wrapping a p256 public key and an optional private key
+/// (present only when this holder can sign).
+#[derive(Clone)]
+pub struct P256KeyMaterial(pub P256PublicKey, pub Option<P256PrivateKey>);
+
+#[cfg_attr(target_arch="wasm32", async_trait(?Send))]
+#[cfg_attr(not(target_arch = "wasm32"), async_trait)]
+impl KeyMaterial for P256KeyMaterial {
+    fn get_jwt_algorithm_name(&self) -> String {
+        JwtSignatureAlgorithm::ES256.to_string()
+    }
+
+    /// did:key encoding: multicodec magic bytes + SEC1-compressed point,
+    /// base58btc with "z" multibase prefix.
+    async fn get_did(&self) -> Result<String> {
+        let bytes = [P256_MAGIC_BYTES, &self.0.to_encoded_point(true).to_bytes()].concat();
+        Ok(format!("did:key:z{}", bs58::encode(bytes).into_string()))
+    }
+
+    /// ECDSA-signs the payload; errors if no private key is held.
+    async fn sign(&self, payload: &[u8]) -> Result<Vec<u8>> {
+        match self.1 {
+            Some(ref private_key) => {
+                let signature = private_key.sign(payload);
+                Ok(signature.to_vec())
+            }
+            None => Err(anyhow!("No private key; cannot sign data")),
+        }
+    }
+
+    /// Verifies an ECDSA signature against the public key.
+    async fn verify(&self, payload: &[u8], signature: &[u8]) -> Result<()> {
+        let signature = Signature::try_from(signature)?;
+        self.0
+            .verify(payload, &signature)
+            .map_err(|error| anyhow!("Could not verify signature: {:?}", error))
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::{P256KeyMaterial, Result, P256_MAGIC_BYTES};
+    use p256::ecdsa::{SigningKey as P256PrivateKey, VerifyingKey as P256PublicKey};
+    use ucan::{
+        builder::UcanBuilder,
+        crypto::{did::DidParser, KeyMaterial},
+        ucan::Ucan,
+    };
+
+    /// DidParser factory: parses raw public-key bytes into verify-only key material.
+    pub fn bytes_to_p256_key(bytes: Vec<u8>) -> Result<Box<dyn KeyMaterial>> {
+        let public_key = P256PublicKey::try_from(bytes.as_slice())?;
+        Ok(Box::new(P256KeyMaterial(public_key, None)))
+    }
+
+    // round-trip: build + sign a UCAN with a fresh p256 key, then re-parse the
+    // token and verify the signature through the DID parser
+    #[cfg_attr(not(target_arch = "wasm32"), tokio::test)]
+    async fn it_can_sign_and_verify_a_ucan() {
+        let private_key = P256PrivateKey::random(&mut p256::elliptic_curve::rand_core::OsRng);
+        let public_key = P256PublicKey::from(&private_key);
+
+        let key_material = P256KeyMaterial(public_key, Some(private_key));
+        let token_string = UcanBuilder::default()
+            .issued_by(&key_material)
+            .for_audience(key_material.get_did().await.unwrap().as_str())
+            .with_lifetime(60)
+            .build()
+            .unwrap()
+            .sign()
+            .await
+            .unwrap()
+            .encode()
+            .unwrap();
+
+        let mut did_parser = DidParser::new(&[(P256_MAGIC_BYTES, bytes_to_p256_key)]);
+
+        let ucan = Ucan::try_from_token_string(&token_string).unwrap();
+        ucan.check_signature(&mut did_parser).await.unwrap();
+    }
+}
diff --git a/adenosine/src/vendored.rs b/adenosine/src/vendored.rs
new file mode 100644
index 0000000..74584ad
--- /dev/null
+++ b/adenosine/src/vendored.rs
@@ -0,0 +1 @@
+pub mod iroh_car;
diff --git a/adenosine/src/vendored/iroh_car/README.md b/adenosine/src/vendored/iroh_car/README.md
new file mode 100644
index 0000000..0cad81b
--- /dev/null
+++ b/adenosine/src/vendored/iroh_car/README.md
@@ -0,0 +1,27 @@
+# iroh-car
+
+[CAR file](https://ipld.io/specs/transport/car/) support for iroh. "CAR" stands
+for Content Addressable aRchives. A CAR file typically contains a serialized
+representation of an [IPLD
+DAG](https://docs.ipfs.tech/concepts/merkle-dag/#merkle-directed-acyclic-graphs-dags),
+though is general enough to contain arbitrary IPLD blocks.
+
+Currently supports only [v1](https://ipld.io/specs/transport/car/carv1/).
+
+It is part of [iroh](https://github.com/n0-computer/iroh).
+
+## License
+
+<sup>
+Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
+2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
+</sup>
+
+<br/>
+
+<sub>
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
+be dual licensed as above, without any additional terms or conditions.
+</sub>
+
diff --git a/adenosine/src/vendored/iroh_car/error.rs b/adenosine/src/vendored/iroh_car/error.rs
new file mode 100644
index 0000000..1edcefe
--- /dev/null
+++ b/adenosine/src/vendored/iroh_car/error.rs
@@ -0,0 +1,29 @@
+use libipld::cid;
+use thiserror::Error;
+
+/// Car utility error
+#[derive(Debug, Error)]
+pub enum Error {
+    #[error("Failed to parse CAR file: {0}")]
+    Parsing(String),
+    #[error("Invalid CAR file: {0}")]
+    InvalidFile(String),
+    #[error("Io error: {0}")]
+    Io(#[from] std::io::Error),
+    #[error("Cbor encoding error: {0}")]
+    Cbor(#[from] libipld::error::Error),
+    #[error("ld read too large {0}")]
+    LdReadTooLarge(usize),
+}
+
+// cid::Error does not implement std::error::Error compatibly for #[from],
+// so convert via its string representation
+impl From<cid::Error> for Error {
+    fn from(err: cid::Error) -> Error {
+        Error::Parsing(err.to_string())
+    }
+}
+
+impl From<cid::multihash::Error> for Error {
+    fn from(err: cid::multihash::Error) -> Error {
+        Error::Parsing(err.to_string())
+    }
+}
diff --git a/adenosine/src/vendored/iroh_car/header.rs b/adenosine/src/vendored/iroh_car/header.rs
new file mode 100644
index 0000000..cd0feb7
--- /dev/null
+++ b/adenosine/src/vendored/iroh_car/header.rs
@@ -0,0 +1,107 @@
+#![allow(unused)]
+
+use libipld::cbor::DagCborCodec;
+use libipld::codec::Codec;
+use libipld::ipld;
+use libipld::Cid;
+
+use super::error::Error;
+
+/// A car header.
+#[derive(Debug, Clone, PartialEq, Eq)]
+#[non_exhaustive]
+pub enum CarHeader {
+    // Only CARv1 is supported today; the enum is #[non_exhaustive] so a V2
+    // variant can be added later without breaking downstream matches.
+    V1(CarHeaderV1),
+}
+
+impl CarHeader {
+    /// Builds a v1 header from the given root CIDs (version is set to 1
+    /// by the `From<Vec<Cid>>` impl on `CarHeaderV1`).
+    pub fn new_v1(roots: Vec<Cid>) -> Self {
+        Self::V1(roots.into())
+    }
+
+    /// Decodes a DAG-CBOR header block.
+    ///
+    /// Rejects headers with an empty root list and any version other
+    /// than 1 (CARv2 is not supported).
+    pub fn decode(buffer: &[u8]) -> Result<Self, Error> {
+        let header: CarHeaderV1 = DagCborCodec
+            .decode(buffer)
+            .map_err(|e| Error::Parsing(e.to_string()))?;
+
+        if header.roots.is_empty() {
+            return Err(Error::Parsing("empty CAR file".to_owned()));
+        }
+
+        if header.version != 1 {
+            return Err(Error::InvalidFile(
+                "Only CAR file version 1 is supported".to_string(),
+            ));
+        }
+
+        Ok(CarHeader::V1(header))
+    }
+
+    /// Encodes the header as DAG-CBOR bytes (without the varint length
+    /// prefix; the writer adds that separately).
+    pub fn encode(&self) -> Result<Vec<u8>, Error> {
+        match self {
+            CarHeader::V1(ref header) => {
+                let res = DagCborCodec.encode(header)?;
+                Ok(res)
+            }
+        }
+    }
+
+    /// Returns the root CIDs declared by this header.
+    pub fn roots(&self) -> &[Cid] {
+        match self {
+            CarHeader::V1(header) => &header.roots,
+        }
+    }
+
+    /// Returns the CAR format version (always 1 for the only variant).
+    pub fn version(&self) -> u64 {
+        match self {
+            CarHeader::V1(_) => 1,
+        }
+    }
+}
+
+/// CAR file header version 1.
+//
+// Serialized via the `DagCbor` derive; `#[ipld]` marks each field for
+// inclusion in the encoded map (keyed by field name).
+#[derive(Debug, Clone, Default, libipld::DagCbor, PartialEq, Eq)]
+pub struct CarHeaderV1 {
+    #[ipld]
+    pub roots: Vec<Cid>,
+    #[ipld]
+    pub version: u64,
+}
+
+impl CarHeaderV1 {
+    /// Creates a new CAR file header
+    ///
+    /// Note: `version` is taken as-is here; only `CarHeader::decode`
+    /// enforces that it equals 1.
+    pub fn new(roots: Vec<Cid>, version: u64) -> Self {
+        Self { roots, version }
+    }
+}
+
+// Convenience conversion: a bare root list becomes a v1 header.
+impl From<Vec<Cid>> for CarHeaderV1 {
+    fn from(roots: Vec<Cid>) -> Self {
+        Self { roots, version: 1 }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use libipld::cbor::DagCborCodec;
+    use libipld::codec::{Decode, Encode};
+    use multihash::MultihashDigest;
+
+    use super::*;
+
+    // Round-trip: encoding a v1 header and decoding it back yields an
+    // equal value.
+    #[test]
+    fn symmetric_header_v1() {
+        let digest = multihash::Code::Blake2b256.digest(b"test");
+        let cid = Cid::new_v1(DagCborCodec.into(), digest);
+
+        let header = CarHeaderV1::from(vec![cid]);
+
+        let mut bytes = Vec::new();
+        header.encode(DagCborCodec, &mut bytes).unwrap();
+
+        assert_eq!(
+            CarHeaderV1::decode(DagCborCodec, &mut std::io::Cursor::new(&bytes)).unwrap(),
+            header
+        );
+    }
+}
diff --git a/adenosine/src/vendored/iroh_car/lib.rs b/adenosine/src/vendored/iroh_car/lib.rs
new file mode 100644
index 0000000..d4e5f66
--- /dev/null
+++ b/adenosine/src/vendored/iroh_car/lib.rs
@@ -0,0 +1,11 @@
+//! Implementation of the [car](https://ipld.io/specs/transport/car/) format.
+
+// NOTE(review): this is the upstream crate root, vendored verbatim. The
+// `crate::` paths in the re-exports below only resolve when this file IS the
+// crate root; inside adenosine the module root is mod.rs, so this file looks
+// like dead reference code — confirm it is not declared as a module anywhere.
+mod error;
+mod header;
+mod reader;
+mod util;
+mod writer;
+
+pub use crate::header::CarHeader;
+pub use crate::reader::CarReader;
+pub use crate::writer::CarWriter;
diff --git a/adenosine/src/vendored/iroh_car/mod.rs b/adenosine/src/vendored/iroh_car/mod.rs
new file mode 100644
index 0000000..b40e046
--- /dev/null
+++ b/adenosine/src/vendored/iroh_car/mod.rs
@@ -0,0 +1,10 @@
+//! Module version of lib.rs: same submodules and re-exports, but with paths
+//! relative to this module so the vendored code can live under
+//! `vendored::iroh_car` instead of being its own crate.
+mod error;
+mod header;
+mod reader;
+mod util;
+mod writer;
+
+pub use header::CarHeader;
+pub use reader::CarReader;
+pub use writer::CarWriter;
diff --git a/adenosine/src/vendored/iroh_car/reader.rs b/adenosine/src/vendored/iroh_car/reader.rs
new file mode 100644
index 0000000..90313f5
--- /dev/null
+++ b/adenosine/src/vendored/iroh_car/reader.rs
@@ -0,0 +1,101 @@
+#![allow(unused)]
+
+use futures::Stream;
+use libipld::Cid;
+use tokio::io::AsyncRead;
+
+use super::{
+ error::Error,
+ header::CarHeader,
+ util::{ld_read, read_node},
+};
+
+/// Reads CAR files that are in a BufReader
+pub struct CarReader<R> {
+    reader: R,
+    header: CarHeader,
+    // Scratch buffer reused for every section read, so steady-state reads
+    // do not allocate (see util::ld_read, which grows it on demand).
+    buffer: Vec<u8>,
+}
+
+impl<R> CarReader<R>
+where
+    R: AsyncRead + Send + Unpin,
+{
+    /// Creates a new CarReader and parses the CarHeader
+    ///
+    /// Reads the varint-prefixed header section immediately; an empty or
+    /// truncated stream yields a `Parsing` error rather than `Ok`.
+    pub async fn new(mut reader: R) -> Result<Self, Error> {
+        let mut buffer = Vec::new();
+
+        match ld_read(&mut reader, &mut buffer).await? {
+            Some(buf) => {
+                let header = CarHeader::decode(buf)?;
+
+                Ok(CarReader {
+                    reader,
+                    header,
+                    buffer,
+                })
+            }
+            None => Err(Error::Parsing(
+                "failed to parse uvarint for header".to_string(),
+            )),
+        }
+    }
+
+    /// Returns the header of this car file.
+    pub fn header(&self) -> &CarHeader {
+        &self.header
+    }
+
+    /// Returns the next IPLD Block in the buffer
+    ///
+    /// `Ok(None)` signals end of stream. Each call copies the block data
+    /// into a fresh `Vec` owned by the caller.
+    pub async fn next_block(&mut self) -> Result<Option<(Cid, Vec<u8>)>, Error> {
+        read_node(&mut self.reader, &mut self.buffer).await
+    }
+
+    /// Consumes the reader, returning a stream of all remaining
+    /// `(Cid, data)` blocks; the stream ends when `read_node` yields None.
+    pub fn stream(self) -> impl Stream<Item = Result<(Cid, Vec<u8>), Error>> {
+        futures::stream::try_unfold(self, |mut this| async move {
+            let maybe_block = read_node(&mut this.reader, &mut this.buffer).await?;
+            Ok(maybe_block.map(|b| (b, this)))
+        })
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use std::io::Cursor;
+
+    use futures::TryStreamExt;
+    use libipld::cbor::DagCborCodec;
+    use libipld::Cid;
+    use multihash::MultihashDigest;
+
+    use super::super::{header::CarHeaderV1, writer::CarWriter};
+
+    use super::*;
+
+    // End-to-end round-trip: write two blocks with CarWriter, then stream
+    // them back with CarReader and check CIDs and payloads in order.
+    #[tokio::test]
+    async fn car_write_read() {
+        let digest_test = multihash::Code::Blake2b256.digest(b"test");
+        let cid_test = Cid::new_v1(DagCborCodec.into(), digest_test);
+
+        let digest_foo = multihash::Code::Blake2b256.digest(b"foo");
+        let cid_foo = Cid::new_v1(DagCborCodec.into(), digest_foo);
+
+        let header = CarHeader::V1(CarHeaderV1::from(vec![cid_foo]));
+
+        let mut buffer = Vec::new();
+        let mut writer = CarWriter::new(header, &mut buffer);
+        writer.write(cid_test, b"test").await.unwrap();
+        writer.write(cid_foo, b"foo").await.unwrap();
+        writer.finish().await.unwrap();
+
+        let reader = Cursor::new(&buffer);
+        let car_reader = CarReader::new(reader).await.unwrap();
+        let files: Vec<_> = car_reader.stream().try_collect().await.unwrap();
+
+        assert_eq!(files.len(), 2);
+        assert_eq!(files[0].0, cid_test);
+        assert_eq!(files[0].1, b"test");
+        assert_eq!(files[1].0, cid_foo);
+        assert_eq!(files[1].1, b"foo");
+    }
+}
diff --git a/adenosine/src/vendored/iroh_car/util.rs b/adenosine/src/vendored/iroh_car/util.rs
new file mode 100644
index 0000000..90435b1
--- /dev/null
+++ b/adenosine/src/vendored/iroh_car/util.rs
@@ -0,0 +1,95 @@
+use integer_encoding::VarIntAsyncReader;
+use libipld::Cid;
+use tokio::io::{AsyncRead, AsyncReadExt};
+
+use super::error::Error;
+
+/// Maximum size that is used for single node.
+pub(crate) const MAX_ALLOC: usize = 4 * 1024 * 1024;
+
+/// Reads one varint-length-prefixed section into `buf`, returning a slice
+/// of exactly the bytes read, or `Ok(None)` at end of stream.
+///
+/// Sections longer than `MAX_ALLOC` are rejected with `LdReadTooLarge` so a
+/// hostile length prefix cannot force an unbounded allocation. `buf` is only
+/// grown, never shrunk, so it can be reused across calls.
+pub(crate) async fn ld_read<R>(mut reader: R, buf: &mut Vec<u8>) -> Result<Option<&[u8]>, Error>
+where
+    R: AsyncRead + Send + Unpin,
+{
+    let length: usize = match VarIntAsyncReader::read_varint_async(&mut reader).await {
+        Ok(len) => len,
+        Err(e) => {
+            // NOTE(review): EOF here is treated as a clean end of stream —
+            // including EOF in the middle of a varint, which silently drops a
+            // truncated trailing section. Matches upstream; confirm intended.
+            if e.kind() == std::io::ErrorKind::UnexpectedEof {
+                return Ok(None);
+            }
+            return Err(Error::Parsing(e.to_string()));
+        }
+    };
+
+    if length > MAX_ALLOC {
+        return Err(Error::LdReadTooLarge(length));
+    }
+    if length > buf.len() {
+        buf.resize(length, 0);
+    }
+
+    // A short read here (EOF inside the declared section) is a parse error,
+    // not a clean end of stream.
+    reader
+        .read_exact(&mut buf[..length])
+        .await
+        .map_err(|e| Error::Parsing(e.to_string()))?;
+
+    Ok(Some(&buf[..length]))
+}
+
+/// Reads one CAR section and splits it into its leading CID and the block
+/// payload that follows it. Returns `Ok(None)` at end of stream.
+pub(crate) async fn read_node<R>(
+    buf_reader: &mut R,
+    buf: &mut Vec<u8>,
+) -> Result<Option<(Cid, Vec<u8>)>, Error>
+where
+    R: AsyncRead + Send + Unpin,
+{
+    if let Some(buf) = ld_read(buf_reader, buf).await? {
+        // The CID is self-delimiting; cursor.position() after read_bytes
+        // marks where the block data begins.
+        let mut cursor = std::io::Cursor::new(buf);
+        let c = Cid::read_bytes(&mut cursor)?;
+        let pos = cursor.position() as usize;
+
+        return Ok(Some((c, buf[pos..].to_vec())));
+    }
+    Ok(None)
+}
+
+#[cfg(test)]
+mod tests {
+    use integer_encoding::VarIntAsyncWriter;
+    use tokio::io::{AsyncWrite, AsyncWriteExt};
+
+    use super::*;
+
+    // Test helper: write one varint-length-prefixed section.
+    async fn ld_write<'a, W>(writer: &mut W, bytes: &[u8]) -> Result<(), Error>
+    where
+        W: AsyncWrite + Send + Unpin,
+    {
+        writer.write_varint_async(bytes.len()).await?;
+        writer.write_all(bytes).await?;
+        writer.flush().await?;
+        Ok(())
+    }
+
+    // Round-trip a small section through ld_write/ld_read.
+    #[tokio::test]
+    async fn ld_read_write_good() {
+        let mut buffer = Vec::<u8>::new();
+        ld_write(&mut buffer, b"test bytes").await.unwrap();
+        let reader = std::io::Cursor::new(buffer);
+
+        let mut buffer = vec![1u8; 1024];
+        let read = ld_read(reader, &mut buffer).await.unwrap().unwrap();
+        assert_eq!(read, b"test bytes");
+    }
+
+    // A section one byte over MAX_ALLOC must be rejected, not allocated.
+    #[tokio::test]
+    async fn ld_read_write_fail() {
+        let mut buffer = Vec::<u8>::new();
+        let size = MAX_ALLOC + 1;
+        ld_write(&mut buffer, &vec![2u8; size]).await.unwrap();
+        let reader = std::io::Cursor::new(buffer);
+
+        let mut buffer = vec![1u8; 1024];
+        let read = ld_read(reader, &mut buffer).await;
+        assert!(matches!(read, Err(Error::LdReadTooLarge(_))));
+    }
+}
diff --git a/adenosine/src/vendored/iroh_car/writer.rs b/adenosine/src/vendored/iroh_car/writer.rs
new file mode 100644
index 0000000..b7e25d3
--- /dev/null
+++ b/adenosine/src/vendored/iroh_car/writer.rs
@@ -0,0 +1,73 @@
+#![allow(unused)]
+
+use integer_encoding::VarIntAsyncWriter;
+use libipld::Cid;
+use tokio::io::{AsyncWrite, AsyncWriteExt};
+
+use super::{error::Error, header::CarHeader};
+
+/// Writes CARv1 streams: a varint-prefixed header followed by
+/// varint-prefixed `CID || data` sections.
+#[derive(Debug)]
+pub struct CarWriter<W> {
+    header: CarHeader,
+    writer: W,
+    // Scratch buffer for CID bytes, reused across write() calls.
+    cid_buffer: Vec<u8>,
+    // Header emission is deferred to the first write() call.
+    is_header_written: bool,
+}
+
+impl<W> CarWriter<W>
+where
+    W: AsyncWrite + Send + Unpin,
+{
+    /// Creates a writer for `header`; nothing is written until the first
+    /// call to [`CarWriter::write`].
+    pub fn new(header: CarHeader, writer: W) -> Self {
+        CarWriter {
+            header,
+            writer,
+            cid_buffer: Vec::new(),
+            is_header_written: false,
+        }
+    }
+
+    /// Writes header and stream of data to writer in Car format.
+    ///
+    /// The section layout is `varint(len(cid) + len(data)) || cid || data`.
+    /// The first call also emits the varint-prefixed header.
+    pub async fn write<T>(&mut self, cid: Cid, data: T) -> Result<(), Error>
+    where
+        T: AsRef<[u8]>,
+    {
+        if !self.is_header_written {
+            // Write header bytes
+            let header_bytes = self.header.encode()?;
+            self.writer.write_varint_async(header_bytes.len()).await?;
+            self.writer.write_all(&header_bytes).await?;
+            self.is_header_written = true;
+        }
+
+        // Write the given block.
+        self.cid_buffer.clear();
+        // Writing a CID into a Vec is infallible; expect documents that.
+        cid.write_bytes(&mut self.cid_buffer).expect("vec write");
+
+        let data = data.as_ref();
+        let len = self.cid_buffer.len() + data.len();
+
+        self.writer.write_varint_async(len).await?;
+        self.writer.write_all(&self.cid_buffer).await?;
+        self.writer.write_all(data).await?;
+
+        Ok(())
+    }
+
+    /// Finishes writing, including flushing and returns the writer.
+    pub async fn finish(mut self) -> Result<W, Error> {
+        self.flush().await?;
+        Ok(self.writer)
+    }
+
+    /// Flushes the underlying writer.
+    pub async fn flush(&mut self) -> Result<(), Error> {
+        self.writer.flush().await?;
+        Ok(())
+    }
+
+    /// Consumes the [`CarWriter`] and returns the underlying writer.
+    /// Note: does not flush; callers wanting a flush should use `finish`.
+    pub fn into_inner(self) -> W {
+        self.writer
+    }
+}