Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions data/src/merkle_proof/proof_tree.rs
Original file line number Diff line number Diff line change
Expand Up @@ -305,6 +305,19 @@ impl MerkleProofFold {
/// Create a present proof leaf carrying `data` under the given minimum-presence
/// `constraint`; delegates directly to [`CompressibleMerkleProof::new_leaf`].
pub fn new_leaf(constraint: MinimumPresence, data: Vec<u8>) -> impl Foldable<Self> {
    CompressibleMerkleProof::new_leaf(constraint, data)
}

/// Build a blinded proof leaf that stands in for a subtree known only by its hash.
///
/// Intended for subtrees that were never touched while the proof was being
/// generated: the resulting proof carries just the hash rather than the
/// subtree's contents.
/// TODO: RV-968 - remove workaround for creating CompressibleMerkleProof, as users can also be
/// in "may-blind" state.
pub fn into_blind(self, hash: Hash) -> CompressibleMerkleProof {
    // The builder's own state is intentionally discarded; only the supplied
    // hash is represented in the resulting proof.
    let tree = MerkleProof::leaf_blind(hash);
    CompressibleMerkleProof {
        constraint: MinimumPresence::MayOmit,
        tree,
    }
}
}

impl Fold for MerkleProofFold {
Expand Down
194 changes: 194 additions & 0 deletions durable-storage/src/avl/resolver.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,11 +35,14 @@ use octez_riscv_data::foldable::Fold;
use octez_riscv_data::foldable::Foldable;
use octez_riscv_data::hash::Hash;
use octez_riscv_data::hash::HashFold;
use octez_riscv_data::hash::PartialHash;
use octez_riscv_data::hash::PartialHashFold;
use octez_riscv_data::merkle_proof::Deserialiser;
use octez_riscv_data::merkle_proof::DeserialiserNode;
use octez_riscv_data::merkle_proof::FromProof;
use octez_riscv_data::merkle_proof::Partial;
use octez_riscv_data::merkle_proof::SuspendedResult;
use octez_riscv_data::merkle_proof::proof_tree::MerkleProofFold;
use octez_riscv_data::mode::Mode;
use octez_riscv_data::mode::Normal;
use octez_riscv_data::mode::Prove;
Expand Down Expand Up @@ -450,6 +453,18 @@ pub struct ProveNodeId {
node: LazyNodeId,
}

// TODO RV-895: This method is implemented to satisfy trait-bounds in `tree::upsert`.
// The current implementation around proofs for created nodes during proof-generation
// needs to be fixed.
impl From<Node<ProveTreeId, Prove<'static>>> for ProveNodeId {
    fn from(value: Node<ProveTreeId, Prove<'static>>) -> Self {
        // The node is stored eagerly in `inner`; the lazy id is a placeholder
        // (hash of the empty byte string) — see the TODO above.
        let inner = OnceLock::from(Rc::new(value));
        Self {
            inner,
            node: LazyNodeId::from(Hash::hash_bytes(&[])),
        }
    }
}

impl Foldable<HashFold> for ProveNodeId {
fn fold(&self, builder: HashFold) -> <HashFold as Fold>::Folded {
match self.inner.get() {
Expand All @@ -459,6 +474,15 @@ impl Foldable<HashFold> for ProveNodeId {
}
}

impl Foldable<MerkleProofFold> for ProveNodeId {
    fn fold(&self, builder: MerkleProofFold) -> <MerkleProofFold as Fold>::Folded {
        // A resolved node folds its full contents into the proof; an
        // unresolved one appears only as a blinded leaf carrying its hash.
        if let Some(resolved) = self.inner.get() {
            resolved.as_ref().fold(builder)
        } else {
            builder.into_blind(Hash::from_foldable(&self.node))
        }
    }
}

/// Identifier for a tree resolved in [`Prove`] mode.
///
/// Like [`ProveNodeId`], this wrapper keeps the original lazy identifier and fills `inner` on the
Expand All @@ -470,6 +494,17 @@ pub struct ProveTreeId {
tree: LazyTreeId,
}

// `Node::new` requires `TreeId: Default`. TODO RV-895: fix implementation around proofs
// for created nodes during proof-generation.
impl Default for ProveTreeId {
    fn default() -> Self {
        // An already-resolved empty tree paired with a default lazy id.
        let inner = OnceLock::from(Tree::default());
        Self {
            inner,
            tree: LazyTreeId::default(),
        }
    }
}

impl Foldable<HashFold> for ProveTreeId {
fn fold(&self, builder: HashFold) -> <HashFold as Fold>::Folded {
match self.inner.get() {
Expand All @@ -479,6 +514,15 @@ impl Foldable<HashFold> for ProveTreeId {
}
}

impl Foldable<MerkleProofFold> for ProveTreeId {
    fn fold(&self, builder: MerkleProofFold) -> <MerkleProofFold as Fold>::Folded {
        // Resolved subtrees contribute their contents to the proof;
        // unresolved ones are blinded down to their hash.
        if let Some(resolved) = self.inner.get() {
            resolved.fold(builder)
        } else {
            builder.into_blind(Hash::from_foldable(&self.tree))
        }
    }
}

/// Adapter that projects lazy AVL identifiers into prove-mode values on demand.
///
/// [`ProveResolver`] wraps another resolver for [`LazyNodeId`] and [`LazyTreeId`]. It preserves the
Expand Down Expand Up @@ -586,6 +630,15 @@ impl From<Node<VerifyTreeId, Verify>> for VerifyNodeId {
}
}

impl Foldable<PartialHashFold> for VerifyNodeId {
    fn fold(&self, builder: PartialHashFold) -> PartialHash {
        // Present nodes fold their contents; blinded ones contribute the
        // hash that was carried in the proof.
        match self {
            Self::Present(inner) => inner.as_ref().fold(builder),
            Self::Blinded(hash) => builder.present(*hash),
        }
    }
}

impl FromProof for VerifyNodeId {
fn from_proof<Proof: Deserialiser>(proof: Proof) -> SuspendedResult<Proof, Self> {
let ctx = proof.into_node()?;
Expand All @@ -609,6 +662,16 @@ pub enum VerifyTreeId {
Absent,
}

impl Foldable<PartialHashFold> for VerifyTreeId {
    fn fold(&self, builder: PartialHashFold) -> PartialHash {
        // Present subtrees fold fully, blinded ones contribute their known
        // hash, and absent ones fall back to the builder's previous value.
        match self {
            Self::Present(inner) => inner.fold(builder),
            Self::Blinded(hash) => builder.present(*hash),
            Self::Absent => builder.previous(),
        }
    }
}

impl FromProof for VerifyTreeId {
fn from_proof<Proof: Deserialiser>(proof: Proof) -> SuspendedResult<Proof, Self> {
let ctx = proof.into_node()?;
Expand Down Expand Up @@ -689,8 +752,14 @@ mod tests {
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;

use octez_riscv_data::components::atom::AtomMode;
use octez_riscv_data::components::bytes::Bytes;
use octez_riscv_data::components::bytes::BytesMode;
use octez_riscv_data::hash::Hash;
use octez_riscv_data::hash::PartialHash;
use octez_riscv_data::merkle_proof::FromProof;
use octez_riscv_data::merkle_proof::proof_tree::MerkleProof;
use octez_riscv_data::merkle_proof::proof_tree::ProofTree;
use octez_riscv_data::mode::Normal;
use octez_riscv_data::mode::Verify;
use octez_riscv_data::mode::utils::NotFound;
Expand All @@ -717,8 +786,10 @@ mod tests {
use crate::storage::KeyValueStore;
use crate::storage::Storable;
use crate::storage::StoreOptions;
use crate::storage::TestKeyValueStore;
use crate::storage::in_memory::InMemoryKeyValueStore;
use crate::storage::in_memory::InMemoryRepo;
use crate::storage::setup_repo;

/// A wrapper around an in-memory key-value store that counts the number of `blob_get` calls.
#[derive(Debug, Default)]
Expand Down Expand Up @@ -1392,4 +1463,127 @@ mod tests {

assert!(resolved.root().is_some(), "tree should have a root");
}

/// Read `key` from `tree`, assert the contents match `expected`, then overwrite at
/// `offset` with `new_data`.
///
/// Shared across the Prove and Verify halves of
/// [`prove_verify_round_trip_get_and_write`] so both modes execute byte-for-byte
/// identical operations.
// TODO RV-969: rewrite in terms of an `OperationStream`.
fn run_read_then_write<NodeId, TreeId, M, Res>(
    tree: &mut Tree<NodeId>,
    resolver: &mut Res,
    key: &Key,
    expected: &[u8],
    offset: usize,
    new_data: &[u8],
) where
    NodeId: Clone + From<Node<TreeId, M>>,
    TreeId: Default,
    M: BytesMode + AtomMode,
    Res: AvlResolver<NodeId, TreeId, M>,
{
    // Fetch the value and check it before mutating anything.
    let bytes: &Bytes<M> = tree
        .get(key, resolver)
        .expect("get should succeed")
        .expect("key should exist");
    let mut contents = vec![0u8; bytes.len()];
    bytes.read(0, &mut contents);
    assert_eq!(&contents, expected, "read should observe the expected bytes");

    // In-place overwrite — no structural change to the tree.
    tree.write(key, offset, new_data, resolver)
        .expect("write should succeed");
}

/// Build a three-node tree, persist it, then run a round-trip through Prove and Verify
/// modes using only read (`get`) and in-place write operations (no structural changes).
///
/// TODO: RV-969: Rewrite operations to use OperationStream.
#[test]
fn prove_verify_round_trip_get_and_write() {
    let key1 = Key::new(&[1]).expect("key should be valid");
    let key2 = Key::new(&[2]).expect("key should be valid");
    let key3 = Key::new(&[3]).expect("key should be valid");

    // Shared operation parameters replayed across Prove and Verify modes.
    let initial_data: &[u8] = b"beta";
    let write_offset: usize = 0;
    let overwrite_data: &[u8] = b"BETA";

    // ---- Normal: build a three-node tree and persist ----
    let mut tree: Tree<ArcNodeId> = Default::default();
    let mut resolver = ArcResolver;
    tree.set(&key1, b"alpha", &mut resolver)
        .expect("set should succeed");
    tree.set(&key2, b"beta", &mut resolver)
        .expect("set should succeed");
    tree.set(&key3, b"gamma", &mut resolver)
        .expect("set should succeed");

    let root_hash = Hash::from_foldable(tree.root().expect("tree should have a root node"));

    let (_keepalive, repo) = setup_repo();
    let persistence_layer = Arc::new(
        TestKeyValueStore::new(&repo).expect("creating persistence layer should succeed"),
    );
    persist_tree(&tree, &resolver, persistence_layer.as_ref());

    // ---- Prove: load, project, do get + write ----
    let lazy_tree: Tree<LazyNodeId> = Some(LazyNodeId::from(root_hash)).into();
    let lazy_resolver = LazyResolver::new(persistence_layer);
    let mut prove_tree: Tree<ProveNodeId> = lazy_tree.into_proof();
    let mut prove_resolver = ProveResolver(lazy_resolver);

    // Read key2, then overwrite in place (no structural change).
    run_read_then_write(
        &mut prove_tree,
        &mut prove_resolver,
        &key2,
        initial_data,
        write_offset,
        overwrite_data,
    );

    // Expected final hash (from prove-mode tree after operations)
    let expected_hash = prove_tree.hash();

    // ---- Generate proof ----
    let merkle_proof = MerkleProof::from_foldable(&prove_tree);

    // ---- Verify: deserialize proof, replay operations ----
    let proof_deser = ProofTree::Present(&merkle_proof);
    let verify_tree_id = VerifyTreeId::from_proof(proof_deser)
        .expect("proof deserialization should succeed")
        .into_result();

    let VerifyTreeId::Present(mut verify_tree) = verify_tree_id else {
        panic!("expected Present tree from proof, got {verify_tree_id:#?}");
    };
    let mut verify_resolver = VerifyResolver;

    let final_hash = catch_not_found(move || {
        // Replay the identical read + write against the verify tree.
        run_read_then_write(
            &mut verify_tree,
            &mut verify_resolver,
            &key2,
            initial_data,
            write_offset,
            overwrite_data,
        );

        // Compute verify hash
        let verify_tree_id = VerifyTreeId::Present(verify_tree);
        PartialHash::from_foldable(Some(merkle_proof), &verify_tree_id)
            .to_hash()
            .expect("verify hash should be computable")
    })
    .expect("verify operations should not trigger not_found");

    assert_eq!(
        expected_hash, final_hash,
        "prove and verify hashes should match after identical get + write operations"
    );
}
}
38 changes: 38 additions & 0 deletions durable-storage/src/avl/tree.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,17 +18,22 @@ use octez_riscv_data::foldable::Foldable;
use octez_riscv_data::foldable::NodeFold;
use octez_riscv_data::hash::Hash;
use octez_riscv_data::hash::HashFold;
use octez_riscv_data::hash::PartialHash;
use octez_riscv_data::hash::PartialHashFold;
use octez_riscv_data::merkle_proof::Deserialiser;
use octez_riscv_data::merkle_proof::DeserialiserNode;
use octez_riscv_data::merkle_proof::FromProof;
use octez_riscv_data::merkle_proof::Partial;
use octez_riscv_data::merkle_proof::proof_tree::MerkleProofFold;
use octez_riscv_data::merkle_proof::proof_tree::MinimumPresence;
use octez_riscv_data::mode::utils::not_found;
use octez_riscv_data::serialisation::deserialise;
use octez_riscv_data::serialisation::serialise;
use perfect_derive::perfect_derive;

use super::node::Node;
use super::resolver::ProveNodeId;
use super::resolver::VerifyNodeId;
use crate::avl::resolver::AvlResolver;
use crate::avl::resolver::LazyNodeId;
use crate::avl::resolver::NodeResolver;
Expand Down Expand Up @@ -364,6 +369,39 @@ impl<NodeId: Foldable<HashFold>> Foldable<HashFold> for Tree<NodeId> {
}
}

impl Foldable<MerkleProofFold> for Tree<ProveNodeId> {
    fn fold(&self, builder: MerkleProofFold) -> <MerkleProofFold as Fold>::Folded {
        let mut node = builder.into_node_fold();

        // First child: a presence flag that must always appear in the proof.
        let present = self.0.is_some();
        let encoded = serialise(present).expect("Serialising a bool should never fail");
        let flag_leaf = MerkleProofFold::new_leaf(MinimumPresence::Present, encoded);
        node.add(&flag_leaf);

        // Second child, only when the tree is non-empty: the root node itself.
        if let Some(root) = &self.0 {
            node.add(root);
        }

        node.done()
    }
}

impl Foldable<PartialHashFold> for Tree<VerifyNodeId> {
    fn fold(&self, builder: PartialHashFold) -> PartialHash {
        let mut node = builder.into_node_fold();

        // A presence flag is hashed in first, always present.
        let present = self.0.is_some();
        let flag_hash =
            Hash::hash_encodable(present).expect("Hashing a bool should never fail");
        node.add(&PartialHash::Present(flag_hash));

        // The root node follows, if the tree is non-empty.
        if let Some(root) = &self.0 {
            node.add(root);
        }

        node.done()
    }
}

impl<NodeId: Storable> Storable for Tree<NodeId> {
fn store(
&self,
Expand Down
Loading