Commit 5b93acb ("Verify proof")
1 parent: 940ede8

2 files changed: +203 -16 lines

contracts/utils/cryptography/TrieProof.sol (25 additions, 16 deletions)

```diff
@@ -5,6 +5,7 @@ import {Bytes} from "../Bytes.sol";
 import {Strings} from "../Strings.sol";
 import {RLP} from "../RLP.sol";
 import {Math} from "../math/Math.sol";
+import {Memory} from "../Memory.sol";
 
 /**
  * @dev Library for verifying Ethereum Merkle-Patricia trie inclusion proofs.
@@ -15,6 +16,7 @@ import {Math} from "../math/Math.sol";
 library TrieProof {
     using Bytes for bytes;
     using RLP for *;
+    using Memory for Memory.Slice;
     using Strings for string;
 
     enum Prefix {
@@ -42,7 +44,7 @@ library TrieProof {
 
     struct Node {
         bytes encoded; // Raw RLP encoded node
-        RLP.Item[] decoded; // Decoded RLP items
+        Memory.Slice[] decoded; // Decoded RLP items
     }
 
     /// @dev The radix of the Ethereum trie (hexadecimal = 16)
@@ -127,6 +129,7 @@ library TrieProof {
                 // Otherwise, continue down the branch specified by the next nibble in the key
                 uint8 branchKey = uint8(key[keyIndex]);
                 (nodeId, keyIndex) = (_id(node.decoded[branchKey]), keyIndex + 1);
+                nodeId = node.decoded[11].readBytes(); // test
             } else if (nodeLength == LEAF_OR_EXTENSION_NODE_LENGTH) {
                 return _processLeafOrExtension(node, trieProof, key, nodeId, keyIndex, i);
             }
@@ -142,11 +145,15 @@
         Node memory node,
         uint256 keyIndex
     ) private pure returns (ProofError) {
-        if (keyIndex == 0 && !string(bytes.concat(keccak256(node.encoded))).equal(string(nodeId)))
-            return ProofError.INVALID_ROOT_HASH; // Root node must match root hash
-        if (node.encoded.length >= 32 && !string(bytes.concat(keccak256(node.encoded))).equal(string(nodeId)))
-            return ProofError.INVALID_LARGE_INTERNAL_HASH; // Large nodes are stored as hashes
-        if (!string(node.encoded).equal(string(nodeId))) return ProofError.INVALID_INTERNAL_NODE_HASH; // Small nodes must match directly
+        if (keyIndex == 0) {
+            if (!string(bytes.concat(keccak256(node.encoded))).equal(string(nodeId)))
+                return ProofError.INVALID_ROOT_HASH; // Root node must match root hash
+        } else if (node.encoded.length >= 32) {
+            if (!string(bytes.concat(keccak256(node.encoded))).equal(string(nodeId)))
+                return ProofError.INVALID_LARGE_INTERNAL_HASH; // Large nodes are stored as hashes
+        } else if (!string(node.encoded).equal(string(nodeId))) {
+            return ProofError.INVALID_INTERNAL_NODE_HASH; // Small nodes must match directly
+        }
         return ProofError.NO_ERROR; // No error
     }
 
@@ -165,14 +172,10 @@
         uint256 i
     ) private pure returns (bytes memory value, ProofError err) {
         bytes memory path = _path(node);
-        uint8 prefix = uint8(path[0]);
+        uint8 prefix = uint8(path[0] >> 4);
         uint8 offset = 2 - (prefix % 2); // Calculate offset based on even/odd path length
         bytes memory pathRemainder = Bytes.slice(path, offset); // Path after the prefix
         bytes memory keyRemainder = Bytes.slice(key, keyIndex); // Remaining key to match
-        uint256 sharedNibbleLength = _sharedNibbleLength(pathRemainder, keyRemainder);
-
-        // Path must match at least partially with our key
-        if (sharedNibbleLength == 0) return ("", ProofError.INVALID_PATH_REMAINDER);
         if (prefix > uint8(type(Prefix).max)) return ("", ProofError.UNKNOWN_NODE_PREFIX);
 
         // Leaf node (terminal) - return its value if key matches completely
@@ -181,7 +184,12 @@
             return _validateLastItem(node.decoded[1], trieProof, i);
         }
 
-        // Extension node (non-terminal) - continue to next node
+        // Extension node (non-terminal) - validate shared path & continue to next node
+        uint256 sharedNibbleLength = _sharedNibbleLength(pathRemainder, keyRemainder);
+        if (Prefix(prefix) == Prefix.EXTENSION_EVEN || Prefix(prefix) == Prefix.EXTENSION_ODD) {
+            // Path must match at least partially with our key
+            if (sharedNibbleLength == 0) return ("", ProofError.INVALID_PATH_REMAINDER);
+        }
         // Increment keyIndex by the number of nibbles consumed
         (nodeId, keyIndex) = (_id(node.decoded[1]), keyIndex + sharedNibbleLength);
     }
@@ -191,7 +199,7 @@
      * Ensures the value is not empty and no extra proof elements exist.
      */
     function _validateLastItem(
-        RLP.Item memory item,
+        Memory.Slice item,
         Node[] memory trieProof,
         uint256 i
     ) private pure returns (bytes memory value, ProofError) {
@@ -209,16 +217,17 @@
         uint256 length = proof.length;
         proof_ = new Node[](length);
         for (uint256 i = 0; i < length; i++) {
-            proof_[i] = Node(proof[i], proof[i].readList());
+            proof_[i] = Node(proof[i], proof[i].decodeList());
         }
     }
 
     /**
      * @dev Extracts the node ID (hash or raw data based on size).
      * For small nodes (<32 bytes), returns the raw bytes; for large nodes, returns the hash.
      */
-    function _id(RLP.Item memory node) private pure returns (bytes memory) {
-        return node.length < 32 ? node.readRawBytes() : node.readBytes();
+    function _id(Memory.Slice node) private pure returns (bytes memory) {
+        bytes memory raw = node.readBytes();
+        return raw.length < 32 ? raw : bytes.concat(keccak256(raw));
     }
 
     /**
```
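
Two of the changes above follow standard Merkle-Patricia conventions. First, the node-type flag of a hex-prefix (HP) encoded path sits in the high nibble of its first byte, which is why the prefix is now read as `path[0] >> 4` rather than the whole byte. Second, as the `_id` docstring states, small nodes (<32 bytes of RLP) are referenced inline while larger nodes are referenced by their keccak256 hash. The Solidity sketch below restates both rules for reference; the library and function names are illustrative and are not part of TrieProof.

```solidity
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;

/// Illustrative restatement of two Merkle-Patricia conventions relied on by the diff
/// above. All names here are hypothetical; this is not the library's code.
library MptConventionsSketch {
    /// Hex-prefix encoding: the HIGH nibble of the first byte is the node-type flag
    /// (0: extension/even, 1: extension/odd, 2: leaf/even, 3: leaf/odd).
    function hpFlag(bytes memory hpPath) internal pure returns (uint8) {
        return uint8(hpPath[0] >> 4);
    }

    /// Odd-length paths keep their first nibble in the low half of byte 0, so only one
    /// prefix nibble is skipped; even-length paths also skip a zero padding nibble.
    /// This mirrors the `offset = 2 - (prefix % 2)` computation in the library.
    function hpPrefixNibbles(bytes memory hpPath) internal pure returns (uint8) {
        return 2 - (hpFlag(hpPath) % 2);
    }

    /// Child references: a node whose RLP encoding is shorter than 32 bytes is embedded
    /// inline in its parent; larger nodes are referenced by the keccak256 hash of their
    /// encoding, matching the rule documented on `_id`.
    function childReference(bytes memory rlpEncodedNode) internal pure returns (bytes memory) {
        return rlpEncodedNode.length < 32 ? rlpEncodedNode : bytes.concat(keccak256(rlpEncodedNode));
    }
}
```

Assuming the `Prefix` enum follows the canonical HP ordering shown above, flags 0 and 1 are the extension cases that the new `Prefix.EXTENSION_EVEN || Prefix.EXTENSION_ODD` check singles out before requiring a non-zero shared nibble length.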
New test file (178 additions, 0 deletions)

```diff
@@ -0,0 +1,178 @@
+const { ethers } = require('hardhat');
+const { expect } = require('chai');
+const { spawn } = require('child_process');
+
+const anvilPort = 8546;
+const ProofError = {
+  NO_ERROR: 0,
+  EMPTY_KEY: 1,
+  INDEX_OUT_OF_BOUNDS: 2,
+  INVALID_ROOT_HASH: 3,
+  INVALID_LARGE_INTERNAL_HASH: 4,
+  INVALID_INTERNAL_NODE_HASH: 5,
+  EMPTY_VALUE: 6,
+  INVALID_EXTRA_PROOF_ELEMENT: 7,
+  INVALID_PATH_REMAINDER: 8,
+  INVALID_KEY_REMAINDER: 9,
+  UNKNOWN_NODE_PREFIX: 10,
+  UNPARSEABLE_NODE: 11,
+  INVALID_PROOF: 12,
+};
+
+async function fixture() {
+  const anvil = spawn('anvil', ['--port', anvilPort], {
+    timeout: 30000,
+  }); // Method eth_getProof is not supported with default Hardhat Network
+  await new Promise(resolve => {
+    anvil.stdout.once('data', resolve);
+  });
+  if (process.env.ANVIL_LOGS === 'true') {
+    anvil.stdout.on('data', function (data) {
+      console.log(data.toString());
+    });
+  }
+  const provider = new ethers.JsonRpcProvider(`http://localhost:${anvilPort}`);
+  const account = await provider.getSigner(0);
+  const mock = (await ethers.deployContract('$TrieProof', account)).connect(account);
+  const storage = (await ethers.deployContract('StorageSlotMock', account)).connect(account);
+  return {
+    anvil,
+    provider,
+    mock,
+    storage,
+  };
+}
+
+describe('TrieProof', function () {
+  beforeEach(async function () {
+    Object.assign(this, await fixture());
+  });
+
+  afterEach(async function () {
+    this.anvil.kill();
+  });
+
+  describe('verify', function () {
+    it('returns true for a valid proof with leaf', async function () {
+      const slot = ethers.ZeroHash;
+      const tx = await this.storage.setUint256Slot(slot, 42);
+      const response = await this.provider.send('eth_getProof', [
+        this.storage.target,
+        [slot],
+        ethers.toBeHex(tx.blockNumber),
+      ]);
+      const { storageHash, storageProof } = response;
+      const { key, value, proof } = storageProof[0];
+      const result = await this.mock.$verify(key, value, proof, storageHash);
+      expect(result).is.true;
+    });
+
+    it('returns true for a valid proof with extension', async function () {
+      const slot0 = ethers.ZeroHash;
+      const slot1 = '0x0000000000000000000000000000000000000000000000000000000000000001';
+      await this.storage.setUint256Slot(slot0, 42);
+      const tx = await this.storage.setUint256Slot(slot1, 43);
+      const response = await this.provider.send('eth_getProof', [
+        this.storage.target,
+        [slot1],
+        ethers.toBeHex(tx.blockNumber),
+      ]);
+      const { storageHash, storageProof } = response;
+      const { key, value, proof } = storageProof[0];
+      const result = await this.mock.$verify(key, value, proof, storageHash);
+      expect(result).is.true;
+    });
+
+    it('fails to process proof with empty key', async function () {
+      const [value, error] = await this.mock.$processProof('0x', [], ethers.ZeroHash);
+      expect(value).to.equal('0x');
+      expect(error).to.equal(ProofError.EMPTY_KEY);
+    });
+
+    it.skip('fails to process proof with key index out of bounds', async function () {}); // TODO: INDEX_OUT_OF_BOUNDS
+
+    it('fails to process proof with invalid root hash', async function () {
+      const slot = ethers.ZeroHash;
+      const tx = await this.storage.setUint256Slot(slot, 42);
+      const { storageHash, storageProof } = await this.provider.send('eth_getProof', [
+        this.storage.target,
+        [slot],
+        ethers.toBeHex(tx.blockNumber),
+      ]);
+      const { key, proof } = storageProof[0];
+      const [processedValue, error] = await this.mock.$processProof(key, proof, ethers.keccak256(storageHash)); // Corrupt root hash
+      expect(processedValue).to.equal('0x');
+      expect(error).to.equal(ProofError.INVALID_ROOT_HASH);
+    });
+
+    it('fails to process proof with invalid internal large hash', async function () {
+      const slot0 = ethers.ZeroHash;
+      const slot1 = '0x0000000000000000000000000000000000000000000000000000000000000001';
+      await this.storage.setUint256Slot(slot0, 42);
+      const tx = await this.storage.setUint256Slot(slot1, 43);
+      const { storageHash, storageProof } = await this.provider.send('eth_getProof', [
+        this.storage.target,
+        [slot1],
+        ethers.toBeHex(tx.blockNumber),
+      ]);
+      const { key, proof } = storageProof[0];
+      proof[1] = ethers.toBeHex(BigInt(proof[1]) + 1n); // Corrupt internal large node hash
+      const [processedValue, error] = await this.mock.$processProof(key, proof, storageHash);
+      expect(processedValue).to.equal('0x');
+      expect(error).to.equal(ProofError.INVALID_LARGE_INTERNAL_HASH);
+    });
+
+    it.skip('fails to process proof with invalid internal short node', async function () {}); // TODO: INVALID_INTERNAL_NODE_HASH
+
+    it('fails to process proof with empty value', async function () {
+      const proof = [ethers.encodeRlp(['0x20', '0x'])]; // Corrupt proof to yield empty value
+      const [processedValue, error] = await this.mock.$processProof('0x00', proof, ethers.keccak256(proof[0]));
+      expect(processedValue).to.equal('0x');
+      expect(error).to.equal(ProofError.EMPTY_VALUE);
+    });
+
+    it('fails to process proof with invalid extra proof', async function () {
+      const slot0 = ethers.ZeroHash;
+      const tx = await this.storage.setUint256Slot(slot0, 42);
+      const { storageHash, storageProof } = await this.provider.send('eth_getProof', [
+        this.storage.target,
+        [slot0],
+        ethers.toBeHex(tx.blockNumber),
+      ]);
+      const { key, proof } = storageProof[0];
+      proof[1] = ethers.encodeRlp([]); // extra proof element
+      const [processedValue, error] = await this.mock.$processProof(key, proof, storageHash);
+      expect(processedValue).to.equal('0x');
+      expect(error).to.equal(ProofError.INVALID_EXTRA_PROOF_ELEMENT);
+    });
+
+    it('fails to process proof with invalid path remainder', async function () {
+      const proof = [ethers.encodeRlp(['0x0011', '0x'])]; // Corrupt proof to yield invalid path remainder
+      const [processedValue, error] = await this.mock.$processProof(ethers.ZeroHash, proof, ethers.keccak256(proof[0]));
+      expect(processedValue).to.equal('0x');
+      expect(error).to.equal(ProofError.INVALID_PATH_REMAINDER);
+    });
+
+    it.skip('fails to process proof with invalid key remainder', async function () {}); // TODO: INVALID_KEY_REMAINDER
+
+    it('fails to process proof with unknown node prefix', async function () {
+      const proof = [ethers.encodeRlp(['0x40', '0x'])];
+      const [processedValue, error] = await this.mock.$processProof('0x00', proof, ethers.keccak256(proof[0]));
+      expect(processedValue).to.equal('0x');
+      expect(error).to.equal(ProofError.UNKNOWN_NODE_PREFIX);
+    });
+
+    it('fails to process proof with unparsable node', async function () {
+      const proof = [ethers.encodeRlp(['0x00', '0x00', '0x00'])];
+      const [processedValue, error] = await this.mock.$processProof('0x00', proof, ethers.keccak256(proof[0]));
+      expect(processedValue).to.equal('0x');
+      expect(error).to.equal(ProofError.UNPARSEABLE_NODE);
+    });
+
+    it('fails to process proof with invalid proof', async function () {
+      const [processedValue, error] = await this.mock.$processProof('0x00', [], ethers.ZeroHash);
+      expect(processedValue).to.equal('0x');
+      expect(error).to.equal(ProofError.INVALID_PROOF);
+    });
+  });
+});
```
