Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
19626dd
refactor(common/l2): add serialized class newtypes
CHr15F0x Apr 14, 2026
f0c379d
refactor(storage/class): remove dead code
CHr15F0x Apr 14, 2026
8e2f506
refactor: use serialized class newtypes
CHr15F0x Apr 14, 2026
89158a6
refactor: rename SerializedClassDefinition into SerializedOpaqueClass…
CHr15F0x Apr 15, 2026
3a81ae1
chore: fmt
CHr15F0x Apr 15, 2026
0f56f66
fixup: cairo-native build failure, clippy
CHr15F0x Apr 16, 2026
254339b
refactor: derive Dummy
CHr15F0x Apr 16, 2026
b336aa8
fixup: use reinterpreted output from compute_class_hash in fake storage
CHr15F0x Apr 16, 2026
67ac536
fixup: use the hash helper, tests should be blocking
CHr15F0x Apr 19, 2026
3d1af5d
refactor: rename into from_serialized_def
CHr15F0x Apr 19, 2026
8bbab44
revert: removal of experimental libfunc list rationale
CHr15F0x Apr 19, 2026
a128e89
revert: adding try_ prefix
CHr15F0x Apr 19, 2026
8e6074d
chore: fmt
CHr15F0x Apr 19, 2026
f6d25b1
fixup: deserialize directly from slice
CHr15F0x Apr 19, 2026
391958c
refactor: use imports instead of qualified names
CHr15F0x Apr 19, 2026
8c77937
refactor(sync): use existing serialized class types
CHr15F0x Apr 19, 2026
5deb778
fixup! fixup: use the hash helper, tests should be blocking
CHr15F0x Apr 19, 2026
01e390d
fixup! refactor: use imports instead of qualified names
CHr15F0x Apr 19, 2026
4a030fb
refactor(sync): use existing serialized class type
CHr15F0x Apr 19, 2026
137f37d
fixup! refactor: use imports instead of qualified names
CHr15F0x Apr 19, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
144 changes: 81 additions & 63 deletions crates/class-hash/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,12 @@

use anyhow::{Context, Error, Result};
use pathfinder_common::class_definition::EntryPointType::*;
use pathfinder_common::class_definition::{
SerializedCairoDefinition,
SerializedClassDefinition,
SerializedOpaqueClassDefinition,
SerializedSierraDefinition,
};
use pathfinder_common::{felt_bytes, ClassHash};
use pathfinder_crypto::hash::{HashChain, PoseidonHasher};
use pathfinder_crypto::Felt;
Expand All @@ -80,22 +86,47 @@ impl ComputedClassHash {
}
}

/// Computes the starknet class hash for given class definition JSON blob.
/// Consumes an opaque serialized class definition and outputs the computed
/// class hash as well as the definition reinterpreted as either a serialized
/// Cairo or Sierra definition.
///
/// This function first parses the JSON blob to decide if it's a Cairo or Sierra
/// class definition and then calls the appropriate function to compute the
/// class hash with the parsed definition.
pub fn compute_class_hash(contract_definition_dump: &[u8]) -> Result<ComputedClassHash> {
let contract_definition = parse_contract_definition(contract_definition_dump)
pub fn compute_class_hash(
serialized_definition: SerializedOpaqueClassDefinition,
) -> Result<(ComputedClassHash, SerializedClassDefinition)> {
let contract_definition = parse_contract_definition(&serialized_definition)
.context("Failed to parse contract definition")?;

match contract_definition {
json::ContractDefinition::Sierra(definition) => compute_sierra_class_hash(definition)
.map(ComputedClassHash::Sierra)
.context("Compute class hash"),
.context("Compute class hash")
.map(|hash| {
(
hash,
// It is safe to reinterpret the serialized definition as a Sierra definition
// since the parsing step succeeded and confirmed it is a
// Sierra definition.
SerializedClassDefinition::Sierra(SerializedSierraDefinition::from_bytes(
serialized_definition.into_bytes(),
)),
)
}),
json::ContractDefinition::Cairo(definition) => compute_cairo_class_hash(definition.into())
.map(ComputedClassHash::Cairo)
.context("Compute class hash"),
.context("Compute class hash")
.map(|hash| {
(
hash,
// It is safe to reinterpret the serialized definition as a Cairo definition
// since the parsing step succeeded and confirmed it is a Cairo definition.
SerializedClassDefinition::Cairo(SerializedCairoDefinition::from_bytes(
serialized_definition.into_bytes(),
)),
)
}),
}
}

Expand Down Expand Up @@ -132,14 +163,16 @@ pub fn compute_cairo_hinted_class_hash(
///
/// Due to an issue in serde_json we can't use an untagged enum and simply
/// derive a Deserialize implementation: <https://github.com/serde-rs/json/issues/559>
pub fn parse_contract_definition(
contract_definition_dump: &[u8],
fn parse_contract_definition(
serialized_definition: &SerializedOpaqueClassDefinition,
) -> serde_json::Result<json::ContractDefinition<'_>> {
serde_json::from_slice::<json::SierraContractDefinition<'_>>(contract_definition_dump)
serde_json::from_slice::<json::SierraContractDefinition<'_>>(serialized_definition.as_bytes())
.map(json::ContractDefinition::Sierra)
.or_else(|_| {
serde_json::from_slice::<json::CairoContractDefinition<'_>>(contract_definition_dump)
.map(json::ContractDefinition::Cairo)
serde_json::from_slice::<json::CairoContractDefinition<'_>>(
serialized_definition.as_bytes(),
)
.map(json::ContractDefinition::Cairo)
})
}

Expand Down Expand Up @@ -799,17 +832,22 @@ pub mod json {

#[cfg(test)]
mod test_vectors {
use pathfinder_common::class_definition::SerializedOpaqueClassDefinition;
use pathfinder_common::macro_prelude::*;
use starknet_gateway_test_fixtures::class_definitions::*;

use super::super::{compute_class_hash, ComputedClassHash};

#[tokio::test]
async fn first() {
let hash = compute_class_hash(INTEGRATION_TEST).unwrap();
fn hash(data: &[u8]) -> ComputedClassHash {
compute_class_hash(SerializedOpaqueClassDefinition::from_slice(data))
.unwrap()
.0
}

#[test]
fn first() {
assert_eq!(
hash,
hash(INTEGRATION_TEST),
ComputedClassHash::Cairo(class_hash!(
"0x031da92cf5f54bcb81b447e219e2b791b23f3052d12b6c9abd04ff2e5626576"
))
Expand All @@ -818,113 +856,93 @@ pub mod json {

#[test]
fn second() {
let hash = super::super::compute_class_hash(CONTRACT_DEFINITION).unwrap();

assert_eq!(
hash,
hash(CONTRACT_DEFINITION),
ComputedClassHash::Cairo(class_hash!(
"0x50b2148c0d782914e0b12a1a32abe5e398930b7e914f82c65cb7afce0a0ab9b"
))
);
}

#[tokio::test]
async fn genesis_contract() {
let hash = compute_class_hash(GOERLI_GENESIS).unwrap();

#[test]
fn genesis_contract() {
assert_eq!(
hash,
hash(GOERLI_GENESIS),
ComputedClassHash::Cairo(class_hash!(
"0x10455c752b86932ce552f2b0fe81a880746649b9aee7e0d842bf3f52378f9f8"
))
);
}

#[tokio::test]
async fn cairo_0_8() {
#[test]
fn cairo_0_8() {
// Cairo 0.8 update broke our class hash calculation by adding new attribute
// fields (which we now need to ignore if empty).

let expected = ComputedClassHash::Cairo(class_hash!(
"056b96c1d1bbfa01af44b465763d1b71150fa00c6c9d54c3947f57e979ff68c3"
));

// Known contract which triggered a hash mismatch failure.
let extract = tokio::task::spawn_blocking(move || -> anyhow::Result<_> {
let hash = compute_class_hash(CAIRO_0_8_NEW_ATTRIBUTES)?;
Ok(hash)
});
let calculated_hash = extract.await.unwrap().unwrap();

assert_eq!(calculated_hash, expected);
assert_eq!(
// Known contract which triggered a hash mismatch failure.
hash(CAIRO_0_8_NEW_ATTRIBUTES),
ComputedClassHash::Cairo(class_hash!(
"056b96c1d1bbfa01af44b465763d1b71150fa00c6c9d54c3947f57e979ff68c3"
))
);
}

#[tokio::test]
async fn cairo_0_10() {
#[test]
fn cairo_0_10() {
// Contract whose class triggered a deserialization issue because of the new
// `compiler_version` property.
let hash = compute_class_hash(CAIRO_0_10_COMPILER_VERSION).unwrap();

assert_eq!(
hash,
hash(CAIRO_0_10_COMPILER_VERSION),
ComputedClassHash::Cairo(class_hash!(
"0xa69700a89b1fa3648adff91c438b79c75f7dcb0f4798938a144cce221639d6"
))
);
}

#[tokio::test]
async fn cairo_0_10_part_2() {
#[test]
fn cairo_0_10_part_2() {
// Contract whose class contains `compiler_version` property as well as
// `cairo_type` with tuple values. These tuple values require a
// space to be injected in order to achieve the correct hash.
let hash = compute_class_hash(CAIRO_0_10_TUPLES_INTEGRATION).unwrap();

assert_eq!(
hash,
hash(CAIRO_0_10_TUPLES_INTEGRATION),
ComputedClassHash::Cairo(class_hash!(
"0x542460935cea188d21e752d8459d82d60497866aaad21f873cbb61621d34f7f"
))
);
}

#[tokio::test]
async fn cairo_0_10_part_3() {
#[test]
fn cairo_0_10_part_3() {
// Contract whose class contains `compiler_version` property as well as
// `cairo_type` with tuple values. These tuple values require a
// space to be injected in order to achieve the correct hash.
let hash = compute_class_hash(CAIRO_0_10_TUPLES_GOERLI).unwrap();

assert_eq!(
hash,
hash(CAIRO_0_10_TUPLES_GOERLI),
ComputedClassHash::Cairo(class_hash!(
"0x66af14b94491ba4e2aea1117acf0a3155c53d92fdfd9c1f1dcac90dc2d30157"
))
);
}

#[tokio::test]
async fn cairo_0_11_sierra() {
let hash = compute_class_hash(CAIRO_0_11_SIERRA).unwrap();

#[test]
fn cairo_0_11_sierra() {
assert_eq!(
hash,
hash(CAIRO_0_11_SIERRA),
ComputedClassHash::Sierra(class_hash!(
"0x4e70b19333ae94bd958625f7b61ce9eec631653597e68645e13780061b2136c"
))
)
}

#[tokio::test]
async fn cairo_0_11_with_decimal_entry_point_offset() {
let hash = compute_class_hash(CAIRO_0_11_WITH_DECIMAL_ENTRY_POINT_OFFSET).unwrap();

#[test]
fn cairo_0_11_with_decimal_entry_point_offset() {
assert_eq!(
hash,
hash(CAIRO_0_11_WITH_DECIMAL_ENTRY_POINT_OFFSET),
ComputedClassHash::Cairo(class_hash!(
"0x0484c163658bcce5f9916f486171ac60143a92897533aa7ff7ac800b16c63311"
))
)
);
}
}

Expand Down
10 changes: 5 additions & 5 deletions crates/common/src/casm_class.rs
Original file line number Diff line number Diff line change
Expand Up @@ -90,10 +90,10 @@ pub enum NestedIntList {
Node(Vec<NestedIntList>),
}

impl TryFrom<&str> for CasmContractClass {
type Error = serde_json::Error;

fn try_from(value: &str) -> Result<Self, Self::Error> {
serde_json::from_str(value)
impl CasmContractClass {
/// Deserializes a [`CasmContractClass`] from the raw bytes of a serialized
/// CASM class definition.
///
/// # Errors
///
/// Returns the underlying `serde_json` error if the bytes are not valid
/// JSON matching this type's expected structure.
pub fn from_serialized_def(
definition: &crate::class_definition::SerializedCasmDefinition,
) -> Result<Self, serde_json::Error> {
serde_json::from_slice(definition.as_bytes())
}
}
108 changes: 108 additions & 0 deletions crates/common/src/class_definition.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,28 @@ use crate::{ByteCodeOffset, EntryPoint};

pub const CLASS_DEFINITION_MAX_ALLOWED_SIZE: u64 = 4 * 1024 * 1024;

/// Raw bytes of a serialized Sierra class definition.
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, Dummy)]
pub struct SerializedSierraDefinition(Vec<u8>);
Comment thread
zvolin marked this conversation as resolved.

/// Raw bytes of a serialized CASM class definition.
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, Dummy)]
pub struct SerializedCasmDefinition(Vec<u8>);

/// Raw bytes of a serialized Cairo class definition.
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, Dummy)]
pub struct SerializedCairoDefinition(Vec<u8>);

/// Carries the definition of a serialized contract class, either Sierra or
/// Cairo, when the caller does not (yet) care which kind it is.
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, Dummy)]
pub struct SerializedOpaqueClassDefinition(Vec<u8>);

/// Carries the definition of a serialized contract class, tagged as either
/// Sierra or Cairo once the kind has been determined.
// Derives `PartialEq, Eq, Hash` for consistency with the wrapped newtypes,
// which already derive them; this lets callers compare/hash whole values.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum SerializedClassDefinition {
    Sierra(SerializedSierraDefinition),
    Cairo(SerializedCairoDefinition),
}

#[derive(Debug, Deserialize, Dummy)]
pub enum ClassDefinition<'a> {
Sierra(Sierra<'a>),
Expand Down Expand Up @@ -190,3 +212,89 @@ pub struct SelectorAndFunctionIndex {
pub selector: EntryPoint,
pub function_idx: u64,
}

impl SerializedSierraDefinition {
pub fn from_bytes(bytes: Vec<u8>) -> Self {
Self(bytes)
}

pub fn from_slice(bytes: &[u8]) -> Self {
Self(bytes.to_vec())
}

pub fn into_bytes(self) -> Vec<u8> {
self.0
}

pub fn as_bytes(&self) -> &[u8] {
&self.0
}
}

impl SerializedCasmDefinition {
    /// Wraps an owned byte buffer without copying.
    pub fn from_bytes(bytes: Vec<u8>) -> Self {
        Self(bytes)
    }

    /// Builds a definition by copying the provided slice.
    pub fn from_slice(bytes: &[u8]) -> Self {
        Self(bytes.to_vec())
    }

    /// Consumes the wrapper, yielding the underlying byte buffer.
    pub fn into_bytes(self) -> Vec<u8> {
        self.0
    }

    /// Borrows the raw serialized bytes.
    pub fn as_bytes(&self) -> &[u8] {
        &self.0
    }
}

impl SerializedCairoDefinition {
pub fn from_bytes(bytes: Vec<u8>) -> Self {
Self(bytes)
}

pub fn from_slice(bytes: &[u8]) -> Self {
Self(bytes.to_vec())
}

pub fn into_bytes(self) -> Vec<u8> {
self.0
}

pub fn as_bytes(&self) -> &[u8] {
&self.0
}
}

impl SerializedOpaqueClassDefinition {
pub fn from_bytes(bytes: Vec<u8>) -> Self {
Self(bytes)
}

pub fn from_slice(bytes: &[u8]) -> Self {
Self(bytes.to_vec())
}

pub fn into_bytes(self) -> Vec<u8> {
self.0
}

pub fn as_bytes(&self) -> &[u8] {
&self.0
}
}

/// Infallible widening: every Sierra definition is also a valid opaque one,
/// so `From` (rather than `TryFrom`) is appropriate here.
impl From<SerializedSierraDefinition> for SerializedOpaqueClassDefinition {
    fn from(definition: SerializedSierraDefinition) -> Self {
        Self::from_bytes(definition.into_bytes())
    }
}

/// Infallible widening: every Cairo definition is also a valid opaque one,
/// so `From` (rather than `TryFrom`) is appropriate here.
impl From<SerializedCairoDefinition> for SerializedOpaqueClassDefinition {
    fn from(definition: SerializedCairoDefinition) -> Self {
        Self::from_bytes(definition.into_bytes())
    }
}
Loading
Loading