diff --git a/crates/cheatcodes/src/evm/fork.rs b/crates/cheatcodes/src/evm/fork.rs
index aff4148b3912d..54e68e1feffc1 100644
--- a/crates/cheatcodes/src/evm/fork.rs
+++ b/crates/cheatcodes/src/evm/fork.rs
@@ -7,7 +7,7 @@ use alloy_primitives::{B256, U256};
 use alloy_provider::Provider;
 use alloy_rpc_types::Filter;
 use alloy_sol_types::SolValue;
-use foundry_common::provider::ProviderBuilder;
+use foundry_common::{provider::ProviderBuilder, sema::StructDefinitions};
 use foundry_evm_core::{AsEnvMut, ContextExt, fork::CreateFork};

 impl Cheatcode for activeForkCall {
@@ -208,7 +208,7 @@ impl Cheatcode for rpc_0Call {
             .database
             .active_fork_url()
             .ok_or_else(|| fmt_err!("no active fork URL found"))?;
-        rpc_call(&url, method, params)
+        rpc_call(&ccx.state.struct_defs, &url, method, params)
     }
 }

@@ -216,7 +216,7 @@ impl Cheatcode for rpc_1Call {
     fn apply(&self, state: &mut Cheatcodes) -> Result {
         let Self { urlOrAlias, method, params } = self;
         let url = state.config.rpc_endpoint(urlOrAlias)?.url()?;
-        rpc_call(&url, method, params)
+        rpc_call(&state.struct_defs, &url, method, params)
     }
 }

@@ -369,14 +369,15 @@ fn persist_caller(ccx: &mut CheatsCtxt) {
 }

 /// Performs an Ethereum JSON-RPC request to the given endpoint.
-fn rpc_call(url: &str, method: &str, params: &str) -> Result {
+fn rpc_call(struct_defs: &StructDefinitions, url: &str, method: &str, params: &str) -> Result {
     let provider = ProviderBuilder::new(url).build()?;
     let params_json: serde_json::Value = serde_json::from_str(params)?;
     let result =
         foundry_common::block_on(provider.raw_request(method.to_string().into(), params_json))
             .map_err(|err| fmt_err!("{method:?}: {err}"))?;
     let result_as_tokens = convert_to_bytes(
-        &json_value_to_token(&result).map_err(|err| fmt_err!("failed to parse result: {err}"))?,
+        &json_value_to_token(struct_defs, &result)
+            .map_err(|err| fmt_err!("failed to parse result: {err}"))?,
     );

     Ok(result_as_tokens.abi_encode())
diff --git a/crates/cheatcodes/src/fs.rs b/crates/cheatcodes/src/fs.rs
index a9585a31d99fa..c107d3197a593 100644
--- a/crates/cheatcodes/src/fs.rs
+++ b/crates/cheatcodes/src/fs.rs
@@ -855,6 +855,7 @@ fn latest_broadcast(
 mod tests {
     use super::*;
     use crate::CheatsConfig;
+    use foundry_common::sema::StructDefinitions;
     use std::sync::Arc;

     fn cheats() -> Cheatcodes {
@@ -863,7 +864,7 @@ mod tests {
             root: PathBuf::from(&env!("CARGO_MANIFEST_DIR")),
             ..Default::default()
         };
-        Cheatcodes::new(Arc::new(config))
+        Cheatcodes::new(Arc::new(config), StructDefinitions::default())
     }

     #[test]
diff --git a/crates/cheatcodes/src/inspector.rs b/crates/cheatcodes/src/inspector.rs
index 276caeef59d66..e4d7140d3fcf7 100644
--- a/crates/cheatcodes/src/inspector.rs
+++ b/crates/cheatcodes/src/inspector.rs
@@ -33,7 +33,9 @@ use alloy_rpc_types::{
     request::{TransactionInput, TransactionRequest},
 };
 use alloy_sol_types::{SolCall, SolInterface, SolValue};
-use foundry_common::{SELECTOR_LEN, TransactionMaybeSigned, evm::Breakpoints};
+use foundry_common::{
+    SELECTOR_LEN, TransactionMaybeSigned, evm::Breakpoints, sema::StructDefinitions,
+};
 use foundry_evm_core::{
     InspectorExt,
     abi::Vm::stopExpectSafeMemoryCall,
@@ -453,6 +455,9 @@ pub struct Cheatcodes {
     /// Used to prevent duplicate changes file executing non-committing calls.
     pub fs_commit: bool,

+    /// Struct definitions in the contracts. Used to keep field order when parsing JSON values.
+    pub struct_defs: StructDefinitions,
+
     /// Serialized JSON values.
     // **Note**: both must a BTreeMap to ensure the order of the keys is deterministic.
     pub serialized_jsons: BTreeMap<String, BTreeMap<String, Value>>,
@@ -500,13 +505,13 @@ pub struct Cheatcodes {
 // create.
 impl Default for Cheatcodes {
     fn default() -> Self {
-        Self::new(Arc::default())
+        Self::new(Arc::default(), StructDefinitions::default())
     }
 }

 impl Cheatcodes {
     /// Creates a new `Cheatcodes` with the given settings.
-    pub fn new(config: Arc<CheatsConfig>) -> Self {
+    pub fn new(config: Arc<CheatsConfig>, struct_defs: StructDefinitions) -> Self {
         Self {
             fs_commit: true,
             labels: config.labels.clone(),
@@ -535,6 +540,7 @@ impl Cheatcodes {
             access_list: Default::default(),
             test_context: Default::default(),
             serialized_jsons: Default::default(),
+            struct_defs,
             eth_deals: Default::default(),
             gas_metering: Default::default(),
             gas_snapshots: Default::default(),
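Editor's note: the sketch below is not part of the patch. It illustrates the new two-argument constructor, assuming it runs inside the foundry-cheatcodes crate where `Cheatcodes` and `CheatsConfig` are in scope. Map keys are fully qualified struct names ("Contract.Struct", or a bare "Struct" for file-level structs); values are (field name, canonical type) pairs in Solidity declaration order. The `Counter.Config` struct and the bare `CheatsConfig::default()` are illustrative assumptions.

use std::{collections::BTreeMap, sync::Arc};

use foundry_common::sema::StructDefinitions;

fn cheatcodes_with_defs() -> Cheatcodes {
    // Field order mirrors the Solidity declaration, not alphabetical order.
    let mut defs = BTreeMap::new();
    defs.insert(
        "Counter.Config".to_string(),
        vec![
            ("owner".to_string(), "address".to_string()),
            ("threshold".to_string(), "uint256".to_string()),
        ],
    );
    Cheatcodes::new(Arc::new(CheatsConfig::default()), StructDefinitions::new(defs))
}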
diff --git a/crates/cheatcodes/src/json.rs b/crates/cheatcodes/src/json.rs
index 932555041d7da..9e49c61b1db17 100644
--- a/crates/cheatcodes/src/json.rs
+++ b/crates/cheatcodes/src/json.rs
@@ -1,13 +1,16 @@
 //! Implementations of [`Json`](spec::Group::Json) cheatcodes.

 use crate::{Cheatcode, Cheatcodes, Result, Vm::*, string};
-use alloy_dyn_abi::{DynSolType, DynSolValue, Resolver, eip712_parser::EncodeType};
+use alloy_dyn_abi::{DynSolType, DynSolValue, Resolver, eip712_parser};
 use alloy_primitives::{Address, B256, I256, hex};
 use alloy_sol_types::SolValue;
-use foundry_common::fs;
+use foundry_common::{fs, sema::StructDefinitions};
 use foundry_config::fs_permissions::FsAccessKind;
 use serde_json::{Map, Value};
-use std::{borrow::Cow, collections::BTreeMap};
+use std::{
+    borrow::Cow,
+    collections::{BTreeMap, BTreeSet, HashMap},
+};

 impl Cheatcode for keyExistsCall {
     fn apply(&self, _state: &mut Cheatcodes) -> Result {
@@ -24,16 +27,16 @@ impl Cheatcode for keyExistsJsonCall {
     }
 }

 impl Cheatcode for parseJson_0Call {
-    fn apply(&self, _state: &mut Cheatcodes) -> Result {
+    fn apply(&self, state: &mut Cheatcodes) -> Result {
         let Self { json } = self;
-        parse_json(json, "$")
+        parse_json(json, "$", &state.struct_defs)
     }
 }

 impl Cheatcode for parseJson_1Call {
-    fn apply(&self, _state: &mut Cheatcodes) -> Result {
+    fn apply(&self, state: &mut Cheatcodes) -> Result {
         let Self { json, key } = self;
-        parse_json(json, key)
+        parse_json(json, key, &state.struct_defs)
     }
 }

@@ -136,23 +139,25 @@ impl Cheatcode for parseJsonBytes32ArrayCall {
 }

 impl Cheatcode for parseJsonType_0Call {
-    fn apply(&self, _state: &mut Cheatcodes) -> Result {
+    fn apply(&self, state: &mut Cheatcodes) -> Result {
         let Self { json, typeDescription } = self;
-        parse_json_coerce(json, "$", &resolve_type(typeDescription)?).map(|v| v.abi_encode())
+        let ty = resolve_type(typeDescription, Some(&state.struct_defs))?;
+        parse_json_coerce(json, "$", &ty).map(|v| v.abi_encode())
     }
 }

 impl Cheatcode for parseJsonType_1Call {
-    fn apply(&self, _state: &mut Cheatcodes) -> Result {
+    fn apply(&self, state: &mut Cheatcodes) -> Result {
         let Self { json, key, typeDescription } = self;
-        parse_json_coerce(json, key, &resolve_type(typeDescription)?).map(|v| v.abi_encode())
+        let ty = resolve_type(typeDescription, Some(&state.struct_defs))?;
+        parse_json_coerce(json, key, &ty).map(|v| v.abi_encode())
     }
 }

 impl Cheatcode for parseJsonTypeArrayCall {
-    fn apply(&self, _state: &mut Cheatcodes) -> Result {
+    fn apply(&self, state: &mut Cheatcodes) -> Result {
         let Self { json, key, typeDescription } = self;
-        let ty = resolve_type(typeDescription)?;
+        let ty = resolve_type(typeDescription, Some(&state.struct_defs))?;
         parse_json_coerce(json, key, &DynSolType::Array(Box::new(ty))).map(|v| v.abi_encode())
     }
 }
@@ -308,11 +313,11 @@ impl Cheatcode for serializeBytes_1Call {
 }

 impl Cheatcode for serializeJsonType_0Call {
-    fn apply(&self, _state: &mut Cheatcodes) -> Result {
+    fn apply(&self, state: &mut Cheatcodes) -> Result {
         let Self { typeDescription, value } = self;
-        let ty = resolve_type(typeDescription)?;
+        let ty = resolve_type(typeDescription, Some(&state.struct_defs))?;
         let value = ty.abi_decode(value)?;
-        let value = serialize_value_as_json(value)?;
+        let value = serialize_value_as_json(value, &state.struct_defs)?;
         Ok(value.to_string().abi_encode())
     }
 }
@@ -320,7 +325,7 @@ impl Cheatcode for serializeJsonType_0Call {
 impl Cheatcode for serializeJsonType_1Call {
     fn apply(&self, state: &mut Cheatcodes) -> Result {
         let Self { objectKey, valueKey, typeDescription, value } = self;
-        let ty = resolve_type(typeDescription)?;
+        let ty = resolve_type(typeDescription, Some(&state.struct_defs))?;
         let value = ty.abi_decode(value)?;
         serialize_json(state, objectKey, valueKey, value)
     }
@@ -368,10 +373,10 @@ pub(super) fn check_json_key_exists(json: &str, key: &str) -> Result {
     Ok(exists.abi_encode())
 }

-pub(super) fn parse_json(json: &str, path: &str) -> Result {
+pub(super) fn parse_json(json: &str, path: &str, defs: &StructDefinitions) -> Result {
     let value = parse_json_str(json)?;
     let selected = select(&value, path)?;
-    let sol = json_to_sol(&selected)?;
+    let sol = json_to_sol(defs, &selected)?;
     Ok(encode(sol))
 }

@@ -432,17 +437,15 @@ pub(super) fn parse_json_map(map: &Map<String, Value>, ty: &DynSolType) -> Resul
         bail!("expected {ty}, found JSON object");
     };

-    let mut values = Vec::with_capacity(fields.len());
-    for (field, ty) in fields.iter().zip(types.iter()) {
-        let Some(value) = map.get(field) else { bail!("field {field:?} not found in JSON object") };
-        values.push(parse_json_as(value, ty)?);
+    let mut tuple = Vec::with_capacity(fields.len());
+    for (i, field_name) in fields.iter().enumerate() {
+        let Some(value) = map.get(field_name) else {
+            bail!("field {field_name:?} not found in JSON object")
+        };
+        tuple.push(parse_json_as(value, &types[i])?);
     }

-    Ok(DynSolValue::CustomStruct {
-        name: name.to_string(),
-        prop_names: fields.to_vec(),
-        tuple: values,
-    })
+    Ok(DynSolValue::CustomStruct { name: name.to_string(), prop_names: fields.to_owned(), tuple })
 }

 pub(super) fn parse_json_keys(json: &str, key: &str) -> Result {
@@ -462,10 +465,10 @@ fn parse_json_str(json: &str) -> Result<Value> {
     serde_json::from_str(json).map_err(|e| fmt_err!("failed parsing JSON: {e}"))
 }

-fn json_to_sol(json: &[&Value]) -> Result<Vec<DynSolValue>> {
+fn json_to_sol(defs: &StructDefinitions, json: &[&Value]) -> Result<Vec<DynSolValue>> {
     let mut sol = Vec::with_capacity(json.len());
     for value in json {
-        sol.push(json_value_to_token(value)?);
+        sol.push(json_value_to_token(defs, value)?);
     }
     Ok(sol)
 }
@@ -503,22 +506,44 @@ pub(super) fn canonicalize_json_path(path: &str) -> Cow<'_, str> {
 /// it will call itself to convert each of it's value and encode the whole as a
 /// Tuple
 #[instrument(target = "cheatcodes", level = "trace", ret)]
-pub(super) fn json_value_to_token(value: &Value) -> Result<DynSolValue> {
+pub(super) fn json_value_to_token(defs: &StructDefinitions, value: &Value) -> Result<DynSolValue> {
     match value {
         Value::Null => Ok(DynSolValue::FixedBytes(B256::ZERO, 32)),
         Value::Bool(boolean) => Ok(DynSolValue::Bool(*boolean)),
-        Value::Array(array) => {
-            array.iter().map(json_value_to_token).collect::<Result<_>>().map(DynSolValue::Array)
-        }
-        value @ Value::Object(_) => {
-            // See: [#3647](https://github.com/foundry-rs/foundry/pull/3647)
-            let ordered_object: BTreeMap<String, Value> =
-                serde_json::from_value(value.clone()).unwrap();
-            ordered_object
-                .values()
-                .map(json_value_to_token)
-                .collect::<Result<_>>()
-                .map(DynSolValue::Tuple)
+        Value::Array(array) => array
+            .iter()
+            .map(|v| json_value_to_token(defs, v))
+            .collect::<Result<_>>()
+            .map(DynSolValue::Array),
+        Value::Object(map) => {
+            // Try to find a struct definition that matches the object keys.
+            let keys: BTreeSet<_> = map.keys().map(|s| s.as_str()).collect();
+            let matching_def = defs.values().find(|fields| {
+                fields.len() == keys.len()
+                    && fields.iter().map(|(name, _)| name.as_str()).collect::<BTreeSet<_>>() == keys
+            });
+
+            if let Some(fields) = matching_def {
+                // Found a struct with matching field names, use the order from the definition.
+                fields
+                    .iter()
+                    .map(|(name, _)| {
+                        // unwrap is safe because we know the key exists.
+                        json_value_to_token(defs, map.get(name).unwrap())
+                    })
+                    .collect::<Result<_>>()
+                    .map(DynSolValue::Tuple)
+            } else {
+                // Fallback to alphabetical sorting if no matching struct is found.
+                // See: [#3647](https://github.com/foundry-rs/foundry/pull/3647)
+                let ordered_object: BTreeMap<_, _> =
+                    map.iter().map(|(k, v)| (k.clone(), v.clone())).collect();
+                ordered_object
+                    .values()
+                    .map(|value| json_value_to_token(defs, value))
+                    .collect::<Result<_>>()
+                    .map(DynSolValue::Tuple)
+            }
         }
         Value::Number(number) => {
             if let Some(f) = number.as_f64() {
@@ -586,7 +611,7 @@ pub(super) fn json_value_to_token(value: &Value) -> Result<DynSolValue> {
 }

 /// Serializes given [DynSolValue] into a [serde_json::Value].
-fn serialize_value_as_json(value: DynSolValue) -> Result<Value> {
+fn serialize_value_as_json(value: DynSolValue, defs: &StructDefinitions) -> Result<Value> {
     match value {
         DynSolValue::Bool(b) => Ok(Value::Bool(b)),
         DynSolValue::String(s) => {
@@ -611,18 +636,33 @@ fn serialize_value_as_json(value: DynSolValue) -> Result<Value> {
             Ok(Value::Number(n))
         }
         DynSolValue::Address(a) => Ok(Value::String(a.to_string())),
-        DynSolValue::Array(e) | DynSolValue::FixedArray(e) => {
-            Ok(Value::Array(e.into_iter().map(serialize_value_as_json).collect::<Result<_>>()?))
-        }
-        DynSolValue::CustomStruct { name: _, prop_names, tuple } => {
-            let values =
-                tuple.into_iter().map(serialize_value_as_json).collect::<Result<Vec<_>>>()?;
-            let map = prop_names.into_iter().zip(values).collect();
+        DynSolValue::Array(e) | DynSolValue::FixedArray(e) => Ok(Value::Array(
+            e.into_iter().map(|v| serialize_value_as_json(v, defs)).collect::<Result<_>>()?,
+        )),
+        DynSolValue::CustomStruct { name, prop_names, tuple } => {
+            let values = tuple
+                .into_iter()
+                .map(|v| serialize_value_as_json(v, defs))
+                .collect::<Result<Vec<_>>>()?;
+            let mut map: HashMap<String, Value> = prop_names.into_iter().zip(values).collect();
+
+            // If the struct def is known, manually build a `Map` to preserve the order.
+            if let Some(fields) = defs.get(&name) {
+                let mut ordered_map = Map::with_capacity(fields.len());
+                for (field_name, _) in fields {
+                    if let Some(serialized_value) = map.remove(field_name) {
+                        ordered_map.insert(field_name.clone(), serialized_value);
+                    }
+                }
+                // Explicitly return a `Value::Object` to avoid ambiguity.
+                return Ok(Value::Object(ordered_map));
+            }

-            Ok(Value::Object(map))
+            // Otherwise, fall back to alphabetical sorting for deterministic output.
+            Ok(Value::Object(map.into_iter().collect::<Map<_, _>>()))
         }
         DynSolValue::Tuple(values) => Ok(Value::Array(
-            values.into_iter().map(serialize_value_as_json).collect::<Result<_>>()?,
+            values.into_iter().map(|v| serialize_value_as_json(v, defs)).collect::<Result<_>>()?,
         )),
         DynSolValue::Function(_) => bail!("cannot serialize function pointer"),
     }
@@ -642,7 +682,7 @@ fn serialize_json(
     value_key: &str,
     value: DynSolValue,
 ) -> Result {
-    let value = serialize_value_as_json(value)?;
+    let value = serialize_value_as_json(value, &state.struct_defs)?;
     let map = state.serialized_jsons.entry(object_key.into()).or_default();
     map.insert(value_key.into(), value);
     let stringified = serde_json::to_string(map).unwrap();
@@ -650,29 +690,87 @@ fn serialize_json(
 }

 /// Resolves a [DynSolType] from user input.
-pub(super) fn resolve_type(type_description: &str) -> Result<DynSolType> {
+pub(super) fn resolve_type(
+    type_description: &str,
+    struct_defs: Option<&StructDefinitions>,
+) -> Result<DynSolType> {
+    let ordered_ty = |ty| -> Result<DynSolType> {
+        if let Some(defs) = struct_defs { reorder_type(ty, defs) } else { Ok(ty) }
+    };
+
     if let Ok(ty) = DynSolType::parse(type_description) {
-        return Ok(ty);
+        return ordered_ty(ty);
     };

-    if let Ok(encoded) = EncodeType::parse(type_description) {
+    if let Ok(encoded) = eip712_parser::EncodeType::parse(type_description) {
         let main_type = encoded.types[0].type_name;
         let mut resolver = Resolver::default();
-        for t in encoded.types {
+        for t in &encoded.types {
             resolver.ingest(t.to_owned());
         }

-        return Ok(resolver.resolve(main_type)?);
-    };
+        // Get the alphabetically-sorted type from the resolver, and reorder if necessary.
+        return ordered_ty(resolver.resolve(main_type)?);
+    }

     bail!("type description should be a valid Solidity type or a EIP712 `encodeType` string")
 }

+/// Recursively traverses a `DynSolType` and reorders the fields of any
+/// `CustomStruct` variants according to the provided `StructDefinitions`.
+///
+/// This is necessary because the EIP-712 resolver sorts struct fields alphabetically,
+/// but we want to respect the order defined in the Solidity source code.
+fn reorder_type(ty: DynSolType, struct_defs: &StructDefinitions) -> Result<DynSolType> {
+    match ty {
+        DynSolType::CustomStruct { name, prop_names, tuple } => {
+            if let Some(def) = struct_defs.get(&name) {
+                // The incoming `prop_names` and `tuple` are alphabetically sorted.
+                let type_map: std::collections::HashMap<String, DynSolType> =
+                    prop_names.into_iter().zip(tuple).collect();
+
+                let mut sorted_props = Vec::with_capacity(def.len());
+                let mut sorted_tuple = Vec::with_capacity(def.len());
+                for (field_name, _) in def {
+                    sorted_props.push(field_name.clone());
+                    if let Some(field_ty) = type_map.get(field_name) {
+                        sorted_tuple.push(reorder_type(field_ty.clone(), struct_defs)?);
+                    } else {
+                        bail!(
+                            "mismatch between struct definition and type description: field '{field_name}' not found in provided type for struct '{name}'"
+                        );
+                    }
+                }
+                Ok(DynSolType::CustomStruct { name, prop_names: sorted_props, tuple: sorted_tuple })
+            } else {
+                // No definition found, so we can't reorder. However, we still reorder its children
+                // in case they have known structs.
+                let new_tuple = tuple
+                    .into_iter()
+                    .map(|t| reorder_type(t, struct_defs))
+                    .collect::<Result<Vec<_>>>()?;
+                Ok(DynSolType::CustomStruct { name, prop_names, tuple: new_tuple })
+            }
+        }
+        DynSolType::Array(inner) => {
+            Ok(DynSolType::Array(Box::new(reorder_type(*inner, struct_defs)?)))
+        }
+        DynSolType::FixedArray(inner, len) => {
+            Ok(DynSolType::FixedArray(Box::new(reorder_type(*inner, struct_defs)?), len))
+        }
+        DynSolType::Tuple(inner) => Ok(DynSolType::Tuple(
+            inner.into_iter().map(|t| reorder_type(t, struct_defs)).collect::<Result<Vec<_>>>()?,
+        )),
+        _ => Ok(ty),
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
-    use alloy_primitives::FixedBytes;
-    use proptest::strategy::Strategy;
+    use alloy_primitives::{FixedBytes, U256};
+    use proptest::{arbitrary::any, prop_oneof, strategy::Strategy};
+    use std::collections::HashSet;

     fn contains_tuple(value: &DynSolValue) -> bool {
         match value {
@@ -709,18 +807,61 @@ mod tests {
     }

     fn guessable_types() -> impl proptest::strategy::Strategy<Value = DynSolValue> {
-        proptest::arbitrary::any::<DynSolValue>()
+        any::<DynSolValue>()
             .prop_map(fixup_guessable)
             .prop_filter("tuples are not supported", |v| !contains_tuple(v))
             .prop_filter("filter out values without type", |v| v.as_type().is_some())
     }

+    /// A proptest strategy for generating a (simple) `DynSolValue::CustomStruct`
+    /// and its corresponding `StructDefinitions` object.
+    fn custom_struct_strategy() -> impl Strategy<Value = (StructDefinitions, DynSolValue)> {
+        // Define a strategy for basic field names and values.
+        let field_name_strat = "[a-z]{4,12}";
+        let field_value_strat = prop_oneof![
+            any::<bool>().prop_map(DynSolValue::Bool),
+            any::<u64>().prop_map(|v| DynSolValue::Uint(U256::from(v), 256)),
+            any::<[u8; 20]>().prop_map(Address::from).prop_map(DynSolValue::Address),
+            any::<[u8; 32]>().prop_map(B256::from).prop_map(|b| DynSolValue::FixedBytes(b, 32)),
+            ".*".prop_map(DynSolValue::String),
+        ];
+
+        // Combine them to create a list of unique fields that preserve the random order.
+        let fields_strat = proptest::collection::vec((field_name_strat, field_value_strat), 1..8)
+            .prop_map(|fields| {
+                let mut unique_fields = Vec::with_capacity(fields.len());
+                let mut seen_names = HashSet::new();
+                for (name, value) in fields {
+                    if seen_names.insert(name.clone()) {
+                        unique_fields.push((name, value));
+                    }
+                }
+                unique_fields
+            });
+
+        // Generate the `CustomStruct` and its definition.
+        ("[A-Z][a-z]{4,8}", fields_strat).prop_map(|(struct_name, fields)| {
+            let (prop_names, tuple): (Vec<String>, Vec<DynSolValue>) =
+                fields.clone().into_iter().unzip();
+            let def_fields: Vec<(String, String)> = fields
+                .iter()
+                .map(|(name, value)| (name.clone(), value.as_type().unwrap().to_string()))
+                .collect();
+            let mut defs_map = BTreeMap::new();
+            defs_map.insert(struct_name.clone(), def_fields);
+            (
+                StructDefinitions::new(defs_map),
+                DynSolValue::CustomStruct { name: struct_name, prop_names, tuple },
+            )
+        })
+    }
+
     // Tests to ensure that conversion [DynSolValue] -> [serde_json::Value] -> [DynSolValue]
     proptest::proptest! {
         #[test]
         fn test_json_roundtrip_guessed(v in guessable_types()) {
-            let json = serialize_value_as_json(v.clone()).unwrap();
-            let value = json_value_to_token(&json).unwrap();
+            let json = serialize_value_as_json(v.clone(), &StructDefinitions::default()).unwrap();
+            let value = json_value_to_token(&StructDefinitions::default(), &json).unwrap();

             // do additional abi_encode -> abi_decode to avoid zero signed integers getting decoded as unsigned and causing assert_eq to fail.
             let decoded = v.as_type().unwrap().abi_decode(&value.abi_encode()).unwrap();
@@ -728,10 +869,224 @@ mod tests {
         }

         #[test]
-        fn test_json_roundtrip(v in proptest::arbitrary::any::<DynSolValue>().prop_filter("filter out values without type", |v| v.as_type().is_some())) {
-            let json = serialize_value_as_json(v.clone()).unwrap();
+        fn test_json_roundtrip(v in any::<DynSolValue>().prop_filter("filter out values without type", |v| v.as_type().is_some())) {
+            let json = serialize_value_as_json(v.clone(), &StructDefinitions::default()).unwrap();
             let value = parse_json_as(&json, &v.as_type().unwrap()).unwrap();
-            assert_eq!(value, v);
+            assert_eq!(value, v);
+        }
+
+        #[test]
+        fn test_json_roundtrip_with_struct_defs((struct_defs, v) in custom_struct_strategy()) {
+            let json = serialize_value_as_json(v.clone(), &struct_defs).unwrap();
+            let sol_type = v.as_type().unwrap();
+            let parsed_value = parse_json_as(&json, &sol_type).unwrap();
+            assert_eq!(parsed_value, v);
         }
     }
+
+    #[test]
+    fn test_resolve_type_with_definitions() -> Result<()> {
+        // Define a struct with fields in a specific order (not alphabetical)
+        let mut defs_map = BTreeMap::new();
+        defs_map.insert(
+            "Apple".to_string(),
+            vec![
+                ("color".to_string(), "string".to_string()),
+                ("sweetness".to_string(), "uint8".to_string()),
+                ("sourness".to_string(), "uint8".to_string()),
+            ],
+        );
+        defs_map.insert(
+            "FruitStall".to_string(),
+            vec![
+                ("name".to_string(), "string".to_string()),
+                ("apples".to_string(), "Apple[]".to_string()),
+            ],
+        );
+        let struct_defs = StructDefinitions::new(defs_map);
+
+        // Simulate resolver output: type string, using alphabetical order for fields.
+        let ty_desc = "FruitStall(Apple[] apples,string name)Apple(string color,uint8 sourness,uint8 sweetness)";
+
+        // Resolve type and ensure struct definition order is preserved.
+        let ty = resolve_type(ty_desc, Some(&struct_defs)).unwrap();
+        if let DynSolType::CustomStruct { name, prop_names, tuple } = ty {
+            assert_eq!(name, "FruitStall");
+            assert_eq!(prop_names, vec!["name", "apples"]);
+            assert_eq!(tuple.len(), 2);
+            assert_eq!(tuple[0], DynSolType::String);
+
+            if let DynSolType::Array(apple_ty_boxed) = &tuple[1]
+                && let DynSolType::CustomStruct { name, prop_names, tuple } = &**apple_ty_boxed
+            {
+                assert_eq!(*name, "Apple");
+                // Check that the inner struct's fields are also in definition order.
+                assert_eq!(*prop_names, vec!["color", "sweetness", "sourness"]);
+                assert_eq!(
+                    *tuple,
+                    vec![DynSolType::String, DynSolType::Uint(8), DynSolType::Uint(8)]
+                );
+
+                return Ok(());
+            }
+        }
+        panic!("Expected FruitStall and Apple to be CustomStruct");
+    }
+
+    #[test]
+    fn test_resolve_type_without_definitions() -> Result<()> {
+        // Simulate resolver output: type string, using alphabetical order for fields.
+        let ty_desc = "Person(bool active,uint256 age,string name)";
+
+        // Resolve the type without providing any struct definitions and ensure that original
+        // (alphabetical) order is unchanged.
+        let ty = resolve_type(ty_desc, None).unwrap();
+        if let DynSolType::CustomStruct { name, prop_names, tuple } = ty {
+            assert_eq!(name, "Person");
+            assert_eq!(prop_names, vec!["active", "age", "name"]);
+            assert_eq!(tuple.len(), 3);
+            assert_eq!(tuple, vec![DynSolType::Bool, DynSolType::Uint(256), DynSolType::String]);
+            return Ok(());
+        }
+        panic!("Expected Person to be CustomStruct");
+    }
+
+    #[test]
+    fn test_resolve_type_for_array_of_structs() -> Result<()> {
+        // Define a struct with fields in a specific, non-alphabetical order.
+        let mut defs_map = BTreeMap::new();
+        defs_map.insert(
+            "Item".to_string(),
+            vec![
+                ("name".to_string(), "string".to_string()),
+                ("price".to_string(), "uint256".to_string()),
+                ("id".to_string(), "uint256".to_string()),
+            ],
+        );
+        let struct_defs = StructDefinitions::new(defs_map);
+
+        // Simulate resolver output: type string, using alphabetical order for fields.
+        let ty_desc = "Item(uint256 id,string name,uint256 price)";
+
+        // Resolve type and ensure struct definition order is preserved.
+        let ty = resolve_type(ty_desc, Some(&struct_defs)).unwrap();
+        let array_ty = DynSolType::Array(Box::new(ty));
+        if let DynSolType::Array(item_ty) = array_ty
+            && let DynSolType::CustomStruct { name, prop_names, tuple } = *item_ty
+        {
+            assert_eq!(name, "Item");
+            assert_eq!(prop_names, vec!["name", "price", "id"]);
+            assert_eq!(
+                tuple,
+                vec![DynSolType::String, DynSolType::Uint(256), DynSolType::Uint(256)]
+            );
+            return Ok(());
+        }
+        panic!("Expected CustomStruct in array");
+    }
+
+    #[test]
+    fn test_parse_json_missing_field() {
+        // Define a struct with a specific field order.
+        let mut defs_map = BTreeMap::new();
+        defs_map.insert(
+            "Person".to_string(),
+            vec![
+                ("name".to_string(), "string".to_string()),
+                ("age".to_string(), "uint256".to_string()),
+            ],
+        );
+        let struct_defs = StructDefinitions::new(defs_map);
+
+        // JSON missing the "age" field
+        let json_str = r#"{ "name": "Alice" }"#;
+
+        // Simulate resolver output: type string, using alphabetical order for fields.
+        let type_description = "Person(uint256 age,string name)";
+        let ty = resolve_type(type_description, Some(&struct_defs)).unwrap();
+
+        // Now, attempt to parse the incomplete JSON using the ordered type.
+        let json_value: Value = serde_json::from_str(json_str).unwrap();
+        let result = parse_json_as(&json_value, &ty);
+
+        // Should fail with a missing field error because `parse_json_map` requires all fields.
+        assert!(result.is_err());
+        assert!(result.unwrap_err().to_string().contains("field \"age\" not found in JSON object"));
+    }
+
+    #[test]
+    fn test_serialize_json_with_struct_def_order() {
+        // Define a struct with a specific, non-alphabetical field order.
+        let mut defs_map = BTreeMap::new();
+        defs_map.insert(
+            "Item".to_string(),
+            vec![
+                ("name".to_string(), "string".to_string()),
+                ("id".to_string(), "uint256".to_string()),
+                ("active".to_string(), "bool".to_string()),
+            ],
+        );
+        let struct_defs = StructDefinitions::new(defs_map);
+
+        // Create a DynSolValue instance for the struct.
+        let item_struct = DynSolValue::CustomStruct {
+            name: "Item".to_string(),
+            prop_names: vec!["name".to_string(), "id".to_string(), "active".to_string()],
+            tuple: vec![
+                DynSolValue::String("Test Item".to_string()),
+                DynSolValue::Uint(U256::from(123), 256),
+                DynSolValue::Bool(true),
+            ],
+        };
+
+        // Serialize the value to JSON and verify that the order is preserved.
+        let json_value = serialize_value_as_json(item_struct, &struct_defs).unwrap();
+        let json_string = serde_json::to_string(&json_value).unwrap();
+        assert_eq!(json_string, r#"{"name":"Test Item","id":123,"active":true}"#);
+    }
+
+    #[test]
+    fn test_json_full_cycle_typed_with_struct_defs() {
+        // Define a struct with a specific, non-alphabetical field order.
+        let mut defs_map = BTreeMap::new();
+        defs_map.insert(
+            "Wallet".to_string(),
+            vec![
+                ("owner".to_string(), "address".to_string()),
+                ("balance".to_string(), "uint256".to_string()),
+                ("id".to_string(), "bytes32".to_string()),
+            ],
+        );
+        let struct_defs = StructDefinitions::new(defs_map);
+
+        // Create the "original" DynSolValue instance.
+        let owner_address = Address::from([1; 20]);
+        let wallet_id = B256::from([2; 32]);
+        let original_wallet = DynSolValue::CustomStruct {
+            name: "Wallet".to_string(),
+            prop_names: vec!["owner".to_string(), "balance".to_string(), "id".to_string()],
+            tuple: vec![
+                DynSolValue::Address(owner_address),
+                DynSolValue::Uint(U256::from(5000), 256),
+                DynSolValue::FixedBytes(wallet_id, 32),
+            ],
+        };
+
+        // Serialize it. The resulting JSON should respect the struct definition order.
+        let json_value = serialize_value_as_json(original_wallet.clone(), &struct_defs).unwrap();
+        let json_string = serde_json::to_string(&json_value).unwrap();
+        assert_eq!(
+            json_string,
+            format!(r#"{{"owner":"{owner_address}","balance":5000,"id":"{wallet_id}"}}"#)
+        );
+
+        // Resolve the type, which should also respect the struct definition order.
+        let type_description = "Wallet(uint256 balance,bytes32 id,address owner)";
+        let resolved_type = resolve_type(type_description, Some(&struct_defs)).unwrap();
+
+        // Parse the JSON using the correctly ordered resolved type. Ensure that it is identical to
+        // the original one.
+        let parsed_value = parse_json_as(&json_value, &resolved_type).unwrap();
+        assert_eq!(parsed_value, original_wallet);
+    }
 }
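Editor's note: a minimal sketch (not part of the patch) of the new object-ordering behavior in `json_value_to_token`: when a definition's field names match the object's keys exactly, the resulting tuple follows the Solidity declaration order; otherwise it falls back to the alphabetical key order. It assumes it lives in the same `json.rs` test module, where the helpers and imports above are in scope; the `Flag` struct is hypothetical.

#[test]
fn object_fields_follow_struct_definition_order() {
    let mut defs_map = BTreeMap::new();
    defs_map.insert(
        "Flag".to_string(),
        vec![
            ("name".to_string(), "string".to_string()),
            ("enabled".to_string(), "bool".to_string()),
        ],
    );
    let defs = StructDefinitions::new(defs_map);

    // JSON keys arrive in an arbitrary (here: alphabetical) order.
    let json: Value = serde_json::from_str(r#"{"enabled":true,"name":"fast"}"#).unwrap();

    // With a matching definition the tuple is (name, enabled), i.e. declaration order.
    let token = json_value_to_token(&defs, &json).unwrap();
    assert_eq!(
        token,
        DynSolValue::Tuple(vec![
            DynSolValue::String("fast".to_string()),
            DynSolValue::Bool(true),
        ])
    );

    // Without any definitions, the fallback keeps the alphabetical (enabled, name) order.
    let token = json_value_to_token(&StructDefinitions::default(), &json).unwrap();
    assert_eq!(
        token,
        DynSolValue::Tuple(vec![
            DynSolValue::Bool(true),
            DynSolValue::String("fast".to_string()),
        ])
    );
}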
diff --git a/crates/cheatcodes/src/toml.rs b/crates/cheatcodes/src/toml.rs
index 4ae16340ec6be..e36b26f244966 100644
--- a/crates/cheatcodes/src/toml.rs
+++ b/crates/cheatcodes/src/toml.rs
@@ -23,16 +23,16 @@ impl Cheatcode for keyExistsTomlCall {
 }

 impl Cheatcode for parseToml_0Call {
-    fn apply(&self, _state: &mut Cheatcodes) -> Result {
+    fn apply(&self, state: &mut Cheatcodes) -> Result {
         let Self { toml } = self;
-        parse_toml(toml, "$")
+        parse_toml(state, toml, "$")
     }
 }

 impl Cheatcode for parseToml_1Call {
-    fn apply(&self, _state: &mut Cheatcodes) -> Result {
+    fn apply(&self, state: &mut Cheatcodes) -> Result {
         let Self { toml, key } = self;
-        parse_toml(toml, key)
+        parse_toml(state, toml, key)
     }
 }

@@ -137,21 +137,21 @@ impl Cheatcode for parseTomlBytes32ArrayCall {
 impl Cheatcode for parseTomlType_0Call {
     fn apply(&self, _state: &mut Cheatcodes) -> Result {
         let Self { toml, typeDescription } = self;
-        parse_toml_coerce(toml, "$", &resolve_type(typeDescription)?).map(|v| v.abi_encode())
+        parse_toml_coerce(toml, "$", &resolve_type(typeDescription, None)?).map(|v| v.abi_encode())
     }
 }

 impl Cheatcode for parseTomlType_1Call {
     fn apply(&self, _state: &mut Cheatcodes) -> Result {
         let Self { toml, key, typeDescription } = self;
-        parse_toml_coerce(toml, key, &resolve_type(typeDescription)?).map(|v| v.abi_encode())
+        parse_toml_coerce(toml, key, &resolve_type(typeDescription, None)?).map(|v| v.abi_encode())
     }
 }

 impl Cheatcode for parseTomlTypeArrayCall {
     fn apply(&self, _state: &mut Cheatcodes) -> Result {
         let Self { toml, key, typeDescription } = self;
-        let ty = resolve_type(typeDescription)?;
+        let ty = resolve_type(typeDescription, None)?;
         parse_toml_coerce(toml, key, &DynSolType::Array(Box::new(ty))).map(|v| v.abi_encode())
     }
 }
@@ -200,8 +200,8 @@ fn parse_toml_str(toml: &str) -> Result<toml::Value> {
 }

 /// Parse a TOML string and return the value at the given path.
-fn parse_toml(toml: &str, key: &str) -> Result {
-    parse_json(&toml_to_json_string(toml)?, key)
+fn parse_toml(state: &Cheatcodes, toml: &str, key: &str) -> Result {
+    parse_json(&toml_to_json_string(toml)?, key, &state.struct_defs)
 }

 /// Parse a TOML string and return the value at the given path, coercing it to the given type.
diff --git a/crates/common/src/lib.rs b/crates/common/src/lib.rs
index c672f014df226..83bb94420bb5a 100644
--- a/crates/common/src/lib.rs
+++ b/crates/common/src/lib.rs
@@ -30,6 +30,7 @@ pub mod provider;
 pub mod reports;
 pub mod retry;
 pub mod selectors;
+pub mod sema;
 pub mod serde_helpers;
 pub mod term;
 pub mod traits;
diff --git a/crates/common/src/sema.rs b/crates/common/src/sema.rs
new file mode 100644
index 0000000000000..9347d591c1320
--- /dev/null
+++ b/crates/common/src/sema.rs
@@ -0,0 +1,136 @@
+//! Semantic analysis helpers for extracting type information and other metadata from the HIR.
+
+use eyre::{Result, eyre};
+use solar_sema::{
+    GcxWrapper, Hir, hir,
+    ty::{Ty, TyKind},
+};
+use std::{collections::BTreeMap, ops::Deref, sync::Arc};
+
+#[derive(Debug, Clone)]
+pub struct StructDefinitions(Arc<BTreeMap<String, Vec<(String, String)>>>);
+
+impl StructDefinitions {
+    pub fn new(map: BTreeMap<String, Vec<(String, String)>>) -> Self {
+        Self(Arc::new(map))
+    }
+}
+
+impl Default for StructDefinitions {
+    fn default() -> Self {
+        Self(Arc::new(BTreeMap::new()))
+    }
+}
+
+impl Deref for StructDefinitions {
+    type Target = BTreeMap<String, Vec<(String, String)>>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl AsRef<Arc<BTreeMap<String, Vec<(String, String)>>>> for StructDefinitions {
+    fn as_ref(&self) -> &Arc<BTreeMap<String, Vec<(String, String)>>> {
+        &self.0
+    }
+}
+
+/// Generates a map of all struct definitions from the HIR using the resolved `Ty` system.
+pub struct SemanticAnalysisProcessor<'hir> {
+    gcx: GcxWrapper<'hir>,
+    struct_defs: BTreeMap<String, Vec<(String, String)>>,
+}
+
+impl<'hir> SemanticAnalysisProcessor<'hir> {
+    /// Constructs a new generator.
+    pub fn new(gcx: GcxWrapper<'hir>) -> Self {
+        Self { gcx, struct_defs: BTreeMap::new() }
+    }
+
+    /// Processes the HIR to generate all the struct definitions.
+    pub fn process(mut self) -> Result<Self> {
+        for id in self.hir().strukt_ids() {
+            self.resolve_struct_definition(id)?;
+        }
+
+        Ok(self)
+    }
+
+    pub fn struct_defs(self) -> StructDefinitions {
+        StructDefinitions(Arc::new(self.struct_defs))
+    }
+
+    #[inline]
+    fn hir(&self) -> &'hir Hir<'hir> {
+        &self.gcx.get().hir
+    }
+
+    /// The recursive core of the generator. Resolves a single struct and adds it to the cache.
+    fn resolve_struct_definition(&mut self, id: hir::StructId) -> Result<()> {
+        let qualified_name = self.get_fully_qualified_name(id);
+        if self.struct_defs.contains_key(&qualified_name) {
+            return Ok(());
+        }
+
+        let gcx = self.gcx.get();
+        let hir = &gcx.hir;
+        let strukt = hir.strukt(id);
+        let mut fields = Vec::with_capacity(strukt.fields.len());
+
+        for &field_id in strukt.fields {
+            let var = hir.variable(field_id);
+            let name = var.name.ok_or_else(|| eyre!("Struct field is missing a name"))?.to_string();
+            if let Some(ty_str) = self.ty_to_string(gcx.type_of_hir_ty(&var.ty)) {
+                fields.push((name, ty_str));
+            }
+        }
+
+        if !fields.is_empty() {
+            self.struct_defs.insert(qualified_name, fields);
+        }
+
+        Ok(())
+    }
+
+    /// Converts a resolved `Ty` into its canonical string representation.
+    fn ty_to_string(&mut self, ty: Ty<'hir>) -> Option<String> {
+        let ty = ty.peel_refs();
+        let res = match ty.kind {
+            TyKind::Elementary(e) => e.to_string(),
+            TyKind::Array(ty, size) => {
+                let inner_type = self.ty_to_string(ty)?;
+                format!("{inner_type}[{size}]")
+            }
+            TyKind::DynArray(ty) => {
+                let inner_type = self.ty_to_string(ty)?;
+                format!("{inner_type}[]")
+            }
+            TyKind::Struct(id) => {
+                // Ensure the nested struct is resolved before proceeding.
+                self.resolve_struct_definition(id).ok()?;
+                self.get_fully_qualified_name(id)
+            }
+            TyKind::Udvt(ty, _) => self.ty_to_string(ty)?,
+            // For now, map enums to `uint8`
+            TyKind::Enum(_) => "uint8".to_string(),
+            // For now, map contracts to `address`
+            TyKind::Contract(_) => "address".to_string(),
+            // Explicitly disallow unsupported types
+            _ => return None,
+        };
+
+        Some(res)
+    }
+
+    /// Helper to get the fully qualified name `Contract.Struct`.
+    fn get_fully_qualified_name(&self, id: hir::StructId) -> String {
+        let hir = self.hir();
+        let strukt = hir.strukt(id);
+        if let Some(contract_id) = strukt.contract {
+            format!("{}.{}", hir.contract(contract_id).name.as_str(), strukt.name.as_str())
+        } else {
+            strukt.name.as_str().into()
+        }
+    }
+}
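Editor's note: an illustrative sketch (not part of the patch) of consuming the collected definitions. `StructDefinitions` derefs to the underlying `BTreeMap<String, Vec<(String, String)>>`, keyed by the fully qualified name built in `get_fully_qualified_name`; the `Vault.Deposit` key below is hypothetical.

use foundry_common::sema::StructDefinitions;

fn print_struct_layout(defs: &StructDefinitions) {
    // `get` is available through `Deref<Target = BTreeMap<..>>`.
    if let Some(fields) = defs.get("Vault.Deposit") {
        // Fields come back as (name, canonical type) pairs in Solidity source order.
        for (name, ty) in fields {
            println!("{name}: {ty}");
        }
    }
}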
diff --git a/crates/evm/evm/src/inspectors/stack.rs b/crates/evm/evm/src/inspectors/stack.rs
index 0cf637c4349f0..30ef38dfd6f00 100644
--- a/crates/evm/evm/src/inspectors/stack.rs
+++ b/crates/evm/evm/src/inspectors/stack.rs
@@ -8,6 +8,7 @@ use alloy_primitives::{
     map::{AddressHashMap, HashMap},
 };
 use foundry_cheatcodes::{CheatcodesExecutor, Wallets};
+use foundry_common::sema::StructDefinitions;
 use foundry_evm_core::{
     ContextExt, Env, InspectorExt,
     backend::{DatabaseExt, JournaledState},
@@ -71,6 +72,8 @@ pub struct InspectorStackBuilder {
     pub wallets: Option<Wallets>,
     /// The CREATE2 deployer address.
     pub create2_deployer: Address,
+    /// The user-defined structs of the contracts.
+    pub struct_defs: StructDefinitions,
 }

 impl InspectorStackBuilder {
@@ -175,6 +178,12 @@ impl InspectorStackBuilder {
         self
     }

+    #[inline]
+    pub fn struct_defs(mut self, struct_defs: StructDefinitions) -> Self {
+        self.struct_defs = struct_defs;
+        self
+    }
+
     /// Builds the stack of inspectors to use when transacting/committing on the EVM.
     pub fn build(self) -> InspectorStack {
         let Self {
@@ -191,12 +200,13 @@ impl InspectorStackBuilder {
             odyssey,
             wallets,
             create2_deployer,
+            struct_defs,
         } = self;
         let mut stack = InspectorStack::new();

         // inspectors
         if let Some(config) = cheatcodes {
-            let mut cheatcodes = Cheatcodes::new(config);
+            let mut cheatcodes = Cheatcodes::new(config, struct_defs);
             // Set wallets if they are provided
             if let Some(wallets) = wallets {
                 cheatcodes.set_wallets(wallets);
@@ -736,10 +746,12 @@ impl InspectorStackRefMut<'_> {
     /// it.
     fn with_stack<O>(&mut self, f: impl FnOnce(&mut InspectorStack) -> O) -> O {
         let mut stack = InspectorStack {
-            cheatcodes: self
-                .cheatcodes
-                .as_deref_mut()
-                .map(|cheats| core::mem::replace(cheats, Cheatcodes::new(cheats.config.clone()))),
+            cheatcodes: self.cheatcodes.as_deref_mut().map(|cheats| {
+                core::mem::replace(
+                    cheats,
+                    Cheatcodes::new(cheats.config.clone(), cheats.struct_defs.clone()),
+                )
+            }),
             inner: std::mem::take(self.inner),
         };

diff --git a/crates/forge/src/cmd/test/mod.rs b/crates/forge/src/cmd/test/mod.rs
index e6171f0cf17c5..8d7c8a4972a77 100644
--- a/crates/forge/src/cmd/test/mod.rs
+++ b/crates/forge/src/cmd/test/mod.rs
@@ -2,6 +2,7 @@
 use super::{install, test::filter::ProjectPathsAwareFilter, watch::WatchArgs};
 use crate::{
     MultiContractRunner, MultiContractRunnerBuilder, TestFilter, decode::decode_console_logs,
+    foundry_common::sema::{SemanticAnalysisProcessor, StructDefinitions},
     gas_report::GasReport,
     multi_runner::matches_contract,
     result::{SuiteResult, TestOutcome, TestStatus},
@@ -18,7 +19,7 @@ use clap::{Parser, ValueHint};
 use eyre::{Context, OptionExt, Result, bail};
 use foundry_block_explorers::EtherscanApiVersion;
 use foundry_cli::{
-    opts::{BuildOpts, GlobalArgs},
+    opts::{BuildOpts, GlobalArgs, solar_pcx_from_build_opts},
     utils::{self, LoadConfig},
 };
 use foundry_common::{TestFunctionExt, compile::ProjectCompiler, evm::EvmArgs, fs, shell};
@@ -42,6 +43,7 @@ use foundry_config::{
 use foundry_debugger::Debugger;
 use foundry_evm::traces::identifier::TraceIdentifiers;
 use regex::Regex;
+use solar_sema::interface::Session;
 use std::{
     collections::{BTreeMap, BTreeSet},
     fmt::Write,
@@ -308,6 +310,7 @@ impl TestArgs {
         trace!(target: "forge::test", ?filter, "using filter");

         let sources_to_compile = self.get_sources_to_compile(&config, &filter)?;
+        let input: Vec<PathBuf> = sources_to_compile.iter().cloned().collect();

         let compiler = ProjectCompiler::new()
             .dynamic_test_linking(config.dynamic_test_linking)
@@ -316,6 +319,23 @@ impl TestArgs {

         let output = compiler.compile(&project)?;

+        // Instantiate solar's parsing context
+        let mut sess = Session::builder().with_stderr_emitter().build();
+        sess.dcx = sess.dcx.set_flags(|flags| flags.track_diagnostics = false);
+
+        let pcx = solar_pcx_from_build_opts(&sess, &self.build, Some(&project), Some(&input))?;
+        let struct_defs = sess.enter_parallel(|| -> Result<StructDefinitions> {
+            // Parse and lower to HIR
+            let hir_arena = solar_sema::thread_local::ThreadLocal::new();
+            let hir_result = pcx.parse_and_lower(&hir_arena);
+
+            if let Ok(Some(gcx)) = hir_result {
+                return SemanticAnalysisProcessor::new(gcx).process().map(|res| res.struct_defs());
+            }
+
+            Err(eyre::eyre!("Error lowering AST"))
+        })?;
+
         // Create test options from general project settings and compiler output.
         let project_root = &project.paths.root;
@@ -355,6 +375,7 @@ impl TestArgs {
             .with_fork(evm_opts.get_fork(&config, env.clone()))
             .enable_isolation(evm_opts.isolate)
             .odyssey(evm_opts.odyssey)
+            .struct_defs(struct_defs)
             .build::<MultiCompiler>(project_root, &output, env, evm_opts)?;

         let libraries = runner.libraries.clone();
diff --git a/crates/forge/src/multi_runner.rs b/crates/forge/src/multi_runner.rs
index 76ae5d6da10e9..3cd89db017b17 100644
--- a/crates/forge/src/multi_runner.rs
+++ b/crates/forge/src/multi_runner.rs
@@ -7,7 +7,10 @@ use crate::{
 use alloy_json_abi::{Function, JsonAbi};
 use alloy_primitives::{Address, Bytes, U256};
 use eyre::Result;
-use foundry_common::{ContractsByArtifact, TestFunctionExt, get_contract_name, shell::verbosity};
+use foundry_common::{
+    ContractsByArtifact, TestFunctionExt, get_contract_name, sema::StructDefinitions,
+    shell::verbosity,
+};
 use foundry_compilers::{
     Artifact, ArtifactId, ProjectCompileOutput,
     artifacts::{Contract, Libraries},
@@ -58,6 +61,8 @@ pub struct MultiContractRunner {
     pub libs_to_deploy: Vec<Bytes>,
     /// Library addresses used to link contracts.
     pub libraries: Libraries,
+    /// Other metadata extracted from the semantic analysis of the contracts.
+    pub metadata: StructDefinitions,

     /// The fork to use at launch
     pub fork: Option<CreateFork>,
@@ -249,7 +254,12 @@ impl MultiContractRunner {

         debug!("start executing all tests in contract");

-        let executor = self.tcfg.executor(self.known_contracts.clone(), artifact_id, db.clone());
+        let executor = self.tcfg.executor(
+            self.known_contracts.clone(),
+            artifact_id,
+            db.clone(),
+            self.metadata.clone(),
+        );
         let runner = ContractRunner::new(
             &identifier,
             contract,
@@ -347,6 +357,7 @@ impl TestRunnerConfig {
         known_contracts: ContractsByArtifact,
         artifact_id: &ArtifactId,
         db: Backend,
+        struct_defs: StructDefinitions,
     ) -> Executor {
         let cheats_config = Arc::new(CheatsConfig::new(
             &self.config,
@@ -363,6 +374,7 @@ impl TestRunnerConfig {
                 .enable_isolation(self.isolation)
                 .odyssey(self.odyssey)
                 .create2_deployer(self.evm_opts.create2_deployer)
+                .struct_defs(struct_defs)
         })
         .spec_id(self.spec_id)
         .gas_limit(self.evm_opts.gas_limit())
@@ -404,6 +416,8 @@ pub struct MultiContractRunnerBuilder {
     pub isolation: bool,
     /// Whether to enable Odyssey features.
     pub odyssey: bool,
+    /// The user-defined structs of the contracts.
+    pub struct_defs: StructDefinitions,
 }

 impl MultiContractRunnerBuilder {
@@ -419,6 +433,7 @@ impl MultiContractRunnerBuilder {
             isolation: Default::default(),
             decode_internal: Default::default(),
             odyssey: Default::default(),
+            struct_defs: Default::default(),
         }
     }

@@ -467,6 +482,11 @@ impl MultiContractRunnerBuilder {
         self
     }

+    pub fn struct_defs(mut self, struct_defs: StructDefinitions) -> Self {
+        self.struct_defs = struct_defs;
+        self
+    }
+
     /// Given an EVM, proceeds to return a runner which is able to execute all tests
     /// against that evm
     pub fn build<C: Compiler<CompilerContract = Contract>>(
@@ -527,6 +547,7 @@ impl MultiContractRunnerBuilder {
             known_contracts,
             libs_to_deploy,
             libraries,
+            metadata: self.struct_defs,

             fork: self.fork,
diff --git a/crates/forge/tests/cli/bind_json.rs b/crates/forge/tests/cli/bind_json.rs
index fcc081f6b6f06..fc6aa1c2b2db4 100644
--- a/crates/forge/tests/cli/bind_json.rs
+++ b/crates/forge/tests/cli/bind_json.rs
@@ -123,3 +123,69 @@ library JsonBindings {
     cmd.forge_fuse().args(["test"]).assert_success();
 });
+
+// tests enhanced `vm.parseJson` and `vm.serializeJson` cheatcodes, which are not constrained to
+// alphabetical ordering of struct keys, but rather respect the Solidity struct definition.
+forgetest_init!(test_parse_json, |prj, cmd| {
+    prj.add_test(
+        "JsonCheats",
+        r#"
+import {Test} from "forge-std/Test.sol";
+
+// Definition order: color, sweetness, sourness
+// Alphabetical order: color, sourness, sweetness
+struct Apple {
+    string color;
+    uint8 sweetness;
+    uint8 sourness;
+}
+
+// Definition order: name, apples
+// Alphabetical order: apples, name
+struct FruitStall {
+    string name;
+    Apple[] apples;
+}
+
+contract SimpleJsonCheatsTest is Test {
+    function testJsonParseAndSerialize() public {
+        // Initial JSON has keys in a custom order, different from definition and alphabetical.
+        string memory originalJson =
+            '{"name":"Fresh Fruit","apples":[{"sweetness":7,"sourness":3,"color":"Red"},{"sweetness":5,"sourness":5,"color":"Green"}]}';
+
+        // Parse the original JSON. The parser should correctly handle the unordered keys.
+        bytes memory decoded = vm.parseJson(originalJson);
+        FruitStall memory originalType = abi.decode(decoded, (FruitStall));
+
+        // Assert initial parsing is correct
+        assertEq(originalType.name, "Fresh Fruit");
+        assertEq(originalType.apples[0].color, "Red");
+        assertEq(originalType.apples[0].sweetness, 7);
+        assertEq(originalType.apples[1].sourness, 5);
+
+        // Serialize the struct back to JSON. `vm.serializeJson` should respect the order for all keys.
+        string memory serializedJson = vm.serializeJsonType(
+            "FruitStall(Apple[] apples,string name)Apple(string color,uint8 sourness,uint8 sweetness)",
+            abi.encode(originalType)
+        );
+
+        // The expected JSON should have keys ordered according to the struct definitions.
+        string memory expectedJson =
+            '{"name":"Fresh Fruit","apples":[{"color":"Red","sweetness":7,"sourness":3},{"color":"Green","sweetness":5,"sourness":5}]}';
+        assertEq(serializedJson, expectedJson);
+
+        // Parse the newly serialized JSON to complete the cycle.
+        bytes memory redecoded = vm.parseJson(serializedJson);
+        FruitStall memory finalType = abi.decode(redecoded, (FruitStall));
+
+        // Assert that the struct from the full cycle is identical to the original parsed struct.
+        assertEq(keccak256(abi.encode(finalType)), keccak256(abi.encode(originalType)));
+    }
+}
+"#,
+    )
+    .unwrap();
+
+    // Directly run the test. No `bind-json` or type schemas are needed.
+    cmd.forge_fuse().args(["test"]).assert_success();
+});
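Editor's note: a sketch (not part of the patch) of how the pieces fit together end to end: definitions produced by `SemanticAnalysisProcessor` are handed to the inspector stack builder, which forwards them into `Cheatcodes::new`. The module paths, the `cheatcodes(..)` builder call, and the bare `CheatsConfig::default()` are assumptions kept minimal for illustration.

use std::sync::Arc;

use foundry_cheatcodes::CheatsConfig;
use foundry_common::sema::StructDefinitions;
use foundry_evm::inspectors::{InspectorStack, InspectorStackBuilder};

fn stack_with_struct_defs(struct_defs: StructDefinitions) -> InspectorStack {
    InspectorStackBuilder::new()
        // The cheatcodes inspector is only created when a config is supplied.
        .cheatcodes(Arc::new(CheatsConfig::default()))
        // The new hook added in this diff; the value ends up in `Cheatcodes::struct_defs`.
        .struct_defs(struct_defs)
        .build()
}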