refactor: Misc improvements to code (#277)

* refactor: Misc improvements to code

- Decouple validation logic from TychoRunner
- Move all data fetching and Tycho message decoding into the same method
- Split validate_state into validate_state, validate_token_balances and simulate_and_execute
- Make rpc_provider and runtime attributes of TestRunner (see the struct sketch below)
- Add references where possible to avoid clones
- Remove unnecessary code
- Make clippy happy
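
The resulting TestRunner now owns the RPC provider and the Tokio runtime (field list taken from the diff below):

    pub struct TestRunner {
        db_url: String,
        vm_simulation_traces: bool,
        substreams_path: PathBuf,
        adapter_contract_builder: AdapterContractBuilder,
        match_test: Option<String>,
        config_file_path: PathBuf,
        runtime: Runtime,          // Tokio runtime created once in TestRunner::new
        rpc_provider: RPCProvider, // built from RPC_URL and the execution_traces flag
    }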

#time 2h 36m

#time 3m

* chore: Use tycho deps and foundry from tycho_simulation

This reduces the risk of pulling conflicting versions across the different repositories
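
In practice the sub-crates are now reached through tycho_simulation's re-exports, for example (paths as they appear in the diff below):

    use tycho_simulation::{
        tycho_client::feed::BlockHeader,
        tycho_common::{dto::Chain, Bytes},
        tycho_ethereum::entrypoint_tracer::balance_slot_detector::EVMBalanceSlotDetector,
    };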

#time 32m


* chore: Read RPC_URL in main.rs
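
A minimal sketch of the resulting main.rs change (mirroring the diff below):

    use std::env;

    use dotenv::dotenv;
    use miette::{IntoDiagnostic, WrapErr};

    fn main() -> miette::Result<()> {
        // Load .env first so RPC_URL can come from a local .env file
        dotenv().ok();
        let rpc_url = env::var("RPC_URL")
            .into_diagnostic()
            .wrap_err("Missing RPC_URL in environment")?;
        // rpc_url is then handed to TestRunner::new(...)
        let _ = rpc_url;
        Ok(())
    }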

#time 10m

* fix: Support ETH trades (skip balance and allowance overwrites) and set the balance overwrite to the input amount

For tokens like USDC, setting the balance extremely high was getting us blacklisted.
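
Condensed, the new logic in setup_user_overwrites looks roughly like this (elided parts are marked; see the full version in the diff below):

    // 100 ETH so the sender can always pay gas
    let mut eth_balance = U256::from_str("100000000000000000000").unwrap();
    if solution.given_token == Bytes::zero(20) {
        // ETH trade: no token balance/allowance slot overwrites, just add the input amount
        eth_balance += biguint_to_u256(&solution.given_amount);
    } else {
        // ERC20 trade: the allowance slot is still set to U256::MAX, but the balance slot
        // is set to exactly the input amount instead of U256::MAX
        // ... detect balance/allowance slots and insert an AccountOverride for the token ...
    }
    overwrites.insert(user_address, AccountOverride::default().with_balance(eth_balance));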

#time 1h 12m

* fix: Fix Curve tests and filter components_by_id by the expected_component_ids

#time 1h 30m


* fix: Don't use all the possible executor addresses. Hardcode just one for the test

Refactor overwrites logic:
- renamed functions
- moved related logic together
- don't build a custom StateOverride and then convert to alloy overrides; use alloy's AccountOverride directly (sketched below)
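
The overwrites are now plain alloy types; a minimal sketch (the helper name here is illustrative, the calls mirror setup_router_overwrites in the diff below):

    use alloy::{
        primitives::{map::AddressHashMap, Address, FixedBytes, U256},
        rpc::types::state::AccountOverride,
    };

    fn router_overwrite(
        router_address: Address,
        router_bytecode: Vec<u8>,
        executor_approval_slot: FixedBytes<32>,
    ) -> AddressHashMap<AccountOverride> {
        let mut overwrites = AddressHashMap::default();
        overwrites.insert(
            router_address,
            AccountOverride::default()
                .with_code(router_bytecode)
                // mark the single hardcoded executor as approved (storage value 1)
                .with_state_diff(vec![(executor_approval_slot, FixedBytes::<32>::from(U256::ONE))]),
        );
        overwrites
    }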

#time 1h 21m

* fix: Assume that the executors mapping starts at storage value 1

Move setup_router_overwrites out of the rpc module and into the execution file
Delete the now-unnecessary get_storage_at
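
For reference, the slot written in setup_router_overwrites is derived like this (a standalone restatement of calculate_executor_storage_slot from the diff; the function name here is illustrative):

    use alloy::primitives::{keccak256, Address, FixedBytes, U256};

    /// Solidity mapping slot: keccak256(abi.encodePacked(bytes32(key), bytes32(base_slot)))
    /// with base_slot = 1, since slot 0 is taken by AccessControl's _roles.
    fn executor_approval_slot(executor: Address) -> FixedBytes<32> {
        let mut buf = [0u8; 64];
        buf[12..32].copy_from_slice(executor.as_slice()); // left-pad the 20-byte address
        buf[32..].copy_from_slice(&U256::from(1).to_be_bytes::<32>());
        keccak256(buf)
    }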

#time 33m
Authored by dianacarvalho1 on 2025-09-25 17:27:05 +01:00, committed by GitHub
parent 12369c3981
commit b577e7d6b2
13 changed files with 1161 additions and 1087 deletions

File diff suppressed because it is too large.

View File

@@ -8,13 +8,7 @@ glob = "0.3.0"
miette = { version = "7.6.0", features = ["fancy"] } miette = { version = "7.6.0", features = ["fancy"] }
# Logging & Tracing # Logging & Tracing
tracing = "0.1.37" tracing = "0.1.37"
# Tycho dependencies tycho-simulation = { git = "https://github.com/propeller-heads/tycho-simulation.git", rev = "f73c2ef28328abdde791edf1fb21748f78dbee6a", features = ["evm"] }
tycho-common = { git = "https://github.com/propeller-heads/tycho-indexer.git", rev = "28d013a906c497d95e27f01f48fc887fb22dbbbc" }
tycho-client = { git = "https://github.com/propeller-heads/tycho-indexer.git", rev = "28d013a906c497d95e27f01f48fc887fb22dbbbc" }
tycho-simulation = { git = "https://github.com/propeller-heads/tycho-simulation.git", rev = "6585823a859a29bd64600cc1d2fa7d502d48d3e6" }
## TODO: for local development
#tycho-simulation = { path = "../../tycho-simulation", features = ["evm"] }
tycho-ethereum = { git = "https://github.com/propeller-heads/tycho-indexer.git", rev = "28d013a906c497d95e27f01f48fc887fb22dbbbc", features = ["onchain_data"] }
num-bigint = "0.4" num-bigint = "0.4"
num-traits = "0.2" num-traits = "0.2"
num-rational = "0.4.2" num-rational = "0.4.2"
@@ -36,7 +30,3 @@ colored = "3.0.0"
similar = "2.7.0" similar = "2.7.0"
termsize = "0.1.9" termsize = "0.1.9"
itertools = "0.14.0" itertools = "0.14.0"
# Foundry dependencies (same versions as tycho-simulation)
foundry-config = { git = "https://github.com/foundry-rs/foundry", rev = "5a552bb0de7126fa35170fd84532bbd3d40cd348" }
foundry-evm = { git = "https://github.com/foundry-rs/foundry", rev = "5a552bb0de7126fa35170fd84532bbd3d40cd348" }
revm = { version = "27.0.3", features = ["alloydb", "serde"] }

View File

@@ -3,7 +3,7 @@ use std::collections::{HashMap, HashSet};
use colored::Colorize; use colored::Colorize;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use similar::{ChangeTag, TextDiff}; use similar::{ChangeTag, TextDiff};
use tycho_common::{dto::ProtocolComponent, Bytes}; use tycho_simulation::{protocol::models::ProtocolComponent, tycho_common::Bytes};
/// Represents a ProtocolComponent with its main attributes /// Represents a ProtocolComponent with its main attributes
#[derive(Debug, Clone, Deserialize, Serialize)] #[derive(Debug, Clone, Deserialize, Serialize)]
@@ -31,14 +31,18 @@ impl ProtocolComponentExpectation {
let mut diffs = Vec::new(); let mut diffs = Vec::new();
// Compare id (case-insensitive) // Compare id (case-insensitive)
if self.id.to_lowercase() != other.id.to_lowercase() { if self.id.to_lowercase() != other.id.to_string().to_lowercase() {
let diff = self.format_diff("id", &self.id, &other.id, colorize_output); let diff = self.format_diff("id", &self.id, &other.id.to_string(), colorize_output);
diffs.push(format!("Field 'id' mismatch for {}:\n{}", self.id, diff)); diffs.push(format!("Field 'id' mismatch for {}:\n{}", self.id, diff));
} }
// Compare tokens (order-independent) // Compare tokens (order-independent)
let self_tokens_set: HashSet<_> = self.tokens.iter().collect(); let self_tokens_set: HashSet<_> = self.tokens.iter().collect();
let other_tokens_set: HashSet<_> = other.tokens.iter().collect(); let other_tokens_set: HashSet<_> = other
.tokens
.iter()
.map(|token| &token.address)
.collect();
if self_tokens_set != other_tokens_set { if self_tokens_set != other_tokens_set {
let self_tokens = format!("{:?}", self.tokens); let self_tokens = format!("{:?}", self.tokens);
let other_tokens = format!("{:?}", other.tokens); let other_tokens = format!("{:?}", other.tokens);

View File

@@ -9,10 +9,11 @@ use std::str::FromStr;
use alloy::{primitives::Keccak256, sol_types::SolValue}; use alloy::{primitives::Keccak256, sol_types::SolValue};
use miette::{IntoDiagnostic, WrapErr}; use miette::{IntoDiagnostic, WrapErr};
use num_bigint::BigUint; use num_bigint::BigUint;
use tycho_common::{dto::Chain, Bytes}; use serde_json::json;
use tycho_simulation::{ use tycho_simulation::{
evm::protocol::u256_num::biguint_to_u256, evm::protocol::u256_num::biguint_to_u256,
protocol::models::ProtocolComponent, protocol::models::ProtocolComponent,
tycho_common::{dto::Chain, Bytes},
tycho_execution::encoding::{ tycho_execution::encoding::{
errors::EncodingError, errors::EncodingError,
evm::{encoder_builders::TychoRouterEncoderBuilder, utils::bytes_to_address}, evm::{encoder_builders::TychoRouterEncoderBuilder, utils::bytes_to_address},
@@ -22,7 +23,7 @@ use tycho_simulation::{
}, },
}; };
use crate::execution::EXECUTORS_JSON; use crate::execution::EXECUTOR_ADDRESS;
/// Creates a Solution for the given swap parameters. /// Creates a Solution for the given swap parameters.
/// ///
@@ -36,17 +37,17 @@ use crate::execution::EXECUTORS_JSON;
/// # Returns /// # Returns
/// A `Result<Solution, EncodingError>` containing the solution, or an error if creation fails. /// A `Result<Solution, EncodingError>` containing the solution, or an error if creation fails.
pub fn get_solution( pub fn get_solution(
component: ProtocolComponent, component: &ProtocolComponent,
token_in: Bytes, token_in: &Bytes,
token_out: Bytes, token_out: &Bytes,
amount_in: BigUint, amount_in: &BigUint,
amount_out: BigUint, amount_out: &BigUint,
) -> miette::Result<Solution> { ) -> miette::Result<Solution> {
let alice_address = Bytes::from_str("0xcd09f75E2BF2A4d11F3AB23f1389FcC1621c0cc2") let user_address = Bytes::from_str("0xf847a638E44186F3287ee9F8cAF73FF4d4B80784")
.into_diagnostic() .into_diagnostic()
.wrap_err("Failed to parse Alice's address for Tycho router encoding")?; .wrap_err("Failed to parse Alice's address for Tycho router encoding")?;
let swap = SwapBuilder::new(component, token_in.clone(), token_out.clone()).build(); let swap = SwapBuilder::new(component.clone(), token_in.clone(), token_out.clone()).build();
let slippage = 0.0025; // 0.25% slippage let slippage = 0.0025; // 0.25% slippage
let bps = BigUint::from(10_000u32); let bps = BigUint::from(10_000u32);
@@ -55,11 +56,11 @@ pub fn get_solution(
let min_amount_out = (amount_out * &multiplier) / &bps; let min_amount_out = (amount_out * &multiplier) / &bps;
Ok(Solution { Ok(Solution {
sender: alice_address.clone(), sender: user_address.clone(),
receiver: alice_address.clone(), receiver: user_address.clone(),
given_token: token_in, given_token: token_in.clone(),
given_amount: amount_in, given_amount: amount_in.clone(),
checked_token: token_out, checked_token: token_out.clone(),
exact_out: false, exact_out: false,
checked_amount: min_amount_out, checked_amount: min_amount_out,
swaps: vec![swap], swaps: vec![swap],
@@ -83,18 +84,26 @@ pub fn get_solution(
/// A `Result<Transaction, EncodingError>` containing the encoded transaction data for the Tycho /// A `Result<Transaction, EncodingError>` containing the encoded transaction data for the Tycho
/// router, or an error if encoding fails. /// router, or an error if encoding fails.
pub fn encode_swap( pub fn encode_swap(
component: ProtocolComponent, component: &ProtocolComponent,
token_in: Bytes, token_in: &Bytes,
token_out: Bytes, token_out: &Bytes,
amount_in: BigUint, amount_in: &BigUint,
amount_out: BigUint, amount_out: &BigUint,
) -> miette::Result<(Transaction, Solution)> { ) -> miette::Result<(Transaction, Solution)> {
let chain: tycho_common::models::Chain = Chain::Ethereum.into(); let protocol_system = component.protocol_system.clone();
let executors_json = json!({
"ethereum": {
(protocol_system):EXECUTOR_ADDRESS
}
})
.to_string();
let chain: tycho_simulation::tycho_common::models::Chain = Chain::Ethereum.into();
let encoder = TychoRouterEncoderBuilder::new() let encoder = TychoRouterEncoderBuilder::new()
.chain(chain) .chain(chain)
.user_transfer_type(UserTransferType::TransferFrom) .user_transfer_type(UserTransferType::TransferFrom)
.executors_addresses(EXECUTORS_JSON.to_string()) .executors_addresses(executors_json)
.historical_trade() .historical_trade()
.build() .build()
.into_diagnostic() .into_diagnostic()

View File

@@ -7,25 +7,29 @@
use std::{collections::HashMap, str::FromStr, sync::LazyLock}; use std::{collections::HashMap, str::FromStr, sync::LazyLock};
use alloy::{ use alloy::{
primitives::{Address, U256}, primitives::{keccak256, map::AddressHashMap, Address, FixedBytes, U256},
rpc::types::{Block, TransactionRequest}, rpc::types::{state::AccountOverride, Block, TransactionRequest},
}; };
use miette::{miette, IntoDiagnostic, WrapErr}; use miette::{miette, IntoDiagnostic, WrapErr};
use num_bigint::BigUint; use num_bigint::BigUint;
use serde_json::Value;
use tracing::info; use tracing::info;
use tycho_common::traits::{AllowanceSlotDetector, BalanceSlotDetector}; use tycho_simulation::{
use tycho_ethereum::entrypoint_tracer::{ evm::protocol::u256_num::{biguint_to_u256, u256_to_biguint},
tycho_common::{
traits::{AllowanceSlotDetector, BalanceSlotDetector},
Bytes,
},
tycho_ethereum::entrypoint_tracer::{
allowance_slot_detector::{AllowanceSlotDetectorConfig, EVMAllowanceSlotDetector}, allowance_slot_detector::{AllowanceSlotDetectorConfig, EVMAllowanceSlotDetector},
balance_slot_detector::{BalanceSlotDetectorConfig, EVMBalanceSlotDetector}, balance_slot_detector::{BalanceSlotDetectorConfig, EVMBalanceSlotDetector},
}; },
use tycho_simulation::{ tycho_execution::encoding::models::Solution,
evm::protocol::u256_num::u256_to_biguint, tycho_execution::encoding::models::Solution,
}; };
use crate::rpc::RPCProvider; use crate::rpc::RPCProvider;
const ROUTER_BYTECODE_JSON: &str = include_str!("../../evm/test/router/TychoRouter.runtime.json"); pub const ROUTER_BYTECODE_JSON: &str =
pub const EXECUTORS_JSON: &str = include_str!("../test_executor_addresses.json"); include_str!("../../evm/test/router/TychoRouter.runtime.json");
pub const EXECUTOR_ADDRESS: &str = "0xaE04CA7E9Ed79cBD988f6c536CE11C621166f41B";
// Include all executor bytecode files at compile time // Include all executor bytecode files at compile time
const UNISWAP_V2_BYTECODE_JSON: &str = const UNISWAP_V2_BYTECODE_JSON: &str =
@@ -60,62 +64,6 @@ static EXECUTOR_MAPPING: LazyLock<HashMap<&'static str, &'static str>> = LazyLoc
map map
}); });
/// Executor addresses loaded from test_executor_addresses.json at startup
pub static EXECUTOR_ADDRESSES: LazyLock<HashMap<String, Address>> = LazyLock::new(|| {
let json_value: Value = serde_json::from_str(&EXECUTORS_JSON)
.expect("Failed to parse test_executor_addresses.json");
let ethereum_addresses = json_value["ethereum"]
.as_object()
.expect("Missing 'ethereum' key in test_executor_addresses.json");
let mut addresses = HashMap::new();
for (protocol_name, address_value) in ethereum_addresses {
let address_str = address_value
.as_str()
.unwrap_or_else(|| panic!("Invalid address format for protocol '{protocol_name}'"));
let address = Address::from_str(address_str).unwrap_or_else(|_| {
panic!("Invalid address '{address_str}' for protocol '{protocol_name}'")
});
addresses.insert(protocol_name.clone(), address);
}
addresses
});
#[derive(Debug, Clone)]
pub struct StateOverride {
pub code: Option<Vec<u8>>,
pub balance: Option<U256>,
pub state_diff: HashMap<alloy::primitives::Bytes, alloy::primitives::Bytes>,
}
impl StateOverride {
pub fn new() -> Self {
Self { code: None, balance: None, state_diff: HashMap::new() }
}
pub fn with_code(mut self, code: Vec<u8>) -> Self {
self.code = Some(code);
self
}
pub fn with_balance(mut self, balance: U256) -> Self {
self.balance = Some(balance);
self
}
pub fn with_state_diff(
mut self,
slot: alloy::primitives::Bytes,
value: alloy::primitives::Bytes,
) -> Self {
self.state_diff.insert(slot, value);
self
}
}
/// Get executor bytecode JSON based on component ID /// Get executor bytecode JSON based on component ID
fn get_executor_bytecode_json(component_id: &str) -> miette::Result<&'static str> { fn get_executor_bytecode_json(component_id: &str) -> miette::Result<&'static str> {
for (pattern, executor_json) in EXECUTOR_MAPPING.iter() { for (pattern, executor_json) in EXECUTOR_MAPPING.iter() {
@@ -126,20 +74,9 @@ fn get_executor_bytecode_json(component_id: &str) -> miette::Result<&'static str
Err(miette!("Unknown component type '{}' - no matching executor found", component_id)) Err(miette!("Unknown component type '{}' - no matching executor found", component_id))
} }
/// Get executor address for a given component ID /// Load executor bytecode from embedded constants based on the protocol system
fn get_executor_address(component_id: &str) -> miette::Result<Address> { pub fn load_executor_bytecode(protocol_system: &str) -> miette::Result<Vec<u8>> {
if let Some(&address) = EXECUTOR_ADDRESSES.get(component_id) { let executor_json = get_executor_bytecode_json(protocol_system)?;
return Ok(address);
}
Err(miette!("No executor address found for component type '{}'", component_id))
}
/// Load executor bytecode from embedded constants based on solution component
fn load_executor_bytecode(solution: &Solution) -> miette::Result<Vec<u8>> {
let first_swap = solution.swaps.first().unwrap();
let component_id = &first_swap.component;
let executor_json = get_executor_bytecode_json(&component_id.protocol_system)?;
let json_value: serde_json::Value = serde_json::from_str(executor_json) let json_value: serde_json::Value = serde_json::from_str(executor_json)
.into_diagnostic() .into_diagnostic()
@@ -177,36 +114,143 @@ fn calculate_gas_fees(block_header: &Block) -> miette::Result<(U256, U256)> {
Ok((max_fee_per_gas, max_priority_fee_per_gas)) Ok((max_fee_per_gas, max_priority_fee_per_gas))
} }
/// Set up all state overrides needed for simulation /// Calculate storage slot for Solidity mapping.
async fn setup_state_overrides( ///
/// The solidity code:
/// keccak256(abi.encodePacked(bytes32(key), bytes32(slot)))
pub fn calculate_executor_storage_slot(key: Address) -> FixedBytes<32> {
// Convert key (20 bytes) to 32-byte left-padded array (uint256)
let mut key_bytes = [0u8; 32];
key_bytes[12..].copy_from_slice(key.as_slice());
// The base of the executor storage slot is 1, since there is only one
// variable that is initialized before it (which is _roles in AccessControl.sol).
// In this case, _roles gets slot 0.
// Storage slots are assigned to the parent contracts' variables first.
let slot = U256::from(1);
// Convert U256 slot to 32-byte big-endian array
let slot_bytes = slot.to_be_bytes::<32>();
// Concatenate key_bytes + slot_bytes, then keccak hash
let mut buf = [0u8; 64];
buf[..32].copy_from_slice(&key_bytes);
buf[32..].copy_from_slice(&slot_bytes);
keccak256(buf)
}
/// Sets up state overwrites for the Tycho router and its associated executor.
///
/// This method prepares the router for simulation by:
/// 1. Overriding the router's bytecode with the embedded runtime bytecode
/// 2. Copying executor approval storage from the current block to maintain permissions
/// 3. Overriding the executor's bytecode based on the protocol system
///
/// # Arguments
/// * `router_address` - The address of the Tycho router contract
/// * `protocol_system` - The protocol system identifier (e.g., "uniswap_v2", "vm:balancer_v2")
///
/// # Returns
/// A HashMap containing account overwrites for both the router and executor addresses.
/// The router override includes bytecode and executor approval storage.
/// The executor override includes the appropriate bytecode for the protocol.
///
/// # Errors
/// Returns an error if:
/// - Router bytecode JSON parsing fails
/// - Executor address parsing fails
/// - Storage slot fetching fails
/// - Executor bytecode loading fails
pub async fn setup_router_overwrites(
router_address: Address,
protocol_system: &str,
) -> miette::Result<AddressHashMap<AccountOverride>> {
let json_value: serde_json::Value = serde_json::from_str(ROUTER_BYTECODE_JSON)
.into_diagnostic()
.wrap_err("Failed to parse router JSON")?;
let bytecode_str = json_value["runtimeBytecode"]
.as_str()
.ok_or_else(|| miette::miette!("No runtimeBytecode field found in router JSON"))?;
// Remove 0x prefix if present
let bytecode_hex =
if let Some(stripped) = bytecode_str.strip_prefix("0x") { stripped } else { bytecode_str };
let router_bytecode = hex::decode(bytecode_hex)
.into_diagnostic()
.wrap_err("Failed to decode router bytecode from hex")?;
// Start with the router bytecode override
let mut state_overwrites = AddressHashMap::default();
let mut tycho_router_override = AccountOverride::default().with_code(router_bytecode);
// Find executor address approval storage slot
let executor_address = Address::from_str(EXECUTOR_ADDRESS).into_diagnostic()?;
let storage_slot = calculate_executor_storage_slot(executor_address);
// The executors mapping starts at storage value 1
let storage_value = FixedBytes::<32>::from(U256::ONE);
tycho_router_override =
tycho_router_override.with_state_diff(vec![(storage_slot, storage_value)]);
state_overwrites.insert(router_address, tycho_router_override);
// Add bytecode overwrite for the executor
let executor_bytecode = load_executor_bytecode(protocol_system)?;
state_overwrites
.insert(executor_address, AccountOverride::default().with_code(executor_bytecode.to_vec()));
Ok(state_overwrites)
}
/// Sets up state overwrites for user accounts and tokens required for swap simulation.
///
/// This method prepares the user environment for historical block simulation by:
/// 1. Providing the user with sufficient ETH balance (100 ETH) for gas payments
/// 2. For ETH swaps: Adding the swap amount to the user's ETH balance
/// 3. For ERC20 swaps: Overriding token balance and allowance storage slots to ensure:
/// - User has sufficient tokens for the swap
/// - Router has unlimited allowance to spend user's tokens
///
/// The function uses EVM storage slot detection to find the correct storage locations
/// for token balances and allowances, then applies state overwrites to simulate the
/// required pre-conditions without executing actual token transfers.
///
/// # Arguments
/// * `solution` - The encoded swap solution containing token and amount information
/// * `transaction` - The transaction details for determining router address
/// * `user_address` - The address of the user performing the swap
/// * `rpc_url` - RPC endpoint URL for storage slot detection
/// * `block` - The historical block context for storage queries
///
/// # Returns
/// A HashMap containing account overwrites for:
/// - User account: ETH balance override
/// - Token contract: Balance and allowance storage slot overwrites (for ERC20 swaps)
///
/// # Errors
/// Returns an error if:
/// - Storage slot detection fails for balance or allowance
/// - Token address parsing fails
/// - RPC queries for storage detection fail
async fn setup_user_overwrites(
solution: &Solution, solution: &Solution,
transaction: &tycho_simulation::tycho_execution::encoding::models::Transaction, transaction: &tycho_simulation::tycho_execution::encoding::models::Transaction,
user_address: Address, user_address: Address,
executor_bytecode: &[u8],
rpc_url: String, rpc_url: String,
block: &Block, block: &Block,
) -> miette::Result<HashMap<Address, StateOverride>> { ) -> miette::Result<AddressHashMap<AccountOverride>> {
let mut state_overwrites = HashMap::new(); let mut overwrites = AddressHashMap::default();
let token_address = Address::from_slice(&solution.given_token[..20]);
// Extract executor address from the encoded solution's swaps data.
// The solution should only have one swap for the test, so this should be safe.
let executor_address = if let Some(first_swap) = solution.swaps.first() {
get_executor_address(&first_swap.component.protocol_system)?
} else {
return Err(miette!("No swaps in solution - cannot determine executor address"));
};
// Add bytecode overwrite for the executor
state_overwrites
.insert(executor_address, StateOverride::new().with_code(executor_bytecode.to_vec()));
// Add ETH balance override for the user to ensure they have enough gas funds // Add ETH balance override for the user to ensure they have enough gas funds
state_overwrites.insert( let mut eth_balance = U256::from_str("100000000000000000000").unwrap(); // 100 ETH
user_address,
StateOverride::new().with_balance(U256::from_str("100000000000000000000").unwrap()), // 100 ETH
);
let token_address = Address::from_slice(&solution.given_token[..20]);
// If given token is ETH, add the given amount to the balance
if solution.given_token == Bytes::zero(20) {
eth_balance += biguint_to_u256(&solution.given_amount);
// if the given token is not ETH, do balance and allowance slots overwrites
} else {
let detector = EVMBalanceSlotDetector::new(BalanceSlotDetectorConfig { let detector = EVMBalanceSlotDetector::new(BalanceSlotDetectorConfig {
rpc_url: rpc_url.clone(), rpc_url: rpc_url.clone(),
..Default::default() ..Default::default()
@@ -215,7 +259,7 @@ async fn setup_state_overrides(
let results = detector let results = detector
.detect_balance_slots( .detect_balance_slots(
&[solution.given_token.clone()], std::slice::from_ref(&solution.given_token),
(**user_address).into(), (**user_address).into(),
(*block.header.hash).into(), (*block.header.hash).into(),
) )
@@ -236,34 +280,40 @@ async fn setup_state_overrides(
let results = detector let results = detector
.detect_allowance_slots( .detect_allowance_slots(
&[solution.given_token.clone()], std::slice::from_ref(&solution.given_token),
(**user_address).into(), (**user_address).into(),
transaction.to.clone(), // tycho router transaction.to.clone(), // tycho router
(*block.header.hash).into(), (*block.header.hash).into(),
) )
.await; .await;
let allowance_slot = let allowance_slot = if let Some(Ok((_storage_addr, slot))) =
if let Some(Ok((_storage_addr, slot))) = results.get(&solution.given_token.clone()) { results.get(&solution.given_token.clone())
{
slot slot
} else { } else {
return Err(miette!("Couldn't find allowance storage slot for token {token_address}")); return Err(miette!("Couldn't find allowance storage slot for token {token_address}"));
}; };
state_overwrites.insert( overwrites.insert(
token_address, token_address,
StateOverride::new() AccountOverride::default().with_state_diff(vec![
.with_state_diff( (
alloy::primitives::Bytes::from(allowance_slot.to_vec()), alloy::primitives::B256::from_slice(allowance_slot),
alloy::primitives::Bytes::from(U256::MAX.to_be_bytes::<32>()), alloy::primitives::B256::from_slice(&U256::MAX.to_be_bytes::<32>()),
)
.with_state_diff(
alloy::primitives::Bytes::from(balance_slot.to_vec()),
alloy::primitives::Bytes::from(U256::MAX.to_be_bytes::<32>()),
), ),
(
alloy::primitives::B256::from_slice(balance_slot),
alloy::primitives::B256::from_slice(
&biguint_to_u256(&solution.given_amount).to_be_bytes::<32>(),
),
),
]),
); );
}
overwrites.insert(user_address, AccountOverride::default().with_balance(eth_balance));
Ok(state_overwrites) Ok(overwrites)
} }
/// Simulate a trade using eth_call for historical blocks /// Simulate a trade using eth_call for historical blocks
@@ -271,10 +321,11 @@ pub async fn simulate_trade_with_eth_call(
rpc_provider: &RPCProvider, rpc_provider: &RPCProvider,
transaction: &tycho_simulation::tycho_execution::encoding::models::Transaction, transaction: &tycho_simulation::tycho_execution::encoding::models::Transaction,
solution: &Solution, solution: &Solution,
block_number: u64,
block: &Block, block: &Block,
) -> miette::Result<BigUint> { ) -> miette::Result<BigUint> {
let executor_bytecode = load_executor_bytecode(solution)?; let first_swap = solution.swaps.first().unwrap();
let protocol_system = &first_swap.component.protocol_system;
let user_address = Address::from_slice(&solution.sender[..20]); let user_address = Address::from_slice(&solution.sender[..20]);
let (max_fee_per_gas, max_priority_fee_per_gas) = calculate_gas_fees(block)?; let (max_fee_per_gas, max_priority_fee_per_gas) = calculate_gas_fees(block)?;
// Convert main transaction to alloy TransactionRequest // Convert main transaction to alloy TransactionRequest
@@ -295,28 +346,24 @@ pub async fn simulate_trade_with_eth_call(
); );
let tycho_router_address = Address::from_slice(&transaction.to[..20]); let tycho_router_address = Address::from_slice(&transaction.to[..20]);
// Copy router storage and code from current block to historical block let router_overwrites = setup_router_overwrites(tycho_router_address, protocol_system)
let router_override = rpc_provider
.copy_contract_storage_and_code(tycho_router_address, ROUTER_BYTECODE_JSON)
.await .await
.wrap_err("Failed to create router override")?; .wrap_err("Failed to create router override")?;
// Set up state overrides including router override let mut user_overwrites = setup_user_overwrites(
let mut state_overwrites = setup_state_overrides(
solution, solution,
transaction, transaction,
user_address, user_address,
&executor_bytecode,
rpc_provider.url.to_string(), rpc_provider.url.to_string(),
block, block,
) )
.await?; // Include executor override for historical blocks .await?;
// Add the router override // Merge router overwrites with user overwrites
state_overwrites.insert(tycho_router_address, router_override); user_overwrites.extend(router_overwrites);
let execution_amount_out = rpc_provider let execution_amount_out = rpc_provider
.simulate_transactions_with_tracing(execution_tx, block_number, state_overwrites) .simulate_transactions_with_tracing(execution_tx, block.number(), user_overwrites)
.await .await
.map_err(|e| { .map_err(|e| {
info!("Execution transaction failed with error: {}", e); info!("Execution transaction failed with error: {}", e);

View File

@@ -9,9 +9,10 @@ mod tycho_rpc;
mod tycho_runner; mod tycho_runner;
mod utils; mod utils;
use std::{fmt::Display, path::PathBuf}; use std::{env, fmt::Display, path::PathBuf};
use clap::Parser; use clap::Parser;
use dotenv::dotenv;
use miette::{miette, IntoDiagnostic, WrapErr}; use miette::{miette, IntoDiagnostic, WrapErr};
use tracing::info; use tracing::info;
use tracing_subscriber::EnvFilter; use tracing_subscriber::EnvFilter;
@@ -78,6 +79,9 @@ impl Args {
} }
fn main() -> miette::Result<()> { fn main() -> miette::Result<()> {
// Load .env file before setting up logging
dotenv().ok();
tracing_subscriber::fmt() tracing_subscriber::fmt()
.with_env_filter(EnvFilter::from_default_env()) .with_env_filter(EnvFilter::from_default_env())
.with_target(false) .with_target(false)
@@ -90,6 +94,10 @@ fn main() -> miette::Result<()> {
} }
info!("{version}"); info!("{version}");
let rpc_url = env::var("RPC_URL")
.into_diagnostic()
.wrap_err("Missing RPC_URL in environment")?;
let args = Args::parse(); let args = Args::parse();
let test_runner = TestRunner::new( let test_runner = TestRunner::new(
@@ -99,7 +107,8 @@ fn main() -> miette::Result<()> {
args.db_url, args.db_url,
args.vm_simulation_traces, args.vm_simulation_traces,
args.execution_traces, args.execution_traces,
); rpc_url,
)?;
test_runner.run_tests() test_runner.run_tests()
} }

View File

@@ -1,10 +1,10 @@
use std::{collections::HashMap, str::FromStr}; use std::str::FromStr;
use alloy::{ use alloy::{
contract::{ContractInstance, Interface}, contract::{ContractInstance, Interface},
dyn_abi::DynSolValue, dyn_abi::DynSolValue,
eips::eip1898::BlockId, eips::eip1898::BlockId,
primitives::{address, keccak256, map::AddressHashMap, Address, FixedBytes, U256}, primitives::{address, map::AddressHashMap, Address, U256},
providers::{Provider, ProviderBuilder}, providers::{Provider, ProviderBuilder},
rpc::types::{ rpc::types::{
state::AccountOverride, state::AccountOverride,
@@ -20,12 +20,9 @@ use alloy::{
use miette::{IntoDiagnostic, WrapErr}; use miette::{IntoDiagnostic, WrapErr};
use serde_json::Value; use serde_json::Value;
use tracing::info; use tracing::info;
use tycho_common::Bytes; use tycho_simulation::tycho_common::Bytes;
use crate::{ use crate::traces::print_call_trace;
execution::{StateOverride, EXECUTOR_ADDRESSES},
traces::print_call_trace,
};
const NATIVE_ALIASES: &[Address] = &[ const NATIVE_ALIASES: &[Address] = &[
address!("0x0000000000000000000000000000000000000000"), address!("0x0000000000000000000000000000000000000000"),
@@ -101,139 +98,13 @@ impl RPCProvider {
.and_then(|block_opt| block_opt.ok_or_else(|| miette::miette!("Block not found"))) .and_then(|block_opt| block_opt.ok_or_else(|| miette::miette!("Block not found")))
} }
/// Helper function to get the contract's storage at the given slot at the latest block.
pub async fn get_storage_at(
&self,
contract_address: Address,
slot: FixedBytes<32>,
) -> miette::Result<FixedBytes<32>> {
let provider = ProviderBuilder::new().connect_http(self.url.clone());
let storage_value = provider
.get_storage_at(contract_address, slot.into())
.await
.into_diagnostic()
.wrap_err("Failed to fetch storage slot")?;
Ok(storage_value.into())
}
pub async fn copy_contract_storage_and_code(
&self,
contract_address: Address,
router_bytecode_json: &str,
) -> miette::Result<StateOverride> {
let json_value: serde_json::Value = serde_json::from_str(router_bytecode_json)
.into_diagnostic()
.wrap_err("Failed to parse router JSON")?;
let bytecode_str = json_value["runtimeBytecode"]
.as_str()
.ok_or_else(|| miette::miette!("No runtimeBytecode field found in router JSON"))?;
// Remove 0x prefix if present
let bytecode_hex = if let Some(stripped) = bytecode_str.strip_prefix("0x") {
stripped
} else {
bytecode_str
};
let router_bytecode = hex::decode(bytecode_hex)
.into_diagnostic()
.wrap_err("Failed to decode router bytecode from hex")?;
// Start with the router bytecode override
let mut state_override = StateOverride::new().with_code(router_bytecode);
for (protocol_name, &executor_address) in EXECUTOR_ADDRESSES.iter() {
let storage_slot = self.calculate_executor_storage_slot(executor_address);
match self
.get_storage_at(contract_address, storage_slot)
.await
{
Ok(value) => {
state_override = state_override.with_state_diff(
alloy::primitives::Bytes::from(storage_slot.to_vec()),
alloy::primitives::Bytes::from(value.to_vec()),
);
}
Err(e) => {
info!(
"Failed to fetch executor approval for {} ({:?}): {}",
protocol_name, executor_address, e
);
}
}
}
Ok(state_override)
}
/// Calculate storage slot for Solidity mapping.
///
/// The solidity code:
/// keccak256(abi.encodePacked(bytes32(key), bytes32(slot)))
pub fn calculate_executor_storage_slot(&self, key: Address) -> FixedBytes<32> {
// Convert key (20 bytes) to 32-byte left-padded array (uint256)
let mut key_bytes = [0u8; 32];
key_bytes[12..].copy_from_slice(key.as_slice());
// The base of the executor storage slot is 1, since there is only one
// variable that is initialized before it (which is _roles in AccessControl.sol).
// In this case, _roles gets slot 0.
// The slots are given in order to the parent contracts' variables first and foremost.
let slot = U256::from(1);
// Convert U256 slot to 32-byte big-endian array
let slot_bytes = slot.to_be_bytes::<32>();
// Concatenate key_bytes + slot_bytes, then keccak hash
let mut buf = [0u8; 64];
buf[..32].copy_from_slice(&key_bytes);
buf[32..].copy_from_slice(&slot_bytes);
keccak256(buf)
}
fn bytes_to_fixed_32(bytes: &[u8]) -> [u8; 32] {
let mut arr = [0u8; 32];
let len = bytes.len().min(32);
// Right-pad by copying to the end of the array
arr[32 - len..].copy_from_slice(&bytes[bytes.len() - len..]);
arr
}
pub async fn simulate_transactions_with_tracing( pub async fn simulate_transactions_with_tracing(
&self, &self,
transaction: TransactionRequest, transaction: TransactionRequest,
block_number: u64, block_number: u64,
state_overwrites: HashMap<Address, StateOverride>, state_overwrites: AddressHashMap<AccountOverride>,
) -> miette::Result<U256> { ) -> miette::Result<U256> {
let provider = ProviderBuilder::new().connect_http(self.url.clone()); let provider = ProviderBuilder::new().connect_http(self.url.clone());
// Convert our StateOverride to alloy's state override format
let mut alloy_state_overrides = AddressHashMap::default();
for (address, override_data) in state_overwrites {
let mut account_override = AccountOverride::default();
if let Some(code) = override_data.code {
account_override.code = Some(alloy::primitives::Bytes::from(code));
}
if let Some(balance) = override_data.balance {
account_override.balance = Some(balance);
}
if !override_data.state_diff.is_empty() {
// Convert Bytes to FixedBytes<32> for storage slots
let mut state_diff = HashMap::default();
for (slot, value) in override_data.state_diff {
let slot_bytes = Self::bytes_to_fixed_32(&slot);
let value_bytes = Self::bytes_to_fixed_32(&value);
state_diff.insert(FixedBytes(slot_bytes), FixedBytes(value_bytes));
}
account_override.state_diff = Some(state_diff);
}
alloy_state_overrides.insert(address, account_override);
}
// Configure tracing options - use callTracer for better formatted results // Configure tracing options - use callTracer for better formatted results
let tracing_options = GethDebugTracingOptions { let tracing_options = GethDebugTracingOptions {
@@ -247,10 +118,10 @@ impl RPCProvider {
let trace_options = GethDebugTracingCallOptions { let trace_options = GethDebugTracingCallOptions {
tracing_options, tracing_options,
state_overrides: if alloy_state_overrides.is_empty() { state_overrides: if state_overwrites.is_empty() {
None None
} else { } else {
Some(alloy_state_overrides) Some(state_overwrites)
}, },
block_overrides: None, block_overrides: None,
}; };

View File

@@ -1,6 +1,14 @@
use std::{collections::HashMap, env, path::PathBuf, str::FromStr, sync::LazyLock}; use std::{
collections::{HashMap, HashSet},
path::PathBuf,
str::FromStr,
sync::LazyLock,
};
use alloy::primitives::{Address, U256}; use alloy::{
primitives::{Address, U256},
rpc::types::Block,
};
use figment::{ use figment::{
providers::{Format, Yaml}, providers::{Format, Yaml},
Figment, Figment,
@@ -13,27 +21,27 @@ use num_traits::{Signed, ToPrimitive, Zero};
use postgres::{Client, Error, NoTls}; use postgres::{Client, Error, NoTls};
use tokio::runtime::Runtime; use tokio::runtime::Runtime;
use tracing::{debug, error, info, warn}; use tracing::{debug, error, info, warn};
use tycho_client::feed::BlockHeader;
use tycho_common::{
dto::{Chain, ProtocolComponent, ResponseAccount, ResponseProtocolState},
models::token::Token,
Bytes,
};
use tycho_simulation::{ use tycho_simulation::{
evm::{ evm::{
decoder::TychoStreamDecoder, decoder::TychoStreamDecoder,
engine_db::tycho_db::PreCachedDB, engine_db::tycho_db::PreCachedDB,
protocol::{ protocol::{
pancakeswap_v2::state::PancakeswapV2State, u256_num::bytes_to_u256, ekubo::state::EkuboState, pancakeswap_v2::state::PancakeswapV2State,
uniswap_v2::state::UniswapV2State, uniswap_v3::state::UniswapV3State, u256_num::bytes_to_u256, uniswap_v2::state::UniswapV2State,
vm::state::EVMPoolState, uniswap_v3::state::UniswapV3State, vm::state::EVMPoolState,
}, },
}, },
protocol::models::DecoderContext, protocol::models::{DecoderContext, Update},
tycho_client::feed::{ tycho_client::feed::{
synchronizer::{ComponentWithState, Snapshot, StateSyncMessage}, synchronizer::{ComponentWithState, Snapshot, StateSyncMessage},
FeedMessage, BlockHeader, FeedMessage,
}, },
tycho_common::{
dto::{Chain, ProtocolComponent, ResponseAccount, ResponseProtocolState},
models::token::Token,
Bytes,
},
tycho_execution::encoding::evm::utils::bytes_to_address,
}; };
use crate::{ use crate::{
@@ -57,11 +65,12 @@ static CLONE_TO_BASE_PROTOCOL: LazyLock<HashMap<&str, &str>> = LazyLock::new(||
pub struct TestRunner { pub struct TestRunner {
db_url: String, db_url: String,
vm_simulation_traces: bool, vm_simulation_traces: bool,
execution_traces: bool,
substreams_path: PathBuf, substreams_path: PathBuf,
adapter_contract_builder: AdapterContractBuilder, adapter_contract_builder: AdapterContractBuilder,
match_test: Option<String>, match_test: Option<String>,
config_file_path: PathBuf, config_file_path: PathBuf,
runtime: Runtime,
rpc_provider: RPCProvider,
} }
impl TestRunner { impl TestRunner {
@@ -72,7 +81,8 @@ impl TestRunner {
db_url: String, db_url: String,
vm_traces: bool, vm_traces: bool,
execution_traces: bool, execution_traces: bool,
) -> Self { rpc_url: String,
) -> miette::Result<Self> {
let base_protocol = CLONE_TO_BASE_PROTOCOL let base_protocol = CLONE_TO_BASE_PROTOCOL
.get(protocol.as_str()) .get(protocol.as_str())
.unwrap_or(&protocol.as_str()) .unwrap_or(&protocol.as_str())
@@ -98,15 +108,19 @@ impl TestRunner {
}; };
let config_file_path = substreams_path.join(&config_file_name); let config_file_path = substreams_path.join(&config_file_name);
Self { let rpc_provider = RPCProvider::new(rpc_url, execution_traces);
let runtime = Runtime::new().into_diagnostic()?;
Ok(Self {
db_url, db_url,
vm_simulation_traces: vm_traces, vm_simulation_traces: vm_traces,
execution_traces, runtime,
rpc_provider,
substreams_path, substreams_path,
adapter_contract_builder, adapter_contract_builder,
match_test, match_test,
config_file_path, config_file_path,
} })
} }
pub fn run_tests(&self) -> miette::Result<()> { pub fn run_tests(&self) -> miette::Result<()> {
@@ -231,22 +245,51 @@ impl TestRunner {
) )
.wrap_err("Failed to run Tycho")?; .wrap_err("Failed to run Tycho")?;
tycho_runner.run_with_rpc_server( let rpc_server = tycho_runner.start_rpc_server()?;
|expected_components, start_block, stop_block| {
validate_state( let expected_ids = test
expected_components, .expected_components
start_block, .iter()
stop_block, .map(|c| c.base.id.to_lowercase())
config, .collect::<Vec<String>>();
&self.adapter_contract_builder,
let (update, component_tokens, response_protocol_states_by_id, block) = self
.fetch_from_tycho_rpc(
&config.protocol_system,
expected_ids,
&config.adapter_contract,
&config.adapter_build_signature,
&config.adapter_build_args,
self.vm_simulation_traces, self.vm_simulation_traces,
self.execution_traces,
)
},
&test.expected_components,
test.start_block,
test.stop_block, test.stop_block,
)? )?;
// Step 1: Validate that all expected components are present on Tycho after indexing
self.validate_state(&test.expected_components, update.clone())?;
// Step 2: Validate Token Balances
match config.skip_balance_check {
true => info!("Skipping balance check"),
false => {
self.validate_token_balances(
&component_tokens,
&response_protocol_states_by_id,
test.start_block,
)?;
info!("All token balances match the values found onchain")
}
}
// Step 3: Run Tycho Simulation and Execution
self.simulate_and_execute(
update,
&component_tokens,
block,
Some(test.expected_components.clone()),
)?;
tycho_runner.stop_rpc_server(rpc_server)?;
Ok(())
} }
fn empty_database(&self) -> Result<(), Error> { fn empty_database(&self) -> Result<(), Error> {
@@ -263,55 +306,85 @@ impl TestRunner {
Ok(()) Ok(())
} }
}
fn validate_state( /// Fetches protocol data from the Tycho RPC server and prepares it for validation and
expected_components: &Vec<ProtocolComponentWithTestConfig>, /// simulation.
start_block: u64, ///
stop_block: u64, /// This method connects to the running Tycho RPC server to retrieve protocol components,
config: &IntegrationTestsConfig, /// states, and contract storage. It then sets up the Tycho Decoder and creates an update
adapter_contract_builder: &AdapterContractBuilder, /// message that can be used for validation and simulation testing.
///
/// # Arguments
/// * `protocol_system` - The protocol system identifier (e.g., "uniswap_v2", "balancer_v2")
/// * `expected_component_ids` - List of component IDs to fetch from Tycho
/// * `adapter_contract` - Optional adapter contract name for VM-based protocols
/// * `adapter_build_signature` - Optional build signature for the adapter contract
/// * `adapter_build_args` - Optional build arguments for the adapter contract
/// * `vm_simulation_traces` - Whether to enable VM simulation traces
/// * `stop_block` - The block number to fetch data for
///
/// # Returns
/// A tuple containing:
/// - `Update` - Decoded protocol state update for simulation
/// - `HashMap<String, Vec<Token>>` - Token mappings for each component
/// - `HashMap<String, ResponseProtocolState>` - Protocol states by component ID
/// - `Block` - The block header for the specified block
#[allow(clippy::type_complexity, clippy::too_many_arguments)]
fn fetch_from_tycho_rpc(
&self,
protocol_system: &str,
expected_component_ids: Vec<String>,
adapter_contract: &Option<String>,
adapter_build_signature: &Option<String>,
adapter_build_args: &Option<String>,
vm_simulation_traces: bool, vm_simulation_traces: bool,
execution_traces: bool, stop_block: u64,
) -> miette::Result<()> { ) -> miette::Result<(
let rt = Runtime::new().unwrap(); Update,
HashMap<String, Vec<Token>>,
HashMap<String, ResponseProtocolState>,
Block,
)> {
// Create Tycho client for the RPC server // Create Tycho client for the RPC server
let tycho_client = TychoClient::new("http://localhost:4242") let tycho_client = TychoClient::new("http://localhost:4242")
.into_diagnostic() .into_diagnostic()
.wrap_err("Failed to create Tycho client")?; .wrap_err("Failed to create Tycho client")?;
let chain = Chain::Ethereum; let chain = Chain::Ethereum;
let protocol_system = &config.protocol_system;
// Fetch data from Tycho RPC. We use block_on to avoid using async functions on the testing // Fetch data from Tycho RPC. We use block_on to avoid using async functions on the testing
// module, in order to simplify debugging // module, in order to simplify debugging
let protocol_components = rt let protocol_components = self
.runtime
.block_on(tycho_client.get_protocol_components(protocol_system, chain)) .block_on(tycho_client.get_protocol_components(protocol_system, chain))
.into_diagnostic() .into_diagnostic()
.wrap_err("Failed to get protocol components")?; .wrap_err("Failed to get protocol components")?;
let expected_ids = expected_components let protocol_states = self
.iter() .runtime
.map(|c| c.base.id.to_lowercase()) .block_on(tycho_client.get_protocol_state(
.collect::<Vec<String>>(); protocol_system,
expected_component_ids.clone(),
let protocol_states = rt chain,
.block_on(tycho_client.get_protocol_state(protocol_system, expected_ids, chain)) ))
.into_diagnostic() .into_diagnostic()
.wrap_err("Failed to get protocol state")?; .wrap_err("Failed to get protocol state")?;
let vm_storages = rt let vm_storages = self
.runtime
.block_on(tycho_client.get_contract_state(protocol_system, chain)) .block_on(tycho_client.get_contract_state(protocol_system, chain))
.into_diagnostic() .into_diagnostic()
.wrap_err("Failed to get contract state")?; .wrap_err("Failed to get contract state")?;
// Create a map of component IDs to components for easy lookup // Create a map of component IDs to components for easy lookup
let components_by_id: HashMap<String, ProtocolComponent> = protocol_components let mut components_by_id: HashMap<String, ProtocolComponent> = protocol_components
.clone() .clone()
.into_iter() .into_iter()
.map(|c| (c.id.to_lowercase(), c)) .map(|c| (c.id.to_lowercase(), c))
.collect(); .collect();
if !expected_component_ids.is_empty() {
components_by_id.retain(|id, _| expected_component_ids.contains(id))
};
let protocol_states_by_id: HashMap<String, ResponseProtocolState> = protocol_states let protocol_states_by_id: HashMap<String, ResponseProtocolState> = protocol_states
.into_iter() .into_iter()
@@ -321,7 +394,189 @@ fn validate_state(
debug!("Found {} protocol components", components_by_id.len()); debug!("Found {} protocol components", components_by_id.len());
debug!("Found {} protocol states", protocol_states_by_id.len()); debug!("Found {} protocol states", protocol_states_by_id.len());
// Step 1: Validate that all expected components are present on Tycho after indexing let adapter_contract_path;
let mut adapter_contract_path_str: Option<&str> = None;
// Adapter contract will only be configured for VM protocols, not natively implemented
// protocols.
if let Some(adapter_contract_name) = &adapter_contract {
// Build/find the adapter contract
adapter_contract_path = match self
.adapter_contract_builder
.find_contract(adapter_contract_name)
{
Ok(path) => {
debug!("Found adapter contract at: {}", path.display());
path
}
Err(_) => {
info!("Adapter contract not found, building it...");
self.adapter_contract_builder
.build_target(
adapter_contract_name,
adapter_build_signature.as_deref(),
adapter_build_args.as_deref(),
)
.wrap_err("Failed to build adapter contract")?
}
};
debug!("Using adapter contract: {}", adapter_contract_path.display());
adapter_contract_path_str = Some(adapter_contract_path.to_str().unwrap());
}
// Clear the shared database state to ensure test isolation
// This prevents state from previous tests from affecting the current test
tycho_simulation::evm::engine_db::SHARED_TYCHO_DB.clear();
let mut decoder = TychoStreamDecoder::new();
decoder.skip_state_decode_failures(true);
let mut decoder_context = DecoderContext::new().vm_traces(vm_simulation_traces);
if let Some(vm_adapter_path) = adapter_contract_path_str {
decoder_context = decoder_context.vm_adapter_path(vm_adapter_path);
}
match protocol_system {
"uniswap_v2" | "sushiswap_v2" => {
decoder.register_decoder_with_context::<UniswapV2State>(
protocol_system,
decoder_context,
);
}
"pancakeswap_v2" => {
decoder.register_decoder_with_context::<PancakeswapV2State>(
protocol_system,
decoder_context,
);
}
"uniswap_v3" | "pancakeswap_v3" => {
decoder.register_decoder_with_context::<UniswapV3State>(
protocol_system,
decoder_context,
);
}
"ekubo_v2" => {
decoder
.register_decoder_with_context::<EkuboState>(protocol_system, decoder_context);
}
_ => {
decoder.register_decoder_with_context::<EVMPoolState<PreCachedDB>>(
protocol_system,
decoder_context,
);
}
}
// Mock a stream message, with only a Snapshot and no deltas
let mut states: HashMap<String, ComponentWithState> = HashMap::new();
for (id, component) in &components_by_id {
let component_id = id;
let state = protocol_states_by_id
.get(component_id)
.wrap_err(format!(
"Component {id} does not exist in protocol_states_by_id {protocol_states_by_id:?}"
))?
.clone();
let component_with_state = ComponentWithState {
state,
component: component.clone(),
component_tvl: None,
// Neither UniswapV4 with hooks nor certain balancer pools are currently supported
// for SDK testing
entrypoints: vec![],
};
states.insert(component_id.clone(), component_with_state);
}
// Convert vm_storages to a HashMap
let vm_storage: HashMap<Bytes, ResponseAccount> = vm_storages
.into_iter()
.map(|x| (x.address.clone(), x))
.collect();
let snapshot = Snapshot { states, vm_storage };
// Get block header to extract the timestamp
let block_header = self
.runtime
.block_on(
self.rpc_provider
.get_block_header(stop_block),
)
.wrap_err("Failed to get block header")?;
let state_msgs: HashMap<String, StateSyncMessage<BlockHeader>> = HashMap::from([(
String::from(protocol_system),
StateSyncMessage {
header: BlockHeader {
hash: (*block_header.hash()).into(),
number: stop_block,
parent_hash: Bytes::default(),
revert: false,
timestamp: block_header.header.timestamp,
},
snapshots: snapshot,
deltas: None,
removed_components: HashMap::new(),
},
)]);
let all_tokens = self
.runtime
.block_on(tycho_client.get_tokens(Chain::Ethereum, None, None))
.into_diagnostic()
.wrap_err("Failed to get tokens")?;
debug!("Loaded {} tokens", all_tokens.len());
self.runtime
.block_on(decoder.set_tokens(all_tokens));
let message: FeedMessage = FeedMessage { state_msgs, sync_states: Default::default() };
let block_msg = self
.runtime
.block_on(decoder.decode(&message))
.into_diagnostic()
.wrap_err("Failed to decode message")?;
let mut component_tokens: HashMap<String, Vec<Token>> = HashMap::new();
for (id, comp) in block_msg.new_pairs.iter() {
component_tokens
.entry(id.clone())
.or_insert_with(|| comp.tokens.clone());
}
Ok((block_msg, component_tokens, protocol_states_by_id, block_header))
}
/// Validates that the protocol components retrieved from Tycho match the expected
/// configuration.
///
/// This method compares each expected component from the test configuration against
/// the actual components found in the protocol state update. It ensures that all
/// expected components are present and their properties (tokens, addresses, fees, etc.)
/// match the expected values.
///
/// # Arguments
/// * `expected_components` - Vector of expected protocol components with their test
/// configuration
/// * `block_msg` - The decoded protocol state update containing the actual component data
///
/// # Returns
/// Returns `Ok(())` if all expected components are found and match their expected state.
///
/// # Errors
/// Returns an error if:
/// - Any expected component is missing from the Tycho state
/// - Any component's properties don't match the expected values (shows detailed diff)
fn validate_state(
&self,
expected_components: &Vec<ProtocolComponentWithTestConfig>,
block_msg: Update,
) -> miette::Result<()> {
debug!("Validating {:?} expected components", expected_components.len()); debug!("Validating {:?} expected components", expected_components.len());
for expected_component in expected_components { for expected_component in expected_components {
let component_id = expected_component let component_id = expected_component
@@ -329,7 +584,8 @@ fn validate_state(
.id .id
.to_lowercase(); .to_lowercase();
let component = components_by_id let component = block_msg
.new_pairs
.get(&component_id) .get(&component_id)
.ok_or_else(|| miette!("Component {:?} was not found on Tycho", component_id))?; .ok_or_else(|| miette!("Component {:?} was not found on Tycho", component_id))?;
@@ -349,200 +605,71 @@ fn validate_state(
} }
} }
} }
info!("All expected components were successfully found on Tycho and match the expected state"); info!(
"All expected components were successfully found on Tycho and match the expected state"
// Step 2: Validate Token Balances );
let rpc_url = env::var("RPC_URL") Ok(())
.into_diagnostic()
.wrap_err("Missing RPC_URL in environment")?;
let rpc_provider = RPCProvider::new(rpc_url, execution_traces);
match config.skip_balance_check {
true => info!("Skipping balance check"),
false => {
validate_token_balances(
&components_by_id,
&protocol_states_by_id,
start_block,
&rt,
&rpc_provider,
)?;
info!("All token balances match the values found onchain")
}
} }
// Step 3: Run Tycho Simulation /// Performs comprehensive simulation and execution testing on protocol components.
// Filter out components that have skip_simulation = true ///
let simulation_component_ids: std::collections::HashSet<String> = expected_components /// This method tests each protocol component by:
/// 1. Computing spot prices for all token pairs
/// 2. Simulating swaps with different input amounts (0.1%, 1%, 10% of limits)
/// 3. Testing all possible swap directions between tokens
/// 4. Simulating actual execution using historical block state
/// 5. Comparing simulation results with execution results for accuracy
///
/// The simulation uses the Tycho SDK to calculate expected outputs, while execution
/// uses `debug_traceCall` with state overwrites to simulate actual on-chain behavior
/// at historical blocks.
///
/// # Arguments
/// * `update` - The decoded protocol state containing all component data
/// * `component_tokens` - Mapping of component IDs to their associated tokens
/// * `block` - The historical block to use for execution testing
/// * `expected_components` - Optional test configuration to determine which components to skip
///
/// # Returns
/// Returns `Ok(())` if all simulations and executions complete successfully within tolerance.
///
/// # Errors
/// Returns an error if:
/// - Spot price calculation fails for any component
/// - Simulation fails to calculate amount out
/// - Execution simulation fails or reverts
/// - Difference between simulation and execution exceeds 5% slippage tolerance
///
/// Components can be skipped using `skip_simulation` or `skip_execution` flags
/// in the test configuration.
fn simulate_and_execute(
&self,
update: Update,
component_tokens: &HashMap<String, Vec<Token>>,
block: Block,
expected_components: Option<Vec<ProtocolComponentWithTestConfig>>,
) -> miette::Result<()> {
let mut skip_simulation = HashSet::new();
let mut skip_execution = HashSet::new();
if let Some(components) = expected_components {
skip_simulation = components
.iter() .iter()
.filter(|c| !c.skip_simulation) .filter(|c| c.skip_simulation)
.map(|c| c.base.id.to_lowercase()) .map(|c| c.base.id.to_lowercase())
.collect(); .collect();
skip_execution = components
info!("Components to simulate: {}", simulation_component_ids.len());
for id in &simulation_component_ids {
info!("Simulating component: {}", id);
}
if simulation_component_ids.is_empty() {
info!("No components to simulate, skipping simulation validation");
return Ok(());
}
// Filter out components that have skip_execution = true
let execution_component_ids: std::collections::HashSet<String> = expected_components
.iter() .iter()
.filter(|c| !c.skip_execution) .filter(|c| c.skip_execution)
.map(|c| c.base.id.clone().to_lowercase()) .map(|c| c.base.id.to_lowercase())
.collect(); .collect();
let adapter_contract_path;
let mut adapter_contract_path_str: Option<&str> = None;
// Adapter contract will only be configured for VM protocols, not natively implemented
// protocols.
if let Some(adapter_contract_name) = &config.adapter_contract {
// Build/find the adapter contract
adapter_contract_path = match adapter_contract_builder.find_contract(adapter_contract_name)
{
Ok(path) => {
debug!("Found adapter contract at: {}", path.display());
path
}
Err(_) => {
info!("Adapter contract not found, building it...");
adapter_contract_builder
.build_target(
adapter_contract_name,
config
.adapter_build_signature
.as_deref(),
config.adapter_build_args.as_deref(),
)
.wrap_err("Failed to build adapter contract")?
}
};
debug!("Using adapter contract: {}", adapter_contract_path.display());
adapter_contract_path_str = Some(adapter_contract_path.to_str().unwrap());
} }
// Clear the shared database state to ensure test isolation for (id, state) in update.states.iter() {
// This prevents state from previous tests from affecting the current test if skip_simulation.contains(id) {
tycho_simulation::evm::engine_db::SHARED_TYCHO_DB.clear(); info!("Skipping simulation for component {id}");
continue
let mut decoder = TychoStreamDecoder::new();
let mut decoder_context = DecoderContext::new().vm_traces(vm_simulation_traces);
if let Some(vm_adapter_path) = adapter_contract_path_str {
decoder_context = decoder_context.vm_adapter_path(vm_adapter_path);
} }
match protocol_system.as_str() { if let Some(tokens) = component_tokens.get(id) {
"uniswap_v2" | "sushiswap_v2" => {
decoder
.register_decoder_with_context::<UniswapV2State>(protocol_system, decoder_context);
}
"pancakeswap_v2" => {
decoder.register_decoder_with_context::<PancakeswapV2State>(
protocol_system,
decoder_context,
);
}
"uniswap_v3" | "pancakeswap_v3" => {
decoder
.register_decoder_with_context::<UniswapV3State>(protocol_system, decoder_context);
}
_ => {
decoder.register_decoder_with_context::<EVMPoolState<PreCachedDB>>(
protocol_system,
decoder_context,
);
}
}
// Mock a stream message, with only a Snapshot and no deltas
let mut states: HashMap<String, ComponentWithState> = HashMap::new();
for (id, component) in &components_by_id {
let component_id = id;
// Only include components that should be simulated
if !simulation_component_ids.contains(component_id) {
continue;
}
let state = protocol_states_by_id
.get(component_id)
.wrap_err(format!(
"Component {id} does not exist in protocol_states_by_id {protocol_states_by_id:?}"
))?
.clone();
let component_with_state = ComponentWithState {
state,
component: component.clone(),
component_tvl: None,
// Neither UniswapV4 with hooks nor certain Balancer pools are currently supported for
// SDK testing
entrypoints: vec![],
};
states.insert(component_id.clone(), component_with_state);
}
// Convert vm_storages to a HashMap - match Python behavior exactly
let vm_storage: HashMap<Bytes, ResponseAccount> = vm_storages
.into_iter()
.map(|x| (x.address.clone(), x))
.collect();
let snapshot = Snapshot { states, vm_storage };
let bytes = [0u8; 32];
// Get block header to extract the timestamp
let block_header = rt
.block_on(rpc_provider.get_block_header(stop_block))
.wrap_err("Failed to get block header")?;
let state_msgs: HashMap<String, StateSyncMessage<BlockHeader>> = HashMap::from([(
String::from(protocol_system),
StateSyncMessage {
header: BlockHeader {
hash: Bytes::from(bytes),
number: stop_block,
parent_hash: Bytes::from(bytes),
revert: false,
timestamp: block_header.header.timestamp,
},
snapshots: snapshot,
deltas: None,
removed_components: HashMap::new(),
},
)]);
let all_tokens = rt
.block_on(tycho_client.get_tokens(Chain::Ethereum, None, None))
.into_diagnostic()
.wrap_err("Failed to get tokens")?;
debug!("Loaded {} tokens", all_tokens.len());
rt.block_on(decoder.set_tokens(all_tokens));
let mut pairs: HashMap<String, Vec<Token>> = HashMap::new();
let message: FeedMessage = FeedMessage { state_msgs, sync_states: Default::default() };
let block_msg = rt
.block_on(decoder.decode(&message))
.into_diagnostic()
.wrap_err("Failed to decode message")?;
for (id, comp) in block_msg.new_pairs.iter() {
pairs
.entry(id.clone())
.or_insert_with(|| comp.tokens.clone());
}
for (id, state) in block_msg.states.iter() {
if let Some(tokens) = pairs.get(id) {
let formatted_token_str = format!("{:}/{:}", &tokens[0].symbol, &tokens[1].symbol);
state
.spot_price(&tokens[0], &tokens[1])
@@ -550,11 +677,12 @@ fn validate_state(
.into_diagnostic()
.wrap_err(format!("Error calculating spot price for Pool {id:?}."))?;
// Test get_amount_out with different percentages of limits. The reserves or limits are
// relevant because we need to know how much to test with. We don't know if a pool is
// going to revert with 10 or 10 million USDC, for example, so by using the limits we
// can use "safe values" where the sim shouldn't break.
// We then retrieve the amount out for 0.1%, 1% and 10%.
let percentages = [0.001, 0.01, 0.1];
// Test all permutations of swap directions
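The comment above describes sizing the test trades from the pool's limits rather than from fixed amounts. A minimal sketch of that sizing, assuming the limit is available as a `num_bigint::BigUint` (the helper below is illustrative, not part of the crate):

```rust
use num_bigint::BigUint;

// Derive the 0.1%, 1% and 10% test amounts from a limit-style value.
fn test_amounts(limit: &BigUint) -> Vec<BigUint> {
    // Express the percentages as per-mille numerators to stay in integer arithmetic.
    [1u32, 10, 100]
        .iter()
        .map(|per_mille| limit * *per_mille / 1000u32)
        .collect()
}

fn main() {
    let limit = BigUint::from(5_000_000u64);
    assert_eq!(
        test_amounts(&limit),
        vec![BigUint::from(5_000u64), BigUint::from(50_000u64), BigUint::from(500_000u64)]
    );
}
```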
@@ -610,30 +738,30 @@ fn validate_state(
);
// Only execute for components that should have execution
- if !execution_component_ids.contains(id) {
+ if skip_execution.contains(id) {
info!("Skipping execution for component {id}");
continue;
}
- let protocol_component = block_msg.new_pairs.get(id).unwrap();
+ let protocol_component = update.new_pairs.get(id).unwrap();
let (calldata, solution) = encode_swap(
- protocol_component.clone(),
- token_in.address.clone(),
- token_out.address.clone(),
- amount_in,
- amount_out_result.amount.clone(),
+ protocol_component,
+ &token_in.address,
+ &token_out.address,
+ &amount_in,
+ &amount_out_result.amount,
)?;
- info!("Simulating swap at historical block {}", block_header.header.number);
+ info!("Simulating swap at historical block {}", block.number());
// Simulate the trade using debug_traceCall with overwrites
let execution_amount_out =
- rt.block_on(execution::simulate_trade_with_eth_call(
- &rpc_provider,
+ self.runtime
+ .block_on(execution::simulate_trade_with_eth_call(
+ &self.rpc_provider,
&calldata,
&solution,
- stop_block,
- &block_header,
+ &block,
));
match execution_amount_out {
@@ -672,36 +800,37 @@ fn validate_state(
}
}
Ok(())
}
/// Validate that the token balances of the components match the values
/// on-chain, extracted by querying the token balances using a node.
fn validate_token_balances(
- components_by_id: &HashMap<String, ProtocolComponent>,
+ &self,
+ component_tokens: &HashMap<String, Vec<Token>>,
protocol_states_by_id: &HashMap<String, ResponseProtocolState>,
start_block: u64,
- rt: &Runtime,
- rpc_provider: &RPCProvider,
) -> miette::Result<()> {
- for (id, component) in components_by_id.iter() {
- let component_state = protocol_states_by_id.get(id);
- for token in &component.tokens {
+ for (id, component) in protocol_states_by_id.iter() {
+ let tokens = component_tokens.get(id);
+ if let Some(tokens) = tokens {
+ for token in tokens {
let mut balance: U256 = U256::from(0);
- if let Some(state) = component_state {
- let bal = state.balances.get(token);
+ let bal = component.balances.get(&token.address);
if let Some(bal) = bal {
let bal = bal.clone().into();
balance = bytes_to_u256(bal);
}
- }
- info!("Validating token balance for component {} and token {}", component.id, token);
- let token_address = Address::from_slice(&token[..20]);
- let component_address = Address::from_str(component.id.as_str())
- .expect("Failed to parse component address");
- let node_balance = rt.block_on(rpc_provider.get_token_balance(
+ info!(
+ "Validating token balance for component {} and token {}",
+ id, token.symbol
+ );
+ let token_address = bytes_to_address(&token.address).into_diagnostic()?;
+ let component_address =
+ Address::from_str(id.as_str()).expect("Failed to parse component address");
+ let node_balance =
+ self.runtime
+ .block_on(self.rpc_provider.get_token_balance(
token_address,
component_address,
start_block,
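For reference, the comparison performed here boils down to turning the indexed balance bytes into a `U256` and checking them against the node's answer. A small self-contained sketch of that step, assuming alloy's `U256` and treating a missing entry as zero, as the code above does:

```rust
use alloy::primitives::U256;

// Compare an indexed balance (big-endian bytes, possibly absent) with a node balance.
fn balances_match(indexed: Option<&[u8]>, node_balance: U256) -> bool {
    let indexed = indexed.map(U256::from_be_slice).unwrap_or(U256::ZERO);
    indexed == node_balance
}

fn main() {
    let onchain = U256::from(3_944_333_327u64);
    let raw = 3_944_333_327u64.to_be_bytes();
    assert!(balances_match(Some(&raw[..]), onchain));
    assert!(!balances_match(None, onchain)); // missing balance defaults to zero
}
```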
@@ -709,29 +838,30 @@ fn validate_token_balances(
if balance != node_balance {
return Err(miette!(
"Token balance mismatch for component {} and token {}",
- component.id,
- token
+ id,
+ token.symbol
));
}
info!(
"Token balance for component {} and token {} matches the expected value",
- component.id, token
+ id, token.symbol
);
}
+ } else {
+ return Err(miette!("Couldn't find tokens for component {}", id,));
+ }
}
Ok(())
}
}
#[cfg(test)]
mod tests {
- use std::{collections::HashMap, str::FromStr};
+ use std::{collections::HashMap, env, str::FromStr};
use dotenv::dotenv;
use glob::glob;
- use tycho_common::{
- dto::{ProtocolComponent, ResponseProtocolState},
- Bytes,
- };
+ use tycho_simulation::tycho_common::{dto::ResponseProtocolState, Bytes};
use super::*;
@@ -781,21 +911,29 @@ mod tests {
}
}
fn get_mocked_runner() -> TestRunner {
dotenv().ok();
let rpc_url = env::var("RPC_URL").unwrap();
let current_dir = std::env::current_dir().unwrap();
TestRunner::new(
current_dir,
"test-protocol".to_string(),
None,
"".to_string(),
false,
false,
rpc_url,
)
.unwrap()
}
#[test]
fn test_token_balance_validation() {
- dotenv().ok();
- let eth_rpc_url = env::var("RPC_URL").expect("Missing RPC_URL in environment");
- let rpc_provider = RPCProvider::new(eth_rpc_url, false);
+ let runner = get_mocked_runner();
// Setup mock data
let block_number = 21998530;
let token_bytes = Bytes::from_str("0x0000000000000000000000000000000000000000").unwrap();
let component_id = "0x787B8840100d9BaAdD7463f4a73b5BA73B00C6cA".to_string();
let token = Token::new(&token_bytes, "FAKE", 18, 0, &[], Chain::Ethereum.into(), 100);
let component = ProtocolComponent {
id: component_id.clone(),
tokens: vec![token_bytes.clone()],
..Default::default()
};
let mut balances = HashMap::new();
let balance_bytes = Bytes::from(
@@ -810,38 +948,25 @@ mod tests {
..Default::default()
};
- let mut components_by_id = HashMap::new();
- components_by_id.insert(component_id.clone(), component.clone());
+ let mut component_tokens = HashMap::new();
+ component_tokens.insert(component_id.clone(), vec![token]);
let mut protocol_states_by_id = HashMap::new();
protocol_states_by_id.insert(component_id.clone(), protocol_state.clone());
- let rt = Runtime::new().unwrap();
- dotenv().ok();
- let result = validate_token_balances(
- &components_by_id,
- &protocol_states_by_id,
- block_number,
- &rt,
- &rpc_provider,
- );
+ let result =
+ runner.validate_token_balances(&component_tokens, &protocol_states_by_id, block_number);
assert!(result.is_ok(), "Should pass when balance check is performed and balances match");
}
#[test]
fn test_token_balance_validation_fails_on_mismatch() {
- dotenv().ok();
- let eth_rpc_url = env::var("RPC_URL").expect("Missing RPC_URL in environment");
- let rpc_provider = RPCProvider::new(eth_rpc_url, false);
+ let runner = get_mocked_runner();
// Setup mock data
let block_number = 21998530;
let token_bytes = Bytes::from_str("0x0000000000000000000000000000000000000000").unwrap();
let component_id = "0x787B8840100d9BaAdD7463f4a73b5BA73B00C6cA".to_string();
let token = Token::new(&token_bytes, "FAKE", 18, 0, &[], Chain::Ethereum.into(), 100);
let component = ProtocolComponent {
id: component_id.clone(),
tokens: vec![token_bytes.clone()],
..Default::default()
};
// Set expected balance to zero
let mut balances = HashMap::new();
@@ -853,20 +978,14 @@ mod tests {
..Default::default()
};
- let mut components_by_id = HashMap::new();
- components_by_id.insert(component_id.clone(), component.clone());
+ let mut component_tokens = HashMap::new();
+ component_tokens.insert(component_id.clone(), vec![token]);
let mut protocol_states_by_id = HashMap::new();
protocol_states_by_id.insert(component_id.clone(), protocol_state.clone());
- let rt = Runtime::new().unwrap();
dotenv().ok();
- let result = validate_token_balances(
- &components_by_id,
- &protocol_states_by_id,
- block_number,
- &rt,
- &rpc_provider,
- );
+ let result =
+ runner.validate_token_balances(&component_tokens, &protocol_states_by_id, block_number);
assert!(
result.is_err(),
"Should fail when balance check is performed and balances do not match"

View File

@@ -5,8 +5,8 @@
use alloy::dyn_abi::{DynSolType, DynSolValue};
use colored::Colorize;
- use foundry_evm::traces::identifier::SignaturesIdentifier;
use serde_json::Value;
+ use tycho_simulation::foundry_evm::traces::identifier::SignaturesIdentifier;
/// Decode method selectors and return function info
pub async fn decode_method_selector_with_info(input: &str) -> Option<(String, Vec<DynSolType>)> {
@@ -84,10 +84,9 @@ pub async fn decode_function_with_params(input: &str) -> Option<String> {
if input.len() > 10 {
let calldata_hex = &input[10..]; // Skip the 4-byte selector
if let Ok(calldata) = hex::decode(calldata_hex) {
- if let Ok(decoded_values) =
+ if let Ok(DynSolValue::Tuple(values)) =
DynSolType::Tuple(param_types.clone()).abi_decode(&calldata)
{
- if let DynSolValue::Tuple(values) = decoded_values {
let formatted_params: Vec<String> = values
.iter()
.zip(param_types.iter())
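As a standalone illustration of the decoding pattern used above, here is a sketch that hand-builds the calldata body for an `(address, uint256)` parameter list and decodes it with the same `DynSolType::Tuple(...).abi_decode` call; the values are made up for the example:

```rust
use alloy::dyn_abi::{DynSolType, DynSolValue};
use alloy::primitives::{Address, U256};

fn main() {
    // Two static parameters occupy two 32-byte words, exactly what follows the
    // 4-byte selector in calldata for e.g. transfer(address,uint256).
    let mut calldata = [0u8; 64];
    calldata[12..32].copy_from_slice(Address::ZERO.as_slice());
    calldata[32..64].copy_from_slice(&U256::from(42u64).to_be_bytes::<32>());

    let param_types = vec![DynSolType::Address, DynSolType::Uint(256)];
    if let Ok(DynSolValue::Tuple(values)) =
        DynSolType::Tuple(param_types).abi_decode(&calldata)
    {
        assert!(matches!(values[0], DynSolValue::Address(_)));
        assert!(matches!(values[1], DynSolValue::Uint(_, 256)));
    }
}
```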
@@ -98,7 +97,6 @@ pub async fn decode_function_with_params(input: &str) -> Option<String> {
}
}
}
- }
// Fallback: if decoding fails, put the whole calldata inside the method call
return Some(format!("{}({})", name, input));
@@ -255,11 +253,6 @@ pub async fn print_call_trace(call: &Value, depth: usize) {
.any(|field| call_obj.get(*field).is_some());
let call_failed = has_error || has_revert || has_other_error;
- // Debug: if there's any failure, print all fields to help identify the error structure
- if call_failed && depth <= 2 {
- eprintln!("DEBUG: Failed call at depth {}: {:#?}", depth, call_obj);
- }
// Create tree structure prefix
let tree_prefix = if depth == 0 { "".to_string() } else { " ".repeat(depth) + "├─ " };
@@ -295,19 +288,19 @@ pub async fn print_call_trace(call: &Value, depth: usize) {
let mut found_error = false;
if let Some(error) = call_obj.get("error") {
- println!("{}{}", result_indent, format!("[Error] {}", error));
+ println!("{result_indent} [Error] {error}");
found_error = true;
}
if let Some(revert_reason) = call_obj.get("revertReason") {
- println!("{}{}", result_indent, format!("[Revert] {}", revert_reason));
+ println!("{}[Revert] {}", result_indent, revert_reason);
found_error = true;
}
// Check for other possible error fields
for error_field in ["revert", "reverted", "message", "errorMessage", "reason"] {
if let Some(error_val) = call_obj.get(error_field) {
- println!("{}{}", result_indent, format!("[{}] {}", error_field, error_val));
+ println!("{}[{}] {}", result_indent, error_field, error_val);
found_error = true;
}
}
@@ -319,12 +312,12 @@ pub async fn print_call_trace(call: &Value, depth: usize) {
{
if !output.is_empty() && output != "0x" {
// Try to decode revert reason from output if it looks like revert data
- if output.starts_with("0x08c379a0") {
+ if let Some(stripped) = output.strip_prefix("0x08c379a0") {
// Error(string) selector
- if let Ok(decoded) = hex::decode(&output[10..]) {
+ if let Ok(decoded) = hex::decode(stripped) {
- if let Ok(reason) = alloy::dyn_abi::DynSolType::String.abi_decode(&decoded)
+ if let Ok(alloy::dyn_abi::DynSolValue::String(reason_str)) =
+ alloy::dyn_abi::DynSolType::String.abi_decode(&decoded)
{
- if let alloy::dyn_abi::DynSolValue::String(reason_str) = reason {
println!(
"{}{}",
result_indent,
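The branch above recognizes the standard `Error(string)` revert encoding. A compact sketch of the same decode path, mirroring the `strip_prefix` and `DynSolType::String.abi_decode` calls used there (the revert payload below is hand-written for the string "nope"):

```rust
use alloy::dyn_abi::{DynSolType, DynSolValue};
use alloy::primitives::hex;

// Extract the human-readable reason from Error(string) revert data, if present.
fn decode_revert_reason(output: &str) -> Option<String> {
    let stripped = output.strip_prefix("0x08c379a0")?; // Error(string) selector
    let decoded = hex::decode(stripped).ok()?;
    match DynSolType::String.abi_decode(&decoded).ok()? {
        DynSolValue::String(reason) => Some(reason),
        _ => None,
    }
}

fn main() {
    // offset (0x20) | length (4) | "nope" padded to 32 bytes
    let output = concat!(
        "0x08c379a0",
        "0000000000000000000000000000000000000000000000000000000000000020",
        "0000000000000000000000000000000000000000000000000000000000000004",
        "6e6f706500000000000000000000000000000000000000000000000000000000"
    );
    assert_eq!(decode_revert_reason(output).as_deref(), Some("nope"));
}
```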
@@ -334,13 +327,12 @@ pub async fn print_call_trace(call: &Value, depth: usize) {
}
}
}
- }
if !found_error {
- println!("{}{}", result_indent, format!("[Return] {}", output));
+ println!("{}[Return] {}", result_indent, output);
}
} else if !found_error {
- println!("{}{}", result_indent, "[Return]");
+ println!("{}[Return]", result_indent);
}
}

View File

@@ -1,14 +1,16 @@
use std::{collections::HashMap, error::Error as StdError, fmt};
use tracing::debug;
- use tycho_client::{rpc::RPCClient, HttpRPCClient};
- use tycho_common::{
+ use tycho_simulation::{
+ tycho_client::{rpc::RPCClient, HttpRPCClient},
+ tycho_common::{
dto::{
- Chain, PaginationParams, ProtocolComponent, ProtocolComponentsRequestBody, ResponseAccount,
- ResponseProtocolState, ResponseToken, StateRequestBody, VersionParam,
+ Chain, PaginationParams, ProtocolComponent, ProtocolComponentsRequestBody,
+ ResponseAccount, ResponseProtocolState, ResponseToken, StateRequestBody, VersionParam,
},
models::token::Token,
Bytes,
+ },
};
/// Custom error type for RPC operations
@@ -33,8 +35,8 @@ impl From<Box<dyn StdError>> for RpcError {
}
}
- impl From<tycho_client::RPCError> for RpcError {
- fn from(error: tycho_client::RPCError) -> Self {
+ impl From<tycho_simulation::tycho_client::RPCError> for RpcError {
+ fn from(error: tycho_simulation::tycho_client::RPCError) -> Self {
RpcError::ClientError(error.to_string())
}
}
@@ -79,7 +81,7 @@ impl TychoClient {
) -> Result<Vec<ResponseProtocolState>, RpcError> {
let chunk_size = 100;
let concurrency = 1;
- let version: tycho_common::dto::VersionParam = VersionParam::default();
+ let version: tycho_simulation::tycho_common::dto::VersionParam = VersionParam::default();
let protocol_states = self
.http_client

View File

@@ -9,15 +9,16 @@ use std::{
use miette::{IntoDiagnostic, WrapErr};
use tracing::{debug, info};
- use crate::config::ProtocolComponentWithTestConfig;
pub struct TychoRunner {
db_url: String,
initialized_accounts: Vec<String>,
}
- // TODO: Currently Tycho-Indexer cannot be run as a lib. We need to expose the entrypoints to allow
- // running it as a lib
+ pub struct TychoRpcServer {
+ sender: Sender<bool>,
+ thread_handle: thread::JoinHandle<()>,
+ }
impl TychoRunner {
pub fn new(db_url: String, initialized_accounts: Vec<String>) -> Self {
Self { db_url, initialized_accounts }
@@ -93,16 +94,7 @@ impl TychoRunner {
Ok(())
}
- pub fn run_with_rpc_server<F, R>(
- &self,
- func: F,
- expected_components: &Vec<ProtocolComponentWithTestConfig>,
- start_block: u64,
- stop_block: u64,
- ) -> miette::Result<R>
- where
- F: FnOnce(&Vec<ProtocolComponentWithTestConfig>, u64, u64) -> R,
- {
+ pub fn start_rpc_server(&self) -> miette::Result<TychoRpcServer> {
let (tx, rx): (Sender<bool>, Receiver<bool>) = mpsc::channel();
let db_url = self.db_url.clone();
@@ -140,18 +132,22 @@ impl TychoRunner {
// Give the RPC server time to start
thread::sleep(Duration::from_secs(3));
- // Run the provided function
- let result = func(expected_components, start_block, stop_block);
- tx.send(true)
- .expect("Failed to send termination message");
+ Ok(TychoRpcServer { sender: tx, thread_handle: rpc_thread })
+ }
+ pub fn stop_rpc_server(&self, server: TychoRpcServer) -> miette::Result<()> {
+ server
+ .sender
+ .send(true)
+ .into_diagnostic()
+ .wrap_err("Failed to send termination message")?;
// Wait for the RPC thread to finish
- if rpc_thread.join().is_err() {
+ if server.thread_handle.join().is_err() {
eprintln!("Failed to join RPC thread");
}
- Ok(result)
+ Ok(())
}
// Helper method to handle process output in separate threads
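This hunk replaces the callback-style helper with a handle that owns the shutdown channel and the thread join handle. A stripped-down sketch of that start/stop pattern, with a plain worker thread standing in for the Tycho RPC process (names here are illustrative only):

```rust
use std::sync::mpsc::{self, Receiver, Sender};
use std::thread;

struct ServerHandle {
    sender: Sender<bool>,
    thread_handle: thread::JoinHandle<()>,
}

fn start_server() -> ServerHandle {
    let (tx, rx): (Sender<bool>, Receiver<bool>) = mpsc::channel();
    let thread_handle = thread::spawn(move || {
        // Serve until a termination message arrives.
        while let Ok(stop) = rx.recv() {
            if stop {
                break;
            }
        }
    });
    ServerHandle { sender: tx, thread_handle }
}

fn stop_server(server: ServerHandle) -> Result<(), String> {
    server.sender.send(true).map_err(|e| e.to_string())?;
    server
        .thread_handle
        .join()
        .map_err(|_| "failed to join server thread".to_string())
}

fn main() {
    let handle = start_server();
    // ... run the test body against the server here ...
    stop_server(handle).expect("clean shutdown");
}
```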

View File

@@ -1,14 +0,0 @@
{
"ethereum": {
"uniswap_v2": "0xaE04CA7E9Ed79cBD988f6c536CE11C621166f41B",
"uniswap_v3": "0xbab7124C9662B15C6b9AF0b1f329907dD55a24FC",
"uniswap_v4": "0x2C2EaB81Cf983602153E67b1890164BC4CABC6ed",
"vm:balancer_v2": "0xB5b8dc3F0a1Be99685a0DEd015Af93bFBB55C411",
"vm:balancer_v3": "0xec5cE4bF6FbcB7bB0148652c92a4AEC8c1d474Ec",
"sushiswap_v2": "0x2017ad7035D781C14699C8E44ed62d3083723A18",
"pancakeswap_v2": "0xC9db3FEB380E4fd9af239e2595ECdEcE3b5c34A4",
"pancakeswap_v3": "0x9D32e9F569B22Ae8d8C6f788037C1CD53632A059",
"vm:curve": "0x879F3008D96EBea0fc584aD684c7Df31777F3165",
"vm:maverick_v2": "0xF35e3F5F205769B41508A18787b62A21bC80200B"
}
}
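The deleted file above is a plain nested map from chain to protocol system to executor address. If something similar were needed again, it could be read with serde_json along these lines (a sketch; the exact shape is an assumption):

```rust
use std::collections::HashMap;

fn main() {
    let raw = r#"{ "ethereum": { "uniswap_v2": "0xaE04CA7E9Ed79cBD988f6c536CE11C621166f41B" } }"#;
    // chain -> protocol system -> executor address
    let executors: HashMap<String, HashMap<String, String>> =
        serde_json::from_str(raw).expect("valid executor map");
    assert_eq!(
        executors["ethereum"]["uniswap_v2"],
        "0xaE04CA7E9Ed79cBD988f6c536CE11C621166f41B"
    );
}
```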

View File

@@ -24,6 +24,7 @@ tests:
coins: "0x5b22307836623137353437346538393039346334346461393862393534656564656163343935323731643066222c22307861306238363939316336323138623336633164313964346132653965623063653336303665623438222c22307864616331376639353864326565353233613232303632303639393435393763313364383331656337225d" # ["0x6b175474e89094c44da98b954eedeac495271d0f","0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48","0xdac17f958d2ee523a2206206994597c13d831ec7"]
creation_tx: "0x20793bbf260912aae189d5d261ff003c9b9166da8191d8f9d63ff1c7722f3ac6"
skip_simulation: false
skip_execution: true # the block is before EIP-1559 where base fee was added. The current testing logic doesn't work without it
# Unique pool (no factory) steth - 0xdc24316b9ae028f1497c275eb9192a3ea0f67022
- name: test_steth
@@ -41,6 +42,7 @@ tests:
coins: "0x5b22307865656565656565656565656565656565656565656565656565656565656565656565656565656565222c22307861653761623936353230646533613138653565313131623565616162303935333132643766653834225d" # ["0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee","0xae7ab96520de3a18e5e111b5eaab095312d7fe84"]
creation_tx: "0xfac67ecbd423a5b915deff06045ec9343568edaec34ae95c43d35f2c018afdaa"
skip_simulation: false
skip_execution: true # the block is before EIP-1559 where base fee was added. The current testing logic doesn't work without it
# Unique pool (no factory) tricrypto2 - 0xd51a44d3fae010294c616388b506acda1bfaae46
- name: test_tricrypto2
@@ -59,6 +61,7 @@ tests:
coins: "0x5b22307864616331376639353864326565353233613232303632303639393435393763313364383331656337222c22307832323630666163356535353432613737336161343466626366656466376331393362633263353939222c22307863303261616133396232323366653864306130653563346632376561643930383363373536636332225d" # ["0xdac17f958d2ee523a2206206994597c13d831ec7","0x2260fac5e5542a773aa44fbcfedf7c193bc2c599","0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2"]
creation_tx: "0xdafb6385ed988ce8aacecfe1d97b38ea5e60b1ebce74d2423f71ddd621680138"
skip_simulation: false
skip_execution: true # the block is before EIP-1559 where base fee was added. The current testing logic doesn't work without it
# Unique pool (no factory) susd - 0xa5407eae9ba41422680e2e00537571bcc53efbfd
- name: test_susd
@@ -78,6 +81,7 @@ tests:
coins: "0x5b22307836623137353437346538393039346334346461393862393534656564656163343935323731643066222c22307861306238363939316336323138623336633164313964346132653965623063653336303665623438222c22307864616331376639353864326565353233613232303632303639393435393763313364383331656337222c22307835376162316563323864313239373037303532646634646634313864353861326434366435663531225d" # ["0x6b175474e89094c44da98b954eedeac495271d0f","0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48","0xdac17f958d2ee523a2206206994597c13d831ec7","0x57ab1ec28d129707052df4df418d58a2d46d5f51"]
creation_tx: "0x51aca4a03a395de8855fa2ca59b7febe520c2a223e69c502066162f7c1a95ec2"
skip_simulation: false
skip_execution: true # the block is before EIP-1559 where base fee was added. The current testing logic doesn't work without it
# Unique pool (no factory) fraxusdc - 0xdcef968d416a41cdac0ed8702fac8128a64241a2
- name: test_fraxusdc
@@ -95,6 +99,7 @@ tests:
coins: "0x5b22307838353364393535616365663832326462303538656238353035393131656437376631373562393965222c22307861306238363939316336323138623336633164313964346132653965623063653336303665623438225d" # ["0x853d955acef822db058eb8505911ed77f175b99e","0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48"]
creation_tx: "0x1f4254004ce9e19d4eb742ee5a69d30f29085902d976f73e97c44150225ef775"
skip_simulation: false
skip_execution: true # the block is before the Shanghai upgrade and our router needs functionality introduced there
# CryptoSwapNG factory 0x6A8cbed756804B16E05E741eDaBd5cB544AE21bf - PlainPool
- name: test_crypto_swap_ng_factory_plain_pool
@@ -120,6 +125,7 @@ tests:
coins: "0x5b22307834633965646435383532636439303566303836633735396538333833653039626666316536386233222c22307861306238363939316336323138623336633164313964346132653965623063653336303665623438225d" # ["0x4c9edd5852cd905f086c759e8383e09bff1e68b3","0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48"]
creation_tx: "0x6f4438aa1785589e2170599053a0cdc740d8987746a4b5ad9614b6ab7bb4e550"
skip_simulation: false
skip_execution: true # the block is before the Cancun upgrade and our router needs functionality introduced there (transient storage)
# CryptoSwapNG factory 0x6A8cbed756804B16E05E741eDaBd5cB544AE21bf - MetaPool
- name: test_crypto_swap_ng_factory_metapool
@@ -143,6 +149,7 @@ tests:
coins: "0x5b22307838363533373733363730353435313665313730313463636465643165376438313465646339636534222c22307861353538386637636466353630383131373130613264383264336339633939373639646231646362225d" # ["0x865377367054516e17014ccded1e7d814edc9ce4","0xa5588f7cdf560811710a2d82d3c9c99769db1dcb"]
creation_tx: "0x3cfeecae1b43086ee5705f89b803e21eb0492d7d5db06c229586db8fc72f5665"
skip_simulation: false
skip_execution: true # the block is before the Cancun upgrade and our router needs functionality introduced there (transient storage)
# # Metapool factory 0xB9fC157394Af804a3578134A6585C0dc9cc990d4 - MetaPool
# - name: test_metapool_factory_metapool
@@ -184,6 +191,8 @@ tests:
coins: "0x5b22307865393633336335326634633862376264656230386334613766653861356331623834616663663637222c22307837376530366339656363663265373937666434363261393262366437363432656638356230613434225d" # ["0xe9633c52f4c8b7bdeb08c4a7fe8a5c1b84afcf67","0x77e06c9eccf2e797fd462a92b6d7642ef85b0a44"]
creation_tx: "0xeb34c90d352f18ffcfe78b7e393e155f0314acf06c54d1ac9996e4ee5a9b4742"
skip_simulation: false
skip_execution: true # the block is before the Cancun upgrade and our router needs functionality introduced there (transient storage)
- id: "0x3f67dc2AdBA4B1beB6A48c30AB3AFb1c1440d35B" - id: "0x3f67dc2AdBA4B1beB6A48c30AB3AFb1c1440d35B"
tokens: tokens:
- "0xe9633C52f4c8B7BDeb08c4A7fE8a5c1B84AFCf67" - "0xe9633C52f4c8B7BDeb08c4A7fE8a5c1B84AFCf67"
@@ -196,6 +205,7 @@ tests:
coins: "0x5b22307865393633336335326634633862376264656230386334613766653861356331623834616663663637222c22307837376530366339656363663265373937666434363261393262366437363432656638356230613434225d" # ["0xe9633c52f4c8b7bdeb08c4a7fe8a5c1b84afcf67","0x77e06c9eccf2e797fd462a92b6d7642ef85b0a44"]
creation_tx: "0x455559b43afaf429c15c1d807fd7f5dd47be30f6411a854499f719b944f4c024"
skip_simulation: true # Reason: this pool has no liquidity at stop_block
skip_execution: true # the block is before the Cancun upgrade and our router needs functionality introduced there (transient storage)
# CryptoPool factory 0xF18056Bbd320E96A48e3Fbf8bC061322531aac99
- name: test_cryptopool_factory
@@ -217,6 +227,7 @@ tests:
coins: "0x5b22307830346331353462363663623334306633616532343131316363373637653031383465643030636336222c22307834353931646266663632363536653738353961666535653435663666343764333636396662623238225d" # ["0x04c154b66cb340f3ae24111cc767e0184ed00cc6","0x4591dbff62656e7859afe5e45f6f47d3669fbb28"]
creation_tx: "0xa89c09a7e0dfd84f3a294b8df4f33cc4a623e6d52deee357457afe2591ea596f"
skip_simulation: false
skip_execution: true # the block is before the Cancun upgrade and our router needs functionality introduced there (transient storage)
- id: "0x6c9Fe53cC13b125d6476E5Ce2b76983bd5b7A112" - id: "0x6c9Fe53cC13b125d6476E5Ce2b76983bd5b7A112"
tokens: tokens:
- "0x35fA164735182de50811E8e2E824cFb9B6118ac2" - "0x35fA164735182de50811E8e2E824cFb9B6118ac2"
@@ -230,6 +241,7 @@ tests:
coins: "0x5b22307833356661313634373335313832646535303831316538653265383234636662396236313138616332222c22307866393531653333356166623238393335336463323439653832393236313738656163376465643738225d" # ["0x35fa164735182de50811e8e2e824cfb9b6118ac2","0xf951e335afb289353dc249e82926178eac7ded78"]
creation_tx: "0xa5b13d50c56242f7994b8e1339032bb4c6f9ac3af3054d4eae3ce9e32e3c1a50"
skip_simulation: true # Reason: this pool has no liquidity at stop_block
skip_execution: true # the block is before the Cancun upgrade and our router needs functionality introduced there (transient storage)
# CryptoPool factory 0xF18056Bbd320E96A48e3Fbf8bC061322531aac99 - with ETH
- name: test_cryptopool_factory_with_eth
@@ -251,6 +263,7 @@ tests:
coins: "0x5b22307865656565656565656565656565656565656565656565656565656565656565656565656565656565222c22307835353239366636396634306561366432306534373835333363313561366230386236353465373538225d" # ["0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee","0x55296f69f40ea6d20e478533c15a6b08b654e758"]
creation_tx: "0x52f0f76d97e77579eebd32876de99f656930a99131dc4c4f1dec005786c8782b"
skip_simulation: false
skip_execution: true # the block is before the Cancun upgrade and our router needs functionality introduced there (transient storage)
# Tricrypto factory 0x0c0e5f2fF0ff18a3be9b835635039256dC4B4963
- name: test_tricrypto_factory
@@ -273,6 +286,7 @@ tests:
coins: "0x5b22307861306238363939316336323138623336633164313964346132653965623063653336303665623438222c22307832323630666163356535353432613737336161343466626366656466376331393362633263353939222c22307865656565656565656565656565656565656565656565656565656565656565656565656565656565225d" # ["0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48","0x2260fac5e5542a773aa44fbcfedf7c193bc2c599","0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee"]
creation_tx: "0x2bd59c19f993b83729fb23498f897a58567c6f0b3ee2f00613ba515a7b19fe23"
skip_simulation: false
skip_execution: true # the block is before the Cancun upgrade and our router needs functionality introduced there (transient storage)
# Twocrypto factory 0x98ee851a00abee0d95d08cf4ca2bdce32aeaaf7f
- name: test_twocrypto_factory
@@ -313,6 +327,7 @@ tests:
coins: "0x5b22307864616331376639353864326565353233613232303632303639393435393763313364383331656337222c22307866393339653061303366623037663539613733333134653733373934626530653537616331623465225d" # ["0xdac17f958d2ee523a2206206994597c13d831ec7","0xf939e0a03fb07f59a73314e73794be0e57ac1b4e"]
creation_tx: "0x40b25773bf8ea673434277d279af40a85b09072072e7004e9048a2ec0f0dd5a0"
skip_simulation: false
skip_execution: true # the block is before the Cancun upgrade and our router needs functionality introduced there (transient storage)
# StableSwap factory 0x4F8846Ae9380B90d2E71D5e3D042dff3E7ebb40d - Metapool
# - name: test_stableswap_factory_meta_pool
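The `skip_simulation` / `skip_execution` flags added throughout this file are plain booleans on each expected component. A rough sketch of how such an entry could be modelled for deserialization (field names beyond the two flags, and the use of serde_yaml, are assumptions):

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct ComponentTestEntry {
    id: String,
    #[serde(default)]
    tokens: Vec<String>,
    // When true, the component is left out of simulation / execution validation.
    #[serde(default)]
    skip_simulation: bool,
    #[serde(default)]
    skip_execution: bool,
}

fn main() {
    let yaml = r#"
id: "0x3f67dc2AdBA4B1beB6A48c30AB3AFb1c1440d35B"
tokens:
  - "0xe9633C52f4c8B7BDeb08c4A7fE8a5c1B84AFCf67"
skip_simulation: true
skip_execution: true
"#;
    let entry: ComponentTestEntry = serde_yaml::from_str(yaml).expect("valid YAML");
    assert!(entry.skip_simulation && entry.skip_execution);
}
```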