Merge pull request #48 from propeller-heads/sdk/ENG-3338-add-third-party-pool
SDK V2: Add ThirdPartyPool for integration testing
This commit is contained in:
4
.gitignore
vendored
4
.gitignore
vendored
@@ -19,4 +19,6 @@ substreams/ethereum-template/Cargo.lock
|
||||
.DS_Store
|
||||
|
||||
tycho-indexer
|
||||
substreams/my_substream
|
||||
substreams/my_substream
|
||||
testing/tycho-client/build/*
|
||||
testing/tycho-client/*.egg-info
|
||||
@@ -169,12 +169,26 @@ contract BalancerV2SwapAdapter is ISwapAdapter {
|
||||
returns (uint256[] memory limits)
|
||||
{
|
||||
limits = new uint256[](2);
|
||||
address pool;
|
||||
(pool,) = vault.getPool(poolId);
|
||||
uint256 bptIndex = maybeGetBptTokenIndex(pool);
|
||||
uint256 circulatingSupply = getBptCirculatingSupply(pool);
|
||||
|
||||
(address[] memory tokens, uint256[] memory balances,) =
|
||||
vault.getPoolTokens(poolId);
|
||||
|
||||
for (uint256 i = 0; i < tokens.length; i++) {
|
||||
if (tokens[i] == sellToken) {
|
||||
limits[0] = balances[i] * RESERVE_LIMIT_FACTOR / 10;
|
||||
if (i == bptIndex) {
|
||||
// Some pools pre-mint the bpt tokens and keep the balance
|
||||
// on the
|
||||
// pool we can't sell more than the circulating supply
|
||||
// though,
|
||||
// else we get an underflow error.
|
||||
limits[0] = circulatingSupply;
|
||||
} else {
|
||||
limits[0] = balances[i] * RESERVE_LIMIT_FACTOR / 10;
|
||||
}
|
||||
}
|
||||
if (tokens[i] == buyToken) {
|
||||
limits[1] = balances[i] * RESERVE_LIMIT_FACTOR / 10;
|
||||
@@ -182,6 +196,35 @@ contract BalancerV2SwapAdapter is ISwapAdapter {
|
||||
}
|
||||
}
|
||||
|
||||
function maybeGetBptTokenIndex(address poolAddress)
|
||||
internal
|
||||
view
|
||||
returns (uint256)
|
||||
{
|
||||
IPool pool = IPool(poolAddress);
|
||||
|
||||
try pool.getBptIndex() returns (uint256 index) {
|
||||
return index;
|
||||
} catch {
|
||||
return type(uint256).max;
|
||||
}
|
||||
}
|
||||
|
||||
function getBptCirculatingSupply(address poolAddress)
|
||||
internal
|
||||
view
|
||||
returns (uint256)
|
||||
{
|
||||
IPool pool = IPool(poolAddress);
|
||||
try pool.getActualSupply() returns (uint256 supply) {
|
||||
return supply;
|
||||
} catch {}
|
||||
try pool.getVirtualSupply() returns (uint256 supply) {
|
||||
return supply;
|
||||
} catch {}
|
||||
return type(uint256).max;
|
||||
}
|
||||
|
||||
function getCapabilities(bytes32, address, address)
|
||||
external
|
||||
pure
|
||||
@@ -486,3 +529,11 @@ interface IVault {
|
||||
GIVEN_OUT
|
||||
}
|
||||
}
|
||||
|
||||
interface IPool {
|
||||
function getBptIndex() external view returns (uint256);
|
||||
|
||||
function getActualSupply() external view returns (uint256);
|
||||
|
||||
function getVirtualSupply() external view returns (uint256);
|
||||
}
|
||||
|
||||
14
substreams/Cargo.lock
generated
14
substreams/Cargo.lock
generated
@@ -857,18 +857,18 @@ checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.197"
|
||||
version = "1.0.204"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2"
|
||||
checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.197"
|
||||
version = "1.0.204"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b"
|
||||
checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -877,9 +877,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.114"
|
||||
version = "1.0.120"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0"
|
||||
checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5"
|
||||
dependencies = [
|
||||
"itoa",
|
||||
"ryu",
|
||||
@@ -1104,6 +1104,8 @@ dependencies = [
|
||||
"itertools 0.12.1",
|
||||
"num-bigint",
|
||||
"prost 0.11.9",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"substreams",
|
||||
"substreams-ethereum",
|
||||
]
|
||||
|
||||
@@ -12,6 +12,8 @@ hex-literal = "0.4.1"
|
||||
hex = "0.4.3"
|
||||
ethabi = "18.0.0"
|
||||
tycho-substreams = { path = "crates/tycho-substreams" }
|
||||
serde = "1.0.204"
|
||||
serde_json = "1.0.120"
|
||||
|
||||
[profile.release]
|
||||
lto = true
|
||||
|
||||
@@ -11,3 +11,5 @@ hex.workspace = true
|
||||
itertools = "0.12.0"
|
||||
ethabi.workspace = true
|
||||
num-bigint = "0.4.4"
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
|
||||
46
substreams/crates/tycho-substreams/src/attributes.rs
Normal file
46
substreams/crates/tycho-substreams/src/attributes.rs
Normal file
@@ -0,0 +1,46 @@
|
||||
use std::fmt::Debug;
|
||||
use substreams::prelude::BigInt;
|
||||
|
||||
/// Encodes a value to bytes using json.
|
||||
///
|
||||
/// ## Panics
|
||||
/// In case the serialisation to json fails.
|
||||
pub fn json_serialize_value<T: serde::Serialize + Debug>(v: T) -> Vec<u8> {
|
||||
serde_json::to_value(v)
|
||||
.unwrap_or_else(|e| panic!("Failed to encode value as json {e}"))
|
||||
.to_string()
|
||||
.as_bytes()
|
||||
.to_vec()
|
||||
}
|
||||
|
||||
/// Encodes a list of addresses (in byte representation) into json.
|
||||
///
|
||||
/// Converts each address to a 0x prefixed hex string and then serializes
|
||||
/// the list of strings as a json.
|
||||
///
|
||||
/// ## Panics
|
||||
/// In case the serialisation to json fails.
|
||||
pub fn json_serialize_address_list(addresses: &[Vec<u8>]) -> Vec<u8> {
|
||||
json_serialize_value(
|
||||
addresses
|
||||
.iter()
|
||||
.map(|a| format!("0x{}", hex::encode(a)))
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
}
|
||||
|
||||
/// Encodes a list of BigInt values into json.
|
||||
///
|
||||
/// Converts each integer to a 0x prefixed hex string and then serializes
|
||||
/// the list of strings as a json.
|
||||
///
|
||||
/// ## Panics
|
||||
/// In case the serialisation to json fails.
|
||||
pub fn json_serialize_bigint_list(values: &[BigInt]) -> Vec<u8> {
|
||||
json_serialize_value(
|
||||
values
|
||||
.iter()
|
||||
.map(|v| format!("0x{}", hex::encode(v.to_signed_bytes_be())))
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
}
|
||||
@@ -10,67 +10,14 @@
|
||||
/// more [here](https://streamingfastio.medium.com/new-block-model-to-accelerate-chain-integration-9f65126e5425)
|
||||
use std::collections::HashMap;
|
||||
|
||||
use substreams_ethereum::pb::eth::{
|
||||
self,
|
||||
v2::{block::DetailLevel, CallType, StorageChange},
|
||||
use crate::{
|
||||
models::{InterimContractChange, TransactionChanges},
|
||||
prelude::TransactionChangesBuilder,
|
||||
};
|
||||
use substreams_ethereum::pb::{
|
||||
eth,
|
||||
eth::v2::{block::DetailLevel, CallType, TransactionTrace},
|
||||
};
|
||||
|
||||
use crate::pb::tycho::evm::v1::{self as tycho};
|
||||
|
||||
struct SlotValue {
|
||||
new_value: Vec<u8>,
|
||||
start_value: Vec<u8>,
|
||||
}
|
||||
|
||||
impl From<&StorageChange> for SlotValue {
|
||||
fn from(change: &StorageChange) -> Self {
|
||||
Self { new_value: change.new_value.clone(), start_value: change.old_value.clone() }
|
||||
}
|
||||
}
|
||||
|
||||
impl SlotValue {
|
||||
fn has_changed(&self) -> bool {
|
||||
self.start_value != self.new_value
|
||||
}
|
||||
}
|
||||
|
||||
// Uses a map for slots, protobuf does not allow bytes in hashmap keys
|
||||
struct InterimContractChange {
|
||||
address: Vec<u8>,
|
||||
balance: Vec<u8>,
|
||||
code: Vec<u8>,
|
||||
slots: HashMap<Vec<u8>, SlotValue>,
|
||||
change: tycho::ChangeType,
|
||||
}
|
||||
|
||||
impl InterimContractChange {
|
||||
fn new(address: &[u8], creation: bool) -> Self {
|
||||
Self {
|
||||
address: address.to_vec(),
|
||||
balance: vec![],
|
||||
code: vec![],
|
||||
slots: Default::default(),
|
||||
change: if creation { tycho::ChangeType::Creation } else { tycho::ChangeType::Update },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<InterimContractChange> for tycho::ContractChange {
|
||||
fn from(value: InterimContractChange) -> Self {
|
||||
tycho::ContractChange {
|
||||
address: value.address,
|
||||
balance: value.balance,
|
||||
code: value.code,
|
||||
slots: value
|
||||
.slots
|
||||
.into_iter()
|
||||
.filter(|(_, value)| value.has_changed())
|
||||
.map(|(slot, value)| tycho::ContractSlot { slot, value: value.new_value })
|
||||
.collect(),
|
||||
change: value.change.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Extracts and aggregates contract changes from a block.
|
||||
///
|
||||
@@ -101,7 +48,45 @@ impl From<InterimContractChange> for tycho::ContractChange {
|
||||
pub fn extract_contract_changes<F: Fn(&[u8]) -> bool>(
|
||||
block: ð::v2::Block,
|
||||
inclusion_predicate: F,
|
||||
transaction_changes: &mut HashMap<u64, tycho::TransactionChanges>,
|
||||
transaction_changes: &mut HashMap<u64, TransactionChanges>,
|
||||
) {
|
||||
extract_contract_changes_generic(block, inclusion_predicate, |tx, changed_contracts| {
|
||||
transaction_changes
|
||||
.entry(tx.index.into())
|
||||
.or_insert_with(|| TransactionChanges::new(&(tx.into())))
|
||||
.contract_changes
|
||||
.extend(
|
||||
changed_contracts
|
||||
.clone()
|
||||
.into_values()
|
||||
.map(|change| change.into()),
|
||||
);
|
||||
})
|
||||
}
|
||||
|
||||
pub fn extract_contract_changes_builder<F: Fn(&[u8]) -> bool>(
|
||||
block: ð::v2::Block,
|
||||
inclusion_predicate: F,
|
||||
transaction_changes: &mut HashMap<u64, TransactionChangesBuilder>,
|
||||
) {
|
||||
extract_contract_changes_generic(block, inclusion_predicate, |tx, changed_contracts| {
|
||||
let builder = transaction_changes
|
||||
.entry(tx.index.into())
|
||||
.or_insert_with(|| TransactionChangesBuilder::new(&(tx.into())));
|
||||
changed_contracts
|
||||
.clone()
|
||||
.into_iter()
|
||||
.for_each(|(_, change)| builder.add_contract_changes(&change));
|
||||
})
|
||||
}
|
||||
|
||||
fn extract_contract_changes_generic<
|
||||
F: Fn(&[u8]) -> bool,
|
||||
G: FnMut(&TransactionTrace, &HashMap<Vec<u8>, InterimContractChange>),
|
||||
>(
|
||||
block: ð::v2::Block,
|
||||
inclusion_predicate: F,
|
||||
mut store_changes: G,
|
||||
) {
|
||||
if block.detail_level != Into::<i32>::into(DetailLevel::DetaillevelExtended) {
|
||||
panic!("Only extended blocks are supported");
|
||||
@@ -160,14 +145,7 @@ pub fn extract_contract_changes<F: Fn(&[u8]) -> bool>(
|
||||
)
|
||||
});
|
||||
|
||||
let slot_value = contract_change
|
||||
.slots
|
||||
.entry(storage_change.key.clone())
|
||||
.or_insert_with(|| storage_change.into());
|
||||
|
||||
slot_value
|
||||
.new_value
|
||||
.copy_from_slice(&storage_change.new_value);
|
||||
contract_change.upsert_slot(storage_change);
|
||||
});
|
||||
|
||||
balance_changes
|
||||
@@ -184,10 +162,7 @@ pub fn extract_contract_changes<F: Fn(&[u8]) -> bool>(
|
||||
});
|
||||
|
||||
if let Some(new_balance) = &balance_change.new_value {
|
||||
contract_change.balance.clear();
|
||||
contract_change
|
||||
.balance
|
||||
.extend_from_slice(&new_balance.bytes);
|
||||
contract_change.set_balance(&new_balance.bytes);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -204,25 +179,15 @@ pub fn extract_contract_changes<F: Fn(&[u8]) -> bool>(
|
||||
)
|
||||
});
|
||||
|
||||
contract_change.code.clear();
|
||||
contract_change
|
||||
.code
|
||||
.extend_from_slice(&code_change.new_code);
|
||||
contract_change.set_code(&code_change.new_code);
|
||||
});
|
||||
|
||||
if !storage_changes.is_empty() ||
|
||||
!balance_changes.is_empty() ||
|
||||
!code_changes.is_empty()
|
||||
{
|
||||
transaction_changes
|
||||
.entry(block_tx.index.into())
|
||||
.or_insert_with(|| tycho::TransactionChanges::new(&(block_tx.into())))
|
||||
.contract_changes
|
||||
.extend(
|
||||
changed_contracts
|
||||
.drain()
|
||||
.map(|(_, change)| change.into()),
|
||||
);
|
||||
store_changes(block_tx, &changed_contracts)
|
||||
}
|
||||
changed_contracts.clear()
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
mod abi;
|
||||
pub mod attributes;
|
||||
pub mod balances;
|
||||
pub mod contract;
|
||||
mod mock_store;
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use substreams_ethereum::pb::eth::v2::{self as sf};
|
||||
use std::collections::HashMap;
|
||||
use substreams_ethereum::pb::eth::v2::{self as sf, StorageChange};
|
||||
|
||||
// re-export the protobuf types here.
|
||||
pub use crate::pb::tycho::evm::v1::*;
|
||||
@@ -17,6 +18,161 @@ impl TransactionChanges {
|
||||
}
|
||||
}
|
||||
|
||||
/// Builds `TransactionChanges` struct
|
||||
///
|
||||
/// Ensures uniqueness for contract addresses and component ids.
|
||||
#[derive(Default)]
|
||||
pub struct TransactionChangesBuilder {
|
||||
tx: Option<Transaction>,
|
||||
contract_changes: HashMap<Vec<u8>, InterimContractChange>,
|
||||
entity_changes: HashMap<String, InterimEntityChanges>,
|
||||
component_changes: HashMap<String, ProtocolComponent>,
|
||||
balance_changes: HashMap<(Vec<u8>, Vec<u8>), BalanceChange>,
|
||||
}
|
||||
|
||||
impl TransactionChangesBuilder {
|
||||
/// Initialize a new builder for a transaction.
|
||||
pub fn new(tx: &Transaction) -> Self {
|
||||
Self { tx: Some(tx.clone()), ..Default::default() }
|
||||
}
|
||||
|
||||
/// Register a new contract change.
|
||||
///
|
||||
/// Will prioritize the new change over any already present one.
|
||||
pub fn add_contract_changes(&mut self, change: &InterimContractChange) {
|
||||
self.contract_changes
|
||||
.entry(change.address.clone())
|
||||
.and_modify(|c| {
|
||||
if !change.balance.is_empty() {
|
||||
c.set_balance(&change.balance)
|
||||
}
|
||||
if !change.slots.is_empty() {
|
||||
c.upsert_slots(&change.slots)
|
||||
}
|
||||
if !change.code.is_empty() {
|
||||
c.set_code(&change.code)
|
||||
}
|
||||
})
|
||||
.or_insert_with(|| {
|
||||
let mut c = InterimContractChange::new(
|
||||
&change.address,
|
||||
change.change == ChangeType::Creation,
|
||||
);
|
||||
c.upsert_slots(&change.slots);
|
||||
c.set_code(&change.code);
|
||||
c.set_balance(&change.balance);
|
||||
c
|
||||
});
|
||||
}
|
||||
|
||||
/// Unique contract/account addresses that have been changed so far.
|
||||
pub fn changed_contracts(&self) -> impl Iterator<Item = &[u8]> {
|
||||
self.contract_changes
|
||||
.keys()
|
||||
.map(|k| k.as_slice())
|
||||
}
|
||||
|
||||
/// Marks a component as updated.
|
||||
///
|
||||
/// If the protocol does not follow a 1:1 logic between components and contracts.
|
||||
/// Components can be manually marked as updated using this method.
|
||||
pub fn mark_component_as_updated(&mut self, component_id: &str) {
|
||||
let attr = Attribute {
|
||||
name: "update_marker".to_string(),
|
||||
value: vec![1u8],
|
||||
change: ChangeType::Update.into(),
|
||||
};
|
||||
if let Some(entry) = self
|
||||
.entity_changes
|
||||
.get_mut(component_id)
|
||||
{
|
||||
entry.set_attribute(&attr);
|
||||
} else {
|
||||
let mut change = InterimEntityChanges::new(component_id);
|
||||
change.set_attribute(&attr);
|
||||
self.entity_changes
|
||||
.insert(component_id.to_string(), change);
|
||||
}
|
||||
}
|
||||
|
||||
/// Registers a new entity change.
|
||||
///
|
||||
/// Will prioritize the new change over any already present one.
|
||||
pub fn add_entity_change(&mut self, change: &EntityChanges) {
|
||||
self.entity_changes
|
||||
.entry(change.component_id.clone())
|
||||
.and_modify(|ec| {
|
||||
for attr in change.attributes.iter() {
|
||||
ec.set_attribute(attr);
|
||||
}
|
||||
})
|
||||
.or_insert_with(|| InterimEntityChanges {
|
||||
component_id: change.component_id.clone(),
|
||||
attributes: change
|
||||
.attributes
|
||||
.clone()
|
||||
.into_iter()
|
||||
.map(|a| (a.name.clone(), a))
|
||||
.collect(),
|
||||
});
|
||||
}
|
||||
|
||||
/// Adds a new protocol component.
|
||||
///
|
||||
/// ## Note
|
||||
/// This method is a noop, in case the component is already present. Since
|
||||
/// components are assumed to be immutable.
|
||||
pub fn add_protocol_component(&mut self, component: &ProtocolComponent) {
|
||||
if !self
|
||||
.component_changes
|
||||
.contains_key(&component.id)
|
||||
{
|
||||
self.component_changes
|
||||
.insert(component.id.clone(), component.clone());
|
||||
}
|
||||
}
|
||||
|
||||
/// Updates a components balances
|
||||
///
|
||||
/// Overwrites any previous balance changes of the component if present.
|
||||
pub fn add_balance_change(&mut self, change: &BalanceChange) {
|
||||
self.balance_changes
|
||||
.insert((change.component_id.clone(), change.token.clone()), change.clone());
|
||||
}
|
||||
|
||||
pub fn build(self) -> Option<TransactionChanges> {
|
||||
if self.contract_changes.is_empty() &&
|
||||
self.component_changes.is_empty() &&
|
||||
self.balance_changes.is_empty() &&
|
||||
self.entity_changes.is_empty()
|
||||
{
|
||||
None
|
||||
} else {
|
||||
Some(TransactionChanges {
|
||||
tx: self.tx,
|
||||
contract_changes: self
|
||||
.contract_changes
|
||||
.into_values()
|
||||
.map(|interim| interim.into())
|
||||
.collect::<Vec<_>>(),
|
||||
entity_changes: self
|
||||
.entity_changes
|
||||
.into_values()
|
||||
.map(|interim| interim.into())
|
||||
.collect::<Vec<_>>(),
|
||||
component_changes: self
|
||||
.component_changes
|
||||
.into_values()
|
||||
.collect::<Vec<_>>(),
|
||||
balance_changes: self
|
||||
.balance_changes
|
||||
.into_values()
|
||||
.collect::<Vec<_>>(),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&sf::TransactionTrace> for Transaction {
|
||||
fn from(tx: &sf::TransactionTrace) -> Self {
|
||||
Self {
|
||||
@@ -145,3 +301,128 @@ impl ProtocolComponent {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// Same as `EntityChanges` but ensures attributes are unique by name.
|
||||
#[derive(Default)]
|
||||
pub struct InterimEntityChanges {
|
||||
component_id: String,
|
||||
attributes: HashMap<String, Attribute>,
|
||||
}
|
||||
|
||||
impl InterimEntityChanges {
|
||||
pub fn new(id: &str) -> Self {
|
||||
Self { component_id: id.to_string(), ..Default::default() }
|
||||
}
|
||||
|
||||
pub fn set_attribute(&mut self, attr: &Attribute) {
|
||||
self.attributes
|
||||
.entry(attr.name.clone())
|
||||
.and_modify(|existing| *existing = attr.clone())
|
||||
.or_insert(attr.clone());
|
||||
}
|
||||
}
|
||||
|
||||
impl From<InterimEntityChanges> for EntityChanges {
|
||||
fn from(value: InterimEntityChanges) -> Self {
|
||||
EntityChanges {
|
||||
component_id: value.component_id.clone(),
|
||||
attributes: value
|
||||
.attributes
|
||||
.into_values()
|
||||
.collect::<Vec<_>>(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct SlotValue {
|
||||
new_value: Vec<u8>,
|
||||
start_value: Vec<u8>,
|
||||
}
|
||||
|
||||
impl SlotValue {
|
||||
fn has_changed(&self) -> bool {
|
||||
self.start_value != self.new_value
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&StorageChange> for SlotValue {
|
||||
fn from(change: &StorageChange) -> Self {
|
||||
Self { new_value: change.new_value.clone(), start_value: change.old_value.clone() }
|
||||
}
|
||||
}
|
||||
|
||||
// Uses a map for slots, protobuf does not allow bytes in hashmap keys
|
||||
#[derive(Clone)]
|
||||
pub struct InterimContractChange {
|
||||
address: Vec<u8>,
|
||||
balance: Vec<u8>,
|
||||
code: Vec<u8>,
|
||||
slots: HashMap<Vec<u8>, SlotValue>,
|
||||
change: ChangeType,
|
||||
}
|
||||
|
||||
impl InterimContractChange {
|
||||
pub fn new(address: &[u8], creation: bool) -> Self {
|
||||
Self {
|
||||
address: address.to_vec(),
|
||||
balance: vec![],
|
||||
code: vec![],
|
||||
slots: Default::default(),
|
||||
change: if creation { ChangeType::Creation } else { ChangeType::Update },
|
||||
}
|
||||
}
|
||||
|
||||
pub fn upsert_slot(&mut self, change: &StorageChange) {
|
||||
if change.address != self.address {
|
||||
panic!("Bad storage change");
|
||||
}
|
||||
self.slots
|
||||
.entry(change.key.clone())
|
||||
.and_modify(|sv| {
|
||||
sv.new_value
|
||||
.copy_from_slice(&change.new_value)
|
||||
})
|
||||
.or_insert_with(|| change.into());
|
||||
}
|
||||
|
||||
fn upsert_slots(&mut self, changes: &HashMap<Vec<u8>, SlotValue>) {
|
||||
for (slot, value) in changes.iter() {
|
||||
self.slots
|
||||
.entry(slot.clone())
|
||||
.and_modify(|sv| {
|
||||
sv.new_value
|
||||
.copy_from_slice(&value.new_value)
|
||||
})
|
||||
.or_insert(value.clone());
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_balance(&mut self, new_balance: &[u8]) {
|
||||
self.balance.clear();
|
||||
self.balance
|
||||
.extend_from_slice(new_balance);
|
||||
}
|
||||
|
||||
pub fn set_code(&mut self, code: &[u8]) {
|
||||
self.code.clear();
|
||||
self.code.extend_from_slice(code);
|
||||
}
|
||||
}
|
||||
|
||||
impl From<InterimContractChange> for ContractChange {
|
||||
fn from(value: InterimContractChange) -> Self {
|
||||
ContractChange {
|
||||
address: value.address,
|
||||
balance: value.balance,
|
||||
code: value.code,
|
||||
slots: value
|
||||
.slots
|
||||
.into_iter()
|
||||
.filter(|(_, value)| value.has_changed())
|
||||
.map(|(slot, value)| ContractSlot { slot, value: value.new_value })
|
||||
.collect(),
|
||||
change: value.change.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
25
substreams/ethereum-balancer/Readme.md
Normal file
25
substreams/ethereum-balancer/Readme.md
Normal file
@@ -0,0 +1,25 @@
|
||||
# Balancer Substream
|
||||
|
||||
## Open tasks
|
||||
|
||||
### Missing rate provider state
|
||||
|
||||
Any pool that does use rate providers, is currently not supported by tycho since we do
|
||||
not witness the contract creation of rate providers and thus can't provide the required
|
||||
contract state.
|
||||
|
||||
This is planned to be resolved with the dynamic contract indexing module.
|
||||
|
||||
## Static Attributes
|
||||
|
||||
| name | type | description |
|
||||
|--------------------|-------|---------------------------------------------------------------------------------------------------------|
|
||||
| pool_type | str | A unique identifier per pool type. Set depending on the factory |
|
||||
| normalized weights | json | The normalised weights of a weighted pool. |
|
||||
| pool_id | bytes | The balancer pool id. |
|
||||
| rate_providers | json | A list of rate provider addresses. |
|
||||
| bpt | bytes | The balancer lp token, set if the pool support entering and exiting lp postions via the swap interface. |
|
||||
| main_token | bytes | The main token address for a linear pool |
|
||||
| wrapped_token | bytes | The wrapped token address for a linear pool |
|
||||
| fee | int | The fee charged by the pool set at deployment time |
|
||||
| upper_target | int | The upper target for a linear pool |
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -9,7 +9,7 @@ use substreams::{
|
||||
};
|
||||
use substreams_ethereum::{pb::eth, Event};
|
||||
use tycho_substreams::{
|
||||
balances::aggregate_balances_changes, contract::extract_contract_changes, prelude::*,
|
||||
balances::aggregate_balances_changes, contract::extract_contract_changes_builder, prelude::*,
|
||||
};
|
||||
|
||||
pub const VAULT_ADDRESS: &[u8] = &hex!("BA12222222228d8Ba445958a75a0704d566BF2C8");
|
||||
@@ -146,39 +146,43 @@ pub fn map_protocol_changes(
|
||||
) -> Result<BlockChanges> {
|
||||
// We merge contract changes by transaction (identified by transaction index) making it easy to
|
||||
// sort them at the very end.
|
||||
let mut transaction_changes: HashMap<_, TransactionChanges> = HashMap::new();
|
||||
let mut transaction_changes: HashMap<_, TransactionChangesBuilder> = HashMap::new();
|
||||
|
||||
// `ProtocolComponents` are gathered from `map_pools_created` which just need a bit of work to
|
||||
// convert into `TransactionChanges`
|
||||
let default_attributes = vec![
|
||||
Attribute {
|
||||
name: "balance_owner".to_string(),
|
||||
value: VAULT_ADDRESS.to_vec(),
|
||||
change: ChangeType::Creation.into(),
|
||||
},
|
||||
Attribute {
|
||||
name: "update_marker".to_string(),
|
||||
value: vec![1u8],
|
||||
change: ChangeType::Creation.into(),
|
||||
},
|
||||
];
|
||||
grouped_components
|
||||
.tx_components
|
||||
.iter()
|
||||
.for_each(|tx_component| {
|
||||
// initialise builder if not yet present for this tx
|
||||
let tx = tx_component.tx.as_ref().unwrap();
|
||||
transaction_changes
|
||||
let builder = transaction_changes
|
||||
.entry(tx.index)
|
||||
.or_insert_with(|| TransactionChanges::new(tx))
|
||||
.component_changes
|
||||
.extend_from_slice(&tx_component.components);
|
||||
.or_insert_with(|| TransactionChangesBuilder::new(tx));
|
||||
|
||||
// iterate over individual components created within this tx
|
||||
tx_component
|
||||
.components
|
||||
.iter()
|
||||
.for_each(|component| {
|
||||
transaction_changes
|
||||
.entry(tx.index)
|
||||
.or_insert_with(|| TransactionChanges::new(tx))
|
||||
.entity_changes
|
||||
.push(EntityChanges {
|
||||
component_id: component.id.clone(),
|
||||
attributes: vec![Attribute {
|
||||
name: "balance_owner".to_string(),
|
||||
value: "0xBA12222222228d8Ba445958a75a0704d566BF2C8"
|
||||
.to_string()
|
||||
.as_bytes()
|
||||
.to_vec(),
|
||||
change: ChangeType::Creation.into(),
|
||||
}],
|
||||
});
|
||||
builder.add_protocol_component(component);
|
||||
let entity_change = EntityChanges {
|
||||
component_id: component.id.clone(),
|
||||
attributes: default_attributes.clone(),
|
||||
};
|
||||
builder.add_entity_change(&entity_change)
|
||||
});
|
||||
});
|
||||
|
||||
@@ -189,15 +193,16 @@ pub fn map_protocol_changes(
|
||||
aggregate_balances_changes(balance_store, deltas)
|
||||
.into_iter()
|
||||
.for_each(|(_, (tx, balances))| {
|
||||
transaction_changes
|
||||
let builder = transaction_changes
|
||||
.entry(tx.index)
|
||||
.or_insert_with(|| TransactionChanges::new(&tx))
|
||||
.balance_changes
|
||||
.extend(balances.into_values());
|
||||
.or_insert_with(|| TransactionChangesBuilder::new(&tx));
|
||||
balances
|
||||
.values()
|
||||
.for_each(|bc| builder.add_balance_change(bc));
|
||||
});
|
||||
|
||||
// Extract and insert any storage changes that happened for any of the components.
|
||||
extract_contract_changes(
|
||||
extract_contract_changes_builder(
|
||||
&block,
|
||||
|addr| {
|
||||
components_store
|
||||
@@ -208,6 +213,24 @@ pub fn map_protocol_changes(
|
||||
&mut transaction_changes,
|
||||
);
|
||||
|
||||
transaction_changes
|
||||
.iter_mut()
|
||||
.for_each(|(_, change)| {
|
||||
// this indirection is necessary due to borrowing rules.
|
||||
let addresses = change
|
||||
.changed_contracts()
|
||||
.map(|e| e.to_vec())
|
||||
.collect::<Vec<_>>();
|
||||
addresses
|
||||
.into_iter()
|
||||
.for_each(|address| {
|
||||
if address != VAULT_ADDRESS {
|
||||
// We reconstruct the component_id from the address here
|
||||
change.mark_component_as_updated(&format!("0x{}", hex::encode(address)))
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
// Process all `transaction_changes` for final output in the `BlockChanges`,
|
||||
// sorted by transaction index (the key).
|
||||
Ok(BlockChanges {
|
||||
@@ -215,17 +238,7 @@ pub fn map_protocol_changes(
|
||||
changes: transaction_changes
|
||||
.drain()
|
||||
.sorted_unstable_by_key(|(index, _)| *index)
|
||||
.filter_map(|(_, change)| {
|
||||
if change.contract_changes.is_empty() &&
|
||||
change.component_changes.is_empty() &&
|
||||
change.balance_changes.is_empty() &&
|
||||
change.entity_changes.is_empty()
|
||||
{
|
||||
None
|
||||
} else {
|
||||
Some(change)
|
||||
}
|
||||
})
|
||||
.filter_map(|(_, builder)| builder.build())
|
||||
.collect::<Vec<_>>(),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,33 +1,13 @@
|
||||
use crate::{abi, modules::VAULT_ADDRESS};
|
||||
use substreams::{hex, scalar::BigInt};
|
||||
use substreams::hex;
|
||||
use substreams_ethereum::{
|
||||
pb::eth::v2::{Call, Log, TransactionTrace},
|
||||
Event, Function,
|
||||
};
|
||||
use tycho_substreams::prelude::*;
|
||||
|
||||
/// This trait defines some helpers for serializing and deserializing `Vec<BigInt` which is needed
|
||||
/// to be able to encode the `normalized_weights` and `weights` `Attribute`s. This should also be
|
||||
/// handled by any downstream application.
|
||||
trait SerializableVecBigInt {
|
||||
fn serialize_bytes(&self) -> Vec<u8>;
|
||||
#[allow(dead_code)]
|
||||
fn deserialize_bytes(bytes: &[u8]) -> Vec<BigInt>;
|
||||
}
|
||||
|
||||
impl SerializableVecBigInt for Vec<BigInt> {
|
||||
fn serialize_bytes(&self) -> Vec<u8> {
|
||||
self.iter()
|
||||
.flat_map(|big_int| big_int.to_signed_bytes_be())
|
||||
.collect()
|
||||
}
|
||||
fn deserialize_bytes(bytes: &[u8]) -> Vec<BigInt> {
|
||||
bytes
|
||||
.chunks_exact(32)
|
||||
.map(BigInt::from_signed_bytes_be)
|
||||
.collect::<Vec<BigInt>>()
|
||||
}
|
||||
}
|
||||
use tycho_substreams::{
|
||||
attributes::{json_serialize_address_list, json_serialize_bigint_list},
|
||||
prelude::*,
|
||||
};
|
||||
|
||||
/// Helper function to get pool_registered event
|
||||
fn get_pool_registered(
|
||||
@@ -42,6 +22,18 @@ fn get_pool_registered(
|
||||
.clone()
|
||||
}
|
||||
|
||||
fn get_token_registered(
|
||||
tx: &TransactionTrace,
|
||||
pool_id: &[u8],
|
||||
) -> abi::vault::events::TokensRegistered {
|
||||
tx.logs_with_calls()
|
||||
.filter(|(log, _)| log.address == VAULT_ADDRESS)
|
||||
.filter_map(|(log, _)| abi::vault::events::TokensRegistered::match_and_decode(log))
|
||||
.find(|ev| ev.pool_id == pool_id)
|
||||
.unwrap()
|
||||
.clone()
|
||||
}
|
||||
|
||||
/// This is the main function that handles the creation of `ProtocolComponent`s with `Attribute`s
|
||||
/// based on the specific factory address. There's 3 factory groups that are represented here:
|
||||
/// - Weighted Pool Factories
|
||||
@@ -76,14 +68,20 @@ pub fn address_map(
|
||||
("pool_type", "WeightedPoolFactory".as_bytes()),
|
||||
(
|
||||
"normalized_weights",
|
||||
&create_call
|
||||
.normalized_weights
|
||||
.serialize_bytes(),
|
||||
&json_serialize_bigint_list(&create_call.normalized_weights),
|
||||
),
|
||||
("pool_id", &pool_registered.pool_id),
|
||||
(
|
||||
"rate_providers",
|
||||
&json_serialize_address_list(&create_call.rate_providers),
|
||||
),
|
||||
(
|
||||
"pool_id",
|
||||
format!("0x{}", hex::encode(pool_registered.pool_id)).as_bytes(),
|
||||
"fee",
|
||||
&create_call
|
||||
.swap_fee_percentage
|
||||
.to_signed_bytes_be(),
|
||||
),
|
||||
("manual_updates", &[1u8]),
|
||||
])
|
||||
.as_swap_type("balancer_pool", ImplementationType::Vm),
|
||||
)
|
||||
@@ -94,17 +92,27 @@ pub fn address_map(
|
||||
let pool_created =
|
||||
abi::composable_stable_pool_factory::events::PoolCreated::match_and_decode(log)?;
|
||||
let pool_registered = get_pool_registered(tx, &pool_created.pool);
|
||||
let tokens_registered = get_token_registered(tx, &pool_registered.pool_id);
|
||||
|
||||
Some(
|
||||
ProtocolComponent::at_contract(&pool_created.pool, &(tx.into()))
|
||||
.with_contracts(&[pool_created.pool, VAULT_ADDRESS.to_vec()])
|
||||
.with_tokens(&create_call.tokens)
|
||||
.with_contracts(&[pool_created.pool.clone(), VAULT_ADDRESS.to_vec()])
|
||||
.with_tokens(&tokens_registered.tokens)
|
||||
.with_attributes(&[
|
||||
("pool_type", "ComposableStablePoolFactory".as_bytes()),
|
||||
("pool_id", &pool_registered.pool_id),
|
||||
("bpt", &pool_created.pool),
|
||||
(
|
||||
"pool_id",
|
||||
format!("0x{}", hex::encode(pool_registered.pool_id)).as_bytes(),
|
||||
"fee",
|
||||
&create_call
|
||||
.swap_fee_percentage
|
||||
.to_signed_bytes_be(),
|
||||
),
|
||||
(
|
||||
"rate_providers",
|
||||
&json_serialize_address_list(&create_call.rate_providers),
|
||||
),
|
||||
("manual_updates", &[1u8]),
|
||||
])
|
||||
.as_swap_type("balancer_pool", ImplementationType::Vm),
|
||||
)
|
||||
@@ -115,11 +123,12 @@ pub fn address_map(
|
||||
let pool_created =
|
||||
abi::erc_linear_pool_factory::events::PoolCreated::match_and_decode(log)?;
|
||||
let pool_registered = get_pool_registered(tx, &pool_created.pool);
|
||||
let tokens_registered = get_token_registered(tx, &pool_registered.pool_id);
|
||||
|
||||
Some(
|
||||
ProtocolComponent::at_contract(&pool_created.pool, &(tx.into()))
|
||||
.with_contracts(&[pool_created.pool, VAULT_ADDRESS.to_vec()])
|
||||
.with_tokens(&[create_call.main_token, create_call.wrapped_token])
|
||||
.with_contracts(&[pool_created.pool.clone(), VAULT_ADDRESS.to_vec()])
|
||||
.with_tokens(&tokens_registered.tokens)
|
||||
.with_attributes(&[
|
||||
("pool_type", "ERC4626LinearPoolFactory".as_bytes()),
|
||||
(
|
||||
@@ -128,9 +137,16 @@ pub fn address_map(
|
||||
.upper_target
|
||||
.to_signed_bytes_be(),
|
||||
),
|
||||
("pool_id", &pool_registered.pool_id),
|
||||
("manual_updates", &[1u8]),
|
||||
("bpt", &pool_created.pool),
|
||||
("main_token", &create_call.main_token),
|
||||
("wrapped_token", &create_call.wrapped_token),
|
||||
(
|
||||
"pool_id",
|
||||
format!("0x{}", hex::encode(pool_registered.pool_id)).as_bytes(),
|
||||
"fee",
|
||||
&create_call
|
||||
.swap_fee_percentage
|
||||
.to_signed_bytes_be(),
|
||||
),
|
||||
])
|
||||
.as_swap_type("balancer_pool", ImplementationType::Vm),
|
||||
@@ -142,11 +158,12 @@ pub fn address_map(
|
||||
let pool_created =
|
||||
abi::euler_linear_pool_factory::events::PoolCreated::match_and_decode(log)?;
|
||||
let pool_registered = get_pool_registered(tx, &pool_created.pool);
|
||||
let tokens_registered = get_token_registered(tx, &pool_registered.pool_id);
|
||||
|
||||
Some(
|
||||
ProtocolComponent::at_contract(&pool_created.pool, &(tx.into()))
|
||||
.with_contracts(&[pool_created.pool, VAULT_ADDRESS.to_vec()])
|
||||
.with_tokens(&[create_call.main_token, create_call.wrapped_token])
|
||||
.with_contracts(&[pool_created.pool.clone(), VAULT_ADDRESS.to_vec()])
|
||||
.with_tokens(&tokens_registered.tokens)
|
||||
.with_attributes(&[
|
||||
("pool_type", "EulerLinearPoolFactory".as_bytes()),
|
||||
(
|
||||
@@ -155,9 +172,16 @@ pub fn address_map(
|
||||
.upper_target
|
||||
.to_signed_bytes_be(),
|
||||
),
|
||||
("pool_id", &pool_registered.pool_id),
|
||||
("manual_updates", &[1u8]),
|
||||
("bpt", &pool_created.pool),
|
||||
("main_token", &create_call.main_token),
|
||||
("wrapped_token", &create_call.wrapped_token),
|
||||
(
|
||||
"pool_id",
|
||||
format!("0x{}", hex::encode(pool_registered.pool_id)).as_bytes(),
|
||||
"fee",
|
||||
&create_call
|
||||
.swap_fee_percentage
|
||||
.to_signed_bytes_be(),
|
||||
),
|
||||
])
|
||||
.as_swap_type("balancer_pool", ImplementationType::Vm),
|
||||
@@ -217,11 +241,12 @@ pub fn address_map(
|
||||
let pool_created =
|
||||
abi::silo_linear_pool_factory::events::PoolCreated::match_and_decode(log)?;
|
||||
let pool_registered = get_pool_registered(tx, &pool_created.pool);
|
||||
let tokens_registered = get_token_registered(tx, &pool_registered.pool_id);
|
||||
|
||||
Some(
|
||||
ProtocolComponent::at_contract(&pool_created.pool, &(tx.into()))
|
||||
.with_contracts(&[pool_created.pool, VAULT_ADDRESS.to_vec()])
|
||||
.with_tokens(&[create_call.main_token, create_call.wrapped_token])
|
||||
.with_contracts(&[pool_created.pool.clone(), VAULT_ADDRESS.to_vec()])
|
||||
.with_tokens(&tokens_registered.tokens)
|
||||
.with_attributes(&[
|
||||
("pool_type", "SiloLinearPoolFactory".as_bytes()),
|
||||
(
|
||||
@@ -230,9 +255,16 @@ pub fn address_map(
|
||||
.upper_target
|
||||
.to_signed_bytes_be(),
|
||||
),
|
||||
("pool_id", &pool_registered.pool_id),
|
||||
("manual_updates", &[1u8]),
|
||||
("bpt", &pool_created.pool),
|
||||
("main_token", &create_call.main_token),
|
||||
("wrapped_token", &create_call.wrapped_token),
|
||||
(
|
||||
"pool_id",
|
||||
format!("0x{}", hex::encode(pool_registered.pool_id)).as_bytes(),
|
||||
"fee",
|
||||
&create_call
|
||||
.swap_fee_percentage
|
||||
.to_signed_bytes_be(),
|
||||
),
|
||||
])
|
||||
.as_swap_type("balancer_pool", ImplementationType::Vm),
|
||||
@@ -244,11 +276,12 @@ pub fn address_map(
|
||||
let pool_created =
|
||||
abi::yearn_linear_pool_factory::events::PoolCreated::match_and_decode(log)?;
|
||||
let pool_registered = get_pool_registered(tx, &pool_created.pool);
|
||||
let tokens_registered = get_token_registered(tx, &pool_registered.pool_id);
|
||||
|
||||
Some(
|
||||
ProtocolComponent::at_contract(&pool_created.pool, &(tx.into()))
|
||||
.with_contracts(&[pool_created.pool, VAULT_ADDRESS.to_vec()])
|
||||
.with_tokens(&[create_call.main_token, create_call.wrapped_token])
|
||||
.with_contracts(&[pool_created.pool.clone(), VAULT_ADDRESS.to_vec()])
|
||||
.with_tokens(&tokens_registered.tokens)
|
||||
.with_attributes(&[
|
||||
("pool_type", "YearnLinearPoolFactory".as_bytes()),
|
||||
(
|
||||
@@ -257,9 +290,16 @@ pub fn address_map(
|
||||
.upper_target
|
||||
.to_signed_bytes_be(),
|
||||
),
|
||||
("pool_id", &pool_registered.pool_id),
|
||||
("manual_updates", &[1u8]),
|
||||
("bpt", &pool_created.pool),
|
||||
("main_token", &create_call.main_token),
|
||||
("wrapped_token", &create_call.wrapped_token),
|
||||
(
|
||||
"pool_id",
|
||||
format!("0x{}", hex::encode(pool_registered.pool_id)).as_bytes(),
|
||||
"fee",
|
||||
&create_call
|
||||
.swap_fee_percentage
|
||||
.to_signed_bytes_be(),
|
||||
),
|
||||
])
|
||||
.as_swap_type("balancer_pool", ImplementationType::Vm),
|
||||
@@ -280,11 +320,15 @@ pub fn address_map(
|
||||
.with_tokens(&create_call.tokens)
|
||||
.with_attributes(&[
|
||||
("pool_type", "WeightedPool2TokensFactory".as_bytes()),
|
||||
("weights", &create_call.weights.serialize_bytes()),
|
||||
("weights", &json_serialize_bigint_list(&create_call.weights)),
|
||||
("pool_id", &pool_registered.pool_id),
|
||||
(
|
||||
"pool_id",
|
||||
format!("0x{}", hex::encode(pool_registered.pool_id)).as_bytes(),
|
||||
"fee",
|
||||
&create_call
|
||||
.swap_fee_percentage
|
||||
.to_signed_bytes_be(),
|
||||
),
|
||||
("manual_updates", &[1u8]),
|
||||
])
|
||||
.as_swap_type("balancer_pool", ImplementationType::Vm),
|
||||
)
|
||||
|
||||
125
substreams/ethereum-balancer/test_assets.yaml
Normal file
125
substreams/ethereum-balancer/test_assets.yaml
Normal file
@@ -0,0 +1,125 @@
|
||||
substreams_yaml_path: ./substreams.yaml
|
||||
protocol_type_names:
|
||||
- "balancer_pool"
|
||||
adapter_contract: "BalancerV2SwapAdapter"
|
||||
skip_balance_check: true
|
||||
initialized_accounts:
|
||||
- "0xba12222222228d8ba445958a75a0704d566bf2c8"
|
||||
# Uncomment entries below to include composable stable pool dependencies
|
||||
# wstETH dependencies
|
||||
# - "0x72D07D7DcA67b8A406aD1Ec34ce969c90bFEE768"
|
||||
# - "0xb8ffc3cd6e7cf5a098a1c92f48009765b24088dc"
|
||||
# - "0xae7ab96520de3a18e5e111b5eaab095312d7fe84"
|
||||
# - "0x7f39c581f595b53c5cb19bd0b3f8da6c935e2ca0"
|
||||
# - "0x2b33cf282f867a7ff693a66e11b0fcc5552e4425"
|
||||
# - "0x17144556fd3424edc8fc8a4c940b2d04936d17eb"
|
||||
# sfrxETH dependencies
|
||||
# - "0x302013E7936a39c358d07A3Df55dc94EC417E3a1"
|
||||
# - "0xac3e018457b222d93114458476f3e3416abbe38f"
|
||||
# rETH dependencies
|
||||
# - "0x1a8F81c256aee9C640e14bB0453ce247ea0DFE6F"
|
||||
# - "0x07fcabcbe4ff0d80c2b1eb42855c0131b6cba2f4"
|
||||
# - "0x1d8f8f00cfa6758d7be78336684788fb0ee0fa46"
|
||||
# - "0xae78736cd615f374d3085123a210448e74fc6393"
|
||||
tests:
|
||||
# WeightedPoolFactory - 0x897888115Ada5773E02aA29F775430BFB5F34c51
|
||||
- name: test_weighted_pool_creation
|
||||
start_block: 20128706
|
||||
stop_block: 20128806
|
||||
expected_state:
|
||||
protocol_components:
|
||||
- id: "0xe96a45f66bdDA121B24F0a861372A72E8889523d"
|
||||
tokens:
|
||||
- "0x38C2a4a7330b22788374B8Ff70BBa513C8D848cA"
|
||||
- "0x514910771AF9Ca656af840dff83E8264EcF986CA"
|
||||
static_attributes: null
|
||||
creation_tx: "0xa63c671046ad2075ec8ea83ac21199cf3e3a5f433e72ec4c117cbabfb9b18de2"
|
||||
|
||||
# WeightedPool2TokensFactory - 0xA5bf2ddF098bb0Ef6d120C98217dD6B141c74EE0
|
||||
- name: weighted_legacy_creation
|
||||
start_block: 13148365
|
||||
stop_block: 13148465
|
||||
expected_state:
|
||||
protocol_components:
|
||||
- id: "0xBF96189Eee9357a95C7719f4F5047F76bdE804E5"
|
||||
tokens:
|
||||
- "0x5A98FcBEA516Cf06857215779Fd812CA3beF1B32"
|
||||
- "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
|
||||
static_attributes: null
|
||||
creation_tx: "0xdced662e41b1608c386551bbc89894a10321fd8bd58782e22077d1044cf99cb5"
|
||||
|
||||
# ComposableStablePoolFactory - 0xDB8d758BCb971e482B2C45f7F8a7740283A1bd3A
|
||||
- name: test_composable_stable_pool_creation
|
||||
start_block: 17677300
|
||||
stop_block: 17678400
|
||||
expected_state:
|
||||
protocol_components:
|
||||
- id: "0x42ED016F826165C2e5976fe5bC3df540C5aD0Af7"
|
||||
tokens:
|
||||
- "0x42ed016f826165c2e5976fe5bc3df540c5ad0af7"
|
||||
- "0x7f39C581F595B53c5cb19bD0b3f8dA6c935E2Ca0"
|
||||
- "0xac3E018457B222d93114458476f3E3416Abbe38F"
|
||||
- "0xae78736Cd615f374D3085123A210448E74Fc6393"
|
||||
static_attributes: null
|
||||
skip_simulation: true
|
||||
creation_tx: "0x53ff6bab0d8a76a998e29e59da8068ad906ae85507a1c2fbf2505e2cb52fd754"
|
||||
|
||||
# ERC4626LinearPoolFactory - 0x813EE7a840CE909E7Fea2117A44a90b8063bd4fd
|
||||
- name: test_erc4626_linear_pool_creation
|
||||
start_block: 17480142
|
||||
stop_block: 17480242
|
||||
expected_state:
|
||||
protocol_components:
|
||||
- id: "0x3fCb7085B8F2F473F80bF6D879cAe99eA4DE9344"
|
||||
tokens:
|
||||
- "0x39Dd7790e75C6F663731f7E1FdC0f35007D3879b"
|
||||
- "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
|
||||
- "0x3fcb7085b8f2f473f80bf6d879cae99ea4de9344"
|
||||
static_attributes: null
|
||||
skip_simulation: true
|
||||
creation_tx: "0x5ff97870685370bab3876a4335d28c42e24659064fe78b486d6fb1b37b992877"
|
||||
|
||||
# EulerLinearPoolFactory - 0x5F43FBa61f63Fa6bFF101a0A0458cEA917f6B347
|
||||
- name: test_euler_linear_pool_creation
|
||||
start_block: 16588117
|
||||
stop_block: 16588217
|
||||
expected_state:
|
||||
protocol_components:
|
||||
- id: "0xD4e7C1F3DA1144c9E2CfD1b015eDA7652b4a4399"
|
||||
tokens:
|
||||
- "0xD4e7C1F3DA1144c9E2CfD1b015eDA7652b4a4399"
|
||||
- "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48"
|
||||
- "0xEb91861f8A4e1C12333F42DCE8fB0Ecdc28dA716"
|
||||
static_attributes: null
|
||||
skip_simulation: true
|
||||
creation_tx: "0x4a9ea683052afefdae3d189862868c3a7dc8f431d1d9828b6bfd9451a8816426"
|
||||
|
||||
# SiloLinearPoolFactory - 0x4E11AEec21baF1660b1a46472963cB3DA7811C89
|
||||
- name: test_silo_linear_pool_creation
|
||||
start_block: 17173185
|
||||
stop_block: 17173187
|
||||
expected_state:
|
||||
protocol_components:
|
||||
- id: "0x74CBfAF94A3577c539a9dCEE9870A6349a33b34f"
|
||||
tokens:
|
||||
- "0x192E67544694a7bAA2DeA94f9B1Df58BB3395A12"
|
||||
- "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
|
||||
- "0x74cbfaf94a3577c539a9dcee9870a6349a33b34f"
|
||||
static_attributes: null
|
||||
skip_simulation: true
|
||||
creation_tx: "0x215c9f4256ab450368132f4063611ae8cdd98e80bea7e44ecf0600ed1d757018"
|
||||
|
||||
# YearnLinearPoolFactory - 0x5F5222Ffa40F2AEd6380D022184D6ea67C776eE0a
|
||||
- name: test_yearn_linear_pool_creation
|
||||
start_block: 17052601
|
||||
stop_block: 17052605
|
||||
expected_state:
|
||||
protocol_components:
|
||||
- id: "0xac5b4ef7ede2f2843a704e96dcaa637f4ba3dc3f"
|
||||
tokens:
|
||||
- "0x806E02Dea8d4a0882caD9fA3Fa75B212328692dE"
|
||||
- "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
|
||||
- "0xac5b4ef7ede2f2843a704e96dcaa637f4ba3dc3f"
|
||||
static_attributes: null
|
||||
skip_simulation: true
|
||||
creation_tx: "0x497aa03ce84d236c183204ddfc6762c8e4158da1ebc5e7e18e7f6cceaa497a2a"
|
||||
BIN
substreams/ethereum-curve/evm/CurveSwapAdapter.evm.runtime
Normal file
BIN
substreams/ethereum-curve/evm/CurveSwapAdapter.evm.runtime
Normal file
Binary file not shown.
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -929,8 +929,8 @@ pub mod events {
|
||||
.topics
|
||||
.get(0)
|
||||
.expect("bounds already checked")
|
||||
.as_ref() ==
|
||||
Self::TOPIC_ID;
|
||||
.as_ref()
|
||||
== Self::TOPIC_ID;
|
||||
}
|
||||
pub fn decode(log: &substreams_ethereum::pb::eth::v2::Log) -> Result<Self, String> {
|
||||
let mut values =
|
||||
@@ -1009,8 +1009,8 @@ pub mod events {
|
||||
.topics
|
||||
.get(0)
|
||||
.expect("bounds already checked")
|
||||
.as_ref() ==
|
||||
Self::TOPIC_ID;
|
||||
.as_ref()
|
||||
== Self::TOPIC_ID;
|
||||
}
|
||||
pub fn decode(log: &substreams_ethereum::pb::eth::v2::Log) -> Result<Self, String> {
|
||||
let mut values =
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,9 +1,9 @@
|
||||
#![allow(clippy::all)]
|
||||
pub mod crypto_pool_factory;
|
||||
pub mod stableswap_factory;
|
||||
pub mod crypto_swap_ng_factory;
|
||||
pub mod meta_registry;
|
||||
pub mod tricrypto_factory;
|
||||
pub mod twocrypto_factory;
|
||||
pub mod erc20;
|
||||
pub mod meta_pool_factory;
|
||||
pub mod meta_registry;
|
||||
pub mod stableswap_factory;
|
||||
pub mod tricrypto_factory;
|
||||
pub mod twocrypto_factory;
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,8 @@
|
||||
substreams_yaml_path: ./substreams.yaml
|
||||
protocol_type_names:
|
||||
- "curve_pool"
|
||||
adapter_contract: "CurveSwapAdapter.evm.runtime"
|
||||
skip_balance_check: false
|
||||
tests:
|
||||
- name: test_3pool_creation
|
||||
start_block: 10809470
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
substreams_yaml_path: ./substreams.yaml
|
||||
adapter_contract: "SwapAdapter.evm.runtime"
|
||||
skip_balance_check: false
|
||||
protocol_type_names:
|
||||
- "type_name_1"
|
||||
- "type_name_2"
|
||||
@@ -6,6 +8,8 @@ tests:
|
||||
- name: test_pool_creation
|
||||
start_block: 123
|
||||
stop_block: 456
|
||||
initialized_accounts:
|
||||
- "0x0c0e5f2fF0ff18a3be9b835635039256dC4B4963" # Needed for ....
|
||||
expected_state:
|
||||
protocol_components:
|
||||
- id: "0xbebc44782c7db0a1a60cb6fe97d0b483032ff1c7"
|
||||
@@ -15,6 +19,7 @@ tests:
|
||||
- "0x6b175474e89094c44da98b954eedeac495271d0f"
|
||||
static_attributes:
|
||||
creation_tx: "0x20793bbf260912aae189d5d261ff003c9b9166da8191d8f9d63ff1c7722f3ac6"
|
||||
skip_simulation: false
|
||||
- name: test_something_else
|
||||
start_block: 123
|
||||
stop_block: 456
|
||||
@@ -26,3 +31,4 @@ tests:
|
||||
- "0xae7ab96520DE3A18E5e111B5EaAb095312D7fE84"
|
||||
static_attributes:
|
||||
creation_tx: "0xfac67ecbd423a5b915deff06045ec9343568edaec34ae95c43d35f2c018afdaa"
|
||||
skip_simulation: true # If true, always add a reason
|
||||
|
||||
@@ -6,7 +6,8 @@ WORKDIR /app
|
||||
|
||||
# Add current directory code to /app in container
|
||||
ADD . /app/testing
|
||||
RUN chmod +x /app/testing/tycho-indexer
|
||||
|
||||
RUN chmod +x /app/testing/tycho-indexer-linux-x64
|
||||
|
||||
# Create a new conda environment and install pip
|
||||
RUN conda create -n myenv pip python=3.9
|
||||
@@ -20,7 +21,7 @@ RUN apt-get update \
|
||||
&& pip install psycopg2 \
|
||||
&& apt-get clean
|
||||
|
||||
RUN /bin/bash -c "source activate myenv && pip install --no-cache-dir -r testing/requirements.txt"
|
||||
RUN /bin/bash -c "source activate myenv && cd testing && pip install --no-cache-dir -r requirements.txt && cd -"
|
||||
|
||||
# Make port 80 available to the world outside this container
|
||||
EXPOSE 80
|
||||
|
||||
@@ -21,6 +21,10 @@ Tests are defined in a `yaml` file. A template can be found at `substreams/ether
|
||||
|
||||
Each test will index all blocks between `start-block` and `stop-block` and verify that the indexed state matches the expected state.
|
||||
|
||||
You will also need the EVM Runtime file for the adapter contract.
|
||||
The script to generate this file is available under `evm/scripts/buildRuntime.sh`.
|
||||
Please place this Runtime file under the respective `substream` directory inside the `evm` folder.
|
||||
|
||||
## Running Tests
|
||||
|
||||
### Step 1: Export Environment Variables
|
||||
|
||||
@@ -1,29 +0,0 @@
|
||||
import argparse
|
||||
from runner import TestRunner
|
||||
|
||||
|
||||
def main() -> None:
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Run indexer within a specified range of blocks"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--test_yaml_path", type=str, help="Path to the test configuration YAML file."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--with_binary_logs",
|
||||
action="store_true",
|
||||
help="Flag to activate logs from Tycho.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--db_url",
|
||||
type=str,
|
||||
help="Postgres database URL for the Tycho indexer.",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
test_runner = TestRunner(args.test_yaml_path, args.with_binary_logs, db_url=args.db_url)
|
||||
test_runner.run_tests()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -1,16 +1,18 @@
|
||||
version: '3.1'
|
||||
services:
|
||||
db:
|
||||
image: ghcr.io/dbsystel/postgresql-partman:15-5
|
||||
build:
|
||||
context: .
|
||||
dockerfile: postgres.Dockerfile
|
||||
restart: "always"
|
||||
environment:
|
||||
POSTGRESQL_PASSWORD: mypassword
|
||||
POSTGRESQL_DATABASE: tycho_indexer_0
|
||||
POSTGRESQL_USERNAME: postgres
|
||||
POSTGRESQL_SHARED_PRELOAD_LIBRARIES: pg_cron
|
||||
ports:
|
||||
- "5431:5432"
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
shm_size: '1gb'
|
||||
app:
|
||||
build:
|
||||
context: .
|
||||
@@ -20,7 +22,7 @@ services:
|
||||
- ../substreams:/app/substreams
|
||||
- ../proto:/app/proto
|
||||
- ./tycho-indexer:/app/testing/tycho-indexer
|
||||
- ./runner.py:/app/testing/runner.py
|
||||
- ./src/runner/runner.py:/app/testing/src.py
|
||||
ports:
|
||||
- "80:80"
|
||||
depends_on:
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
import os
|
||||
from web3 import Web3
|
||||
|
||||
native_aliases = ["0x0000000000000000000000000000000000000000","0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee"]
|
||||
|
||||
erc20_abi = [
|
||||
{
|
||||
"constant": True,
|
||||
"inputs": [{"name": "_owner", "type": "address"}],
|
||||
"name": "balanceOf",
|
||||
"outputs": [{"name": "balance", "type": "uint256"}],
|
||||
"type": "function",
|
||||
}
|
||||
]
|
||||
|
||||
def get_token_balance(token_address, wallet_address, block_number):
|
||||
rpc_url = os.getenv("RPC_URL")
|
||||
|
||||
if rpc_url is None:
|
||||
raise EnvironmentError("RPC_URL environment variable not set")
|
||||
|
||||
web3 = Web3(Web3.HTTPProvider(rpc_url))
|
||||
|
||||
if not web3.isConnected():
|
||||
raise ConnectionError("Failed to connect to the Ethereum node")
|
||||
|
||||
# Check if the token_address is a native token alias
|
||||
if token_address.lower() in native_aliases:
|
||||
balance = web3.eth.get_balance(Web3.toChecksumAddress(wallet_address), block_identifier=block_number)
|
||||
else:
|
||||
contract = web3.eth.contract(address=Web3.toChecksumAddress(token_address), abi=erc20_abi)
|
||||
balance = contract.functions.balanceOf(Web3.toChecksumAddress(wallet_address)).call(
|
||||
block_identifier=block_number
|
||||
)
|
||||
|
||||
return balance
|
||||
16
testing/postgres.Dockerfile
Normal file
16
testing/postgres.Dockerfile
Normal file
@@ -0,0 +1,16 @@
|
||||
# This Dockerfile creates a custom postgres image used for CI and local deployment.
|
||||
# This is required because we use some postgres extensions that aren't in the generic Postgres image such as pg_partman or pg_cron.
|
||||
|
||||
# As an image with pg_partman already exist, we start from this one an add pg_cron and possibly other extensions on top of that.
|
||||
FROM ghcr.io/dbsystel/postgresql-partman:15-5
|
||||
ARG PGCRON_VERSION="1.6.2"
|
||||
USER root
|
||||
RUN cd /tmp \
|
||||
&& wget "https://github.com/citusdata/pg_cron/archive/refs/tags/v${PGCRON_VERSION}.tar.gz" \
|
||||
&& tar zxf v${PGCRON_VERSION}.tar.gz \
|
||||
&& cd pg_cron-${PGCRON_VERSION} \
|
||||
&& make \
|
||||
&& make install \
|
||||
&& cd .. && rm -r pg_cron-${PGCRON_VERSION} v${PGCRON_VERSION}.tar.gz
|
||||
RUN echo "cron.database_name = 'tycho_indexer_0'" >> /opt/bitnami/postgresql/conf/postgresql.conf
|
||||
USER 1001
|
||||
@@ -2,3 +2,4 @@ psycopg2==2.9.9
|
||||
PyYAML==6.0.1
|
||||
Requests==2.32.2
|
||||
web3==5.31.3
|
||||
-e ./tycho-client
|
||||
@@ -1,155 +0,0 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
import yaml
|
||||
|
||||
from evm import get_token_balance
|
||||
from tycho import TychoRunner
|
||||
|
||||
|
||||
class TestResult:
|
||||
def __init__(self, success: bool, message: str = None):
|
||||
self.success = success
|
||||
self.message = message
|
||||
|
||||
@classmethod
|
||||
def Passed(cls):
|
||||
return cls(success=True)
|
||||
|
||||
@classmethod
|
||||
def Failed(cls, message: str):
|
||||
return cls(success=False, message=message)
|
||||
|
||||
|
||||
def load_config(yaml_path: str) -> dict:
|
||||
"""Load YAML configuration from a specified file path."""
|
||||
with open(yaml_path, "r") as file:
|
||||
return yaml.safe_load(file)
|
||||
|
||||
|
||||
class TestRunner:
|
||||
def __init__(self, config_path: str, with_binary_logs: bool, db_url: str):
|
||||
self.config = load_config(config_path)
|
||||
self.base_dir = os.path.dirname(config_path)
|
||||
self.tycho_runner = TychoRunner(with_binary_logs)
|
||||
self.db_url = db_url
|
||||
|
||||
def run_tests(self) -> None:
|
||||
"""Run all tests specified in the configuration."""
|
||||
print(f"Running tests ...")
|
||||
for test in self.config["tests"]:
|
||||
|
||||
spkg_path = self.build_spkg(
|
||||
os.path.join(self.base_dir, self.config["substreams_yaml_path"]),
|
||||
lambda data: self.update_initial_block(data, test["start_block"]),
|
||||
)
|
||||
self.tycho_runner.run_tycho(
|
||||
spkg_path,
|
||||
test["start_block"],
|
||||
test["stop_block"],
|
||||
self.config["protocol_type_names"],
|
||||
)
|
||||
|
||||
result = self.tycho_runner.run_with_rpc_server(
|
||||
self.validate_state, test["expected_state"], test["stop_block"]
|
||||
)
|
||||
|
||||
if result.success:
|
||||
print(f"✅ {test['name']} passed.")
|
||||
else:
|
||||
print(f"❗️ {test['name']} failed: {result.message}")
|
||||
|
||||
self.tycho_runner.empty_database(
|
||||
self.db_url
|
||||
)
|
||||
|
||||
def validate_state(self, expected_state: dict, stop_block: int) -> TestResult:
|
||||
"""Validate the current protocol state against the expected state."""
|
||||
protocol_components = self.tycho_runner.get_protocol_components()
|
||||
protocol_states = self.tycho_runner.get_protocol_state()
|
||||
components = {
|
||||
component["id"]: component
|
||||
for component in protocol_components["protocol_components"]
|
||||
}
|
||||
|
||||
try:
|
||||
for expected_component in expected_state.get("protocol_components", []):
|
||||
comp_id = expected_component["id"].lower()
|
||||
if comp_id not in components:
|
||||
return TestResult.Failed(
|
||||
f"'{comp_id}' not found in protocol components."
|
||||
)
|
||||
|
||||
component = components[comp_id]
|
||||
for key, value in expected_component.items():
|
||||
if key not in component:
|
||||
return TestResult.Failed(
|
||||
f"Missing '{key}' in component '{comp_id}'."
|
||||
)
|
||||
if isinstance(value, list):
|
||||
if set(map(str.lower, value)) != set(
|
||||
map(str.lower, component[key])
|
||||
):
|
||||
return TestResult.Failed(
|
||||
f"List mismatch for key '{key}': {value} != {component[key]}"
|
||||
)
|
||||
elif value is not None and value.lower() != component[key]:
|
||||
return TestResult.Failed(
|
||||
f"Value mismatch for key '{key}': {value} != {component[key]}"
|
||||
)
|
||||
|
||||
for component in protocol_components["protocol_components"]:
|
||||
comp_id = component["id"].lower()
|
||||
for token in component["tokens"]:
|
||||
token_lower = token.lower()
|
||||
state = next((s for s in protocol_states["states"] if s["component_id"].lower() == comp_id), None)
|
||||
if state:
|
||||
balance_hex = state["balances"].get(token_lower, "0x0")
|
||||
else:
|
||||
balance_hex = "0x0"
|
||||
|
||||
node_balance = get_token_balance(token, comp_id, stop_block)
|
||||
tycho_balance = int(balance_hex, 16)
|
||||
if node_balance != tycho_balance:
|
||||
return TestResult.Failed(
|
||||
f"Balance mismatch for {comp_id}:{token} at block {stop_block}: got {node_balance} from rpc call and {tycho_balance} from Substreams")
|
||||
return TestResult.Passed()
|
||||
except Exception as e:
|
||||
return TestResult.Failed(str(e))
|
||||
|
||||
@staticmethod
|
||||
def build_spkg(yaml_file_path: str, modify_func: callable) -> str:
|
||||
"""Build a Substreams package with modifications to the YAML file."""
|
||||
backup_file_path = f"{yaml_file_path}.backup"
|
||||
shutil.copy(yaml_file_path, backup_file_path)
|
||||
|
||||
with open(yaml_file_path, "r") as file:
|
||||
data = yaml.safe_load(file)
|
||||
|
||||
modify_func(data)
|
||||
spkg_name = f"{yaml_file_path.rsplit('/', 1)[0]}/{data['package']['name'].replace('_', '-', 1)}-{data['package']['version']}.spkg"
|
||||
|
||||
with open(yaml_file_path, "w") as file:
|
||||
yaml.dump(data, file, default_flow_style=False)
|
||||
|
||||
try:
|
||||
result = subprocess.run(
|
||||
["substreams", "pack", yaml_file_path], capture_output=True, text=True
|
||||
)
|
||||
if result.returncode != 0:
|
||||
print("Substreams pack command failed:", result.stderr)
|
||||
except Exception as e:
|
||||
print(f"Error running substreams pack command: {e}")
|
||||
|
||||
shutil.copy(backup_file_path, yaml_file_path)
|
||||
Path(backup_file_path).unlink()
|
||||
|
||||
return spkg_name
|
||||
|
||||
@staticmethod
|
||||
def update_initial_block(data: dict, start_block: int) -> None:
|
||||
"""Update the initial block for all modules in the configuration data."""
|
||||
for module in data["modules"]:
|
||||
module["initialBlock"] = start_block
|
||||
0
testing/src/runner/__init__.py
Normal file
37
testing/src/runner/cli.py
Normal file
@@ -0,0 +1,37 @@
|
||||
import argparse
|
||||
from runner import TestRunner
|
||||
|
||||
|
||||
def main() -> None:
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Run indexer within a specified range of blocks"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--package", type=str, help="Name of the package to test."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--tycho-logs",
|
||||
action="store_true",
|
||||
help="Flag to activate logs from Tycho.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--db-url", type=str, help="Postgres database URL for the Tycho indexer."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--vm-traces",
|
||||
action="store_true",
|
||||
help="Enable tracing during vm simulations.",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
test_runner = TestRunner(
|
||||
args.package,
|
||||
args.tycho_logs,
|
||||
db_url=args.db_url,
|
||||
vm_traces=args.vm_traces,
|
||||
)
|
||||
test_runner.run_tests()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
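# Example invocation (all values are placeholders), assuming this script is run
# with testing/src/runner on the PYTHONPATH and a local Postgres instance for the
# indexer:
#
#   python cli.py --package my-package \
#       --db-url postgres://postgres:mypassword@localhost:5431/tycho_indexer_0 \
#       --tycho-logs --vm-traces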
|
||||
61
testing/src/runner/evm.py
Normal file
@@ -0,0 +1,61 @@
|
||||
import os
|
||||
|
||||
from web3 import Web3
|
||||
|
||||
native_aliases = [
|
||||
"0x0000000000000000000000000000000000000000",
|
||||
"0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee",
|
||||
]
|
||||
|
||||
erc20_abi = [
|
||||
{
|
||||
"constant": True,
|
||||
"inputs": [{"name": "_owner", "type": "address"}],
|
||||
"name": "balanceOf",
|
||||
"outputs": [{"name": "balance", "type": "uint256"}],
|
||||
"type": "function",
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
def get_token_balance(token_address, wallet_address, block_number):
|
||||
rpc_url = os.getenv("RPC_URL")
|
||||
|
||||
if rpc_url is None:
|
||||
raise EnvironmentError("RPC_URL environment variable not set")
|
||||
|
||||
web3 = Web3(Web3.HTTPProvider(rpc_url))
|
||||
|
||||
if not web3.isConnected():
|
||||
raise ConnectionError("Failed to connect to the Ethereum node")
|
||||
|
||||
# Check if the token_address is a native token alias
|
||||
if token_address.lower() in native_aliases:
|
||||
balance = web3.eth.get_balance(
|
||||
Web3.toChecksumAddress(wallet_address), block_identifier=block_number
|
||||
)
|
||||
else:
|
||||
contract = web3.eth.contract(
|
||||
address=Web3.toChecksumAddress(token_address), abi=erc20_abi
|
||||
)
|
||||
balance = contract.functions.balanceOf(
|
||||
Web3.toChecksumAddress(wallet_address)
|
||||
).call(block_identifier=block_number)
|
||||
|
||||
return balance
|
||||
|
||||
|
||||
def get_block_header(block_number):
|
||||
rpc_url = os.getenv("RPC_URL")
|
||||
|
||||
if rpc_url is None:
|
||||
raise EnvironmentError("RPC_URL environment variable not set")
|
||||
|
||||
web3 = Web3(Web3.HTTPProvider(rpc_url))
|
||||
|
||||
if not web3.isConnected():
|
||||
raise ConnectionError("Failed to connect to the Ethereum node")
|
||||
|
||||
block = web3.eth.get_block(block_number)
|
||||
|
||||
return block
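# Example usage (hypothetical addresses; RPC_URL must point at an archive node so
# that historical blocks can be queried):
#
#   os.environ["RPC_URL"] = "https://example-archive-node"  # placeholder
#   weth = "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
#   pool = "0x..."  # component id / balance owner, placeholder
#   print(get_token_balance(weth, pool, 19_000_000))
#   print(get_block_header(19_000_000).timestamp)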
|
||||
282
testing/src/runner/runner.py
Normal file
@@ -0,0 +1,282 @@
|
||||
import itertools
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
from collections import defaultdict
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
from pathlib import Path
|
||||
import traceback
|
||||
|
||||
import yaml
|
||||
from pydantic import BaseModel
|
||||
from tycho_client.decoders import ThirdPartyPoolTychoDecoder
|
||||
from tycho_client.models import Blockchain, EVMBlock
|
||||
from tycho_client.tycho_adapter import TychoPoolStateStreamAdapter
|
||||
|
||||
from evm import get_token_balance, get_block_header
|
||||
from tycho import TychoRunner, TychoRPCClient
|
||||
|
||||
|
||||
class TestResult:
|
||||
def __init__(self, success: bool, message: str = None):
|
||||
self.success = success
|
||||
self.message = message
|
||||
|
||||
@classmethod
|
||||
def Passed(cls):
|
||||
return cls(success=True)
|
||||
|
||||
@classmethod
|
||||
def Failed(cls, message: str):
|
||||
return cls(success=False, message=message)
|
||||
|
||||
|
||||
def load_config(yaml_path: str) -> dict:
|
||||
"""Load YAML configuration from a specified file path."""
|
||||
with open(yaml_path, "r") as file:
|
||||
return yaml.safe_load(file)
|
||||
|
||||
|
||||
class SimulationFailure(BaseModel):
|
||||
pool_id: str
|
||||
sell_token: str
|
||||
buy_token: str
|
||||
error: str
|
||||
|
||||
|
||||
class TestRunner:
|
||||
def __init__(self, package: str, with_binary_logs: bool, db_url: str, vm_traces: bool):
|
||||
self.repo_root = os.getcwd()
|
||||
config_path = os.path.join(self.repo_root, "substreams", package, "test_assets.yaml")
|
||||
self.config = load_config(config_path)
|
||||
self.spkg_src = os.path.join(self.repo_root, "substreams", package)
|
||||
self.adapters_src = os.path.join(self.repo_root, "evm")
|
||||
self.tycho_runner = TychoRunner(db_url, with_binary_logs, self.config["initialized_accounts"])
|
||||
self.tycho_rpc_client = TychoRPCClient()
|
||||
self.db_url = db_url
|
||||
self._vm_traces = vm_traces
|
||||
self._chain = Blockchain.ethereum
|
||||
|
||||
def run_tests(self) -> None:
|
||||
"""Run all tests specified in the configuration."""
|
||||
print(f"Running tests ...")
|
||||
for test in self.config["tests"]:
|
||||
self.tycho_runner.empty_database(self.db_url)
|
||||
|
||||
spkg_path = self.build_spkg(
|
||||
os.path.join(self.spkg_src, self.config["substreams_yaml_path"]),
|
||||
lambda data: self.update_initial_block(data, test["start_block"]),
|
||||
)
|
||||
self.tycho_runner.run_tycho(
|
||||
spkg_path,
|
||||
test["start_block"],
|
||||
test["stop_block"],
|
||||
self.config["protocol_type_names"],
|
||||
test.get("initialized_accounts", []),
|
||||
)
|
||||
|
||||
result = self.tycho_runner.run_with_rpc_server(
|
||||
self.validate_state, test["expected_state"], test["stop_block"]
|
||||
)
|
||||
|
||||
if result.success:
|
||||
print(f"✅ {test['name']} passed.")
|
||||
|
||||
else:
|
||||
print(f"❗️ {test['name']} failed: {result.message}")
|
||||
|
||||
def validate_state(self, expected_state: dict, stop_block: int) -> TestResult:
|
||||
"""Validate the current protocol state against the expected state."""
|
||||
protocol_components = self.tycho_rpc_client.get_protocol_components()
|
||||
protocol_states = self.tycho_rpc_client.get_protocol_state()
|
||||
components = {
|
||||
component["id"]: component
|
||||
for component in protocol_components["protocol_components"]
|
||||
}
|
||||
|
||||
try:
|
||||
for expected_component in expected_state.get("protocol_components", []):
|
||||
comp_id = expected_component["id"].lower()
|
||||
if comp_id not in components:
|
||||
return TestResult.Failed(
|
||||
f"'{comp_id}' not found in protocol components."
|
||||
)
|
||||
|
||||
component = components[comp_id]
|
||||
for key, value in expected_component.items():
|
||||
if key not in ["tokens", "static_attributes", "creation_tx"]:
|
||||
continue
|
||||
if key not in component:
|
||||
return TestResult.Failed(
|
||||
f"Missing '{key}' in component '{comp_id}'."
|
||||
)
|
||||
if isinstance(value, list):
|
||||
if set(map(str.lower, value)) != set(
|
||||
map(str.lower, component[key])
|
||||
):
|
||||
return TestResult.Failed(
|
||||
f"List mismatch for key '{key}': {value} != {component[key]}"
|
||||
)
|
||||
elif value is not None and value.lower() != component[key]:
|
||||
return TestResult.Failed(
|
||||
f"Value mismatch for key '{key}': {value} != {component[key]}"
|
||||
)
|
||||
|
||||
token_balances: dict[str, dict[str, int]] = defaultdict(dict)
|
||||
for component in protocol_components["protocol_components"]:
|
||||
comp_id = component["id"].lower()
|
||||
for token in component["tokens"]:
|
||||
token_lower = token.lower()
|
||||
state = next(
|
||||
(
|
||||
s
|
||||
for s in protocol_states["states"]
|
||||
if s["component_id"].lower() == comp_id
|
||||
),
|
||||
None,
|
||||
)
|
||||
if state:
|
||||
balance_hex = state["balances"].get(token_lower, "0x0")
|
||||
else:
|
||||
balance_hex = "0x0"
|
||||
tycho_balance = int(balance_hex, 16)
|
||||
token_balances[comp_id][token_lower] = tycho_balance
|
||||
|
||||
if self.config["skip_balance_check"] is not True:
|
||||
node_balance = get_token_balance(token, comp_id, stop_block)
|
||||
if node_balance != tycho_balance:
|
||||
return TestResult.Failed(
|
||||
f"Balance mismatch for {comp_id}:{token} at block {stop_block}: got {node_balance} "
|
||||
f"from rpc call and {tycho_balance} from Substreams"
|
||||
)
|
||||
contract_states = self.tycho_rpc_client.get_contract_state()
|
||||
simulated_ids = [
    c["id"].lower()
    for c in expected_state["protocol_components"]
    if c.get("skip_simulation", False) is False
]
filtered_components = {
    "protocol_components": [
        pc for pc in protocol_components["protocol_components"]
        if pc["id"] in simulated_ids
    ]
}
|
||||
simulation_failures = self.simulate_get_amount_out(
|
||||
stop_block,
|
||||
protocol_states,
|
||||
filtered_components,
|
||||
contract_states,
|
||||
)
|
||||
if len(simulation_failures):
|
||||
error_msgs = []
|
||||
for pool_id, failures in simulation_failures.items():
|
||||
failures_ = [
|
||||
f"{f.sell_token} -> {f.buy_token}: {f.error}" for f in failures
|
||||
]
|
||||
error_msgs.append(
|
||||
f"Pool {pool_id} failed simulations: {', '.join(failures_)}"
|
||||
)
|
||||
raise ValueError(". ".join(error_msgs))
|
||||
|
||||
return TestResult.Passed()
|
||||
except Exception as e:
|
||||
error_message = f"An error occurred: {str(e)}\n" + traceback.format_exc()
|
||||
return TestResult.Failed(error_message)
|
||||
|
||||
def simulate_get_amount_out(
|
||||
self,
|
||||
block_number: int,
|
||||
protocol_states: dict,
|
||||
protocol_components: dict,
|
||||
contract_state: dict,
|
||||
) -> dict[str, list[SimulationFailure]]:
|
||||
protocol_type_names = self.config["protocol_type_names"]
|
||||
|
||||
block_header = get_block_header(block_number)
|
||||
block: EVMBlock = EVMBlock(
|
||||
id=block_number,
|
||||
ts=datetime.fromtimestamp(block_header.timestamp),
|
||||
hash_=block_header.hash.hex(),
|
||||
)
|
||||
|
||||
failed_simulations: dict[str, list[SimulationFailure]] = dict()
|
||||
for protocol in protocol_type_names:
|
||||
adapter_contract = os.path.join(
|
||||
self.adapters_src, "out", f"{self.config['adapter_contract']}.sol",
|
||||
f"{self.config['adapter_contract']}.evm.runtime"
|
||||
)
|
||||
decoder = ThirdPartyPoolTychoDecoder(adapter_contract, 0, trace=self._vm_traces)
|
||||
stream_adapter = TychoPoolStateStreamAdapter(
|
||||
tycho_url="0.0.0.0:4242",
|
||||
protocol=protocol,
|
||||
decoder=decoder,
|
||||
blockchain=self._chain,
|
||||
)
|
||||
snapshot_message = stream_adapter.build_snapshot_message(
|
||||
protocol_components, protocol_states, contract_state
|
||||
)
|
||||
decoded = stream_adapter.process_snapshot(block, snapshot_message)
|
||||
|
||||
for pool_state in decoded.pool_states.values():
|
||||
pool_id = pool_state.id_
|
||||
if not pool_state.balances:
|
||||
raise ValueError(f"Missing balances for pool {pool_id}")
|
||||
for sell_token, buy_token in itertools.permutations(
|
||||
pool_state.tokens, 2
|
||||
):
|
||||
# Try to sell 0.1% of the protocol balance
|
||||
sell_amount = Decimal("0.001") * pool_state.balances[sell_token.address]
|
||||
try:
|
||||
amount_out, gas_used, _ = pool_state.get_amount_out(
|
||||
sell_token, sell_amount, buy_token
|
||||
)
|
||||
print(
|
||||
f"Amount out for {pool_id}: {sell_amount} {sell_token} -> {amount_out} {buy_token} - "
|
||||
f"Gas used: {gas_used}"
|
||||
)
|
||||
except Exception as e:
|
||||
print(
|
||||
f"Error simulating get_amount_out for {pool_id}: {sell_token} -> {buy_token}. "
|
||||
f"Error: {e}"
|
||||
)
|
||||
if pool_id not in failed_simulations:
|
||||
failed_simulations[pool_id] = []
|
||||
failed_simulations[pool_id].append(
|
||||
SimulationFailure(
|
||||
pool_id=pool_id,
|
||||
sell_token=str(sell_token),
|
||||
buy_token=str(buy_token),
|
||||
error=str(e),
|
||||
)
|
||||
)
|
||||
continue
|
||||
return failed_simulations
|
||||
|
||||
@staticmethod
|
||||
def build_spkg(yaml_file_path: str, modify_func: callable) -> str:
|
||||
"""Build a Substreams package with modifications to the YAML file."""
|
||||
backup_file_path = f"{yaml_file_path}.backup"
|
||||
shutil.copy(yaml_file_path, backup_file_path)
|
||||
|
||||
with open(yaml_file_path, "r") as file:
|
||||
data = yaml.safe_load(file)
|
||||
|
||||
modify_func(data)
|
||||
spkg_name = f"{yaml_file_path.rsplit('/', 1)[0]}/{data['package']['name'].replace('_', '-', 1)}-{data['package']['version']}.spkg"
|
||||
|
||||
with open(yaml_file_path, "w") as file:
|
||||
yaml.dump(data, file, default_flow_style=False)
|
||||
|
||||
try:
|
||||
result = subprocess.run(
|
||||
["substreams", "pack", yaml_file_path], capture_output=True, text=True
|
||||
)
|
||||
if result.returncode != 0:
|
||||
print("Substreams pack command failed:", result.stderr)
|
||||
except Exception as e:
|
||||
print(f"Error running substreams pack command: {e}")
|
||||
|
||||
shutil.copy(backup_file_path, yaml_file_path)
|
||||
Path(backup_file_path).unlink()
|
||||
|
||||
return spkg_name
|
||||
|
||||
@staticmethod
|
||||
def update_initial_block(data: dict, start_block: int) -> None:
|
||||
"""Update the initial block for all modules in the configuration data."""
|
||||
for module in data["modules"]:
|
||||
module["initialBlock"] = start_block
|
||||
@@ -1,35 +1,103 @@
|
||||
import signal
|
||||
import subprocess
|
||||
import threading
|
||||
import time
|
||||
import requests
|
||||
import subprocess
|
||||
import os
|
||||
|
||||
import psycopg2
|
||||
import requests
|
||||
from psycopg2 import sql
|
||||
|
||||
binary_path = "./testing/tycho-indexer"
|
||||
import os
|
||||
|
||||
|
||||
def find_binary_file(file_name):
|
||||
# Define usual locations for binary files in Unix-based systems
|
||||
locations = [
|
||||
"/bin",
|
||||
"/sbin",
|
||||
"/usr/bin",
|
||||
"/usr/sbin",
|
||||
"/usr/local/bin",
|
||||
"/usr/local/sbin",
|
||||
]
|
||||
|
||||
# Add user's local bin directory if it exists
|
||||
home = os.path.expanduser("~")
|
||||
if os.path.exists(home + "/.local/bin"):
|
||||
locations.append(home + "/.local/bin")
|
||||
|
||||
# Check each location
|
||||
for location in locations:
|
||||
potential_path = location + "/" + file_name
|
||||
if os.path.exists(potential_path):
|
||||
return potential_path
|
||||
|
||||
# If the binary is not found in any of the usual locations, raise an error
|
||||
raise RuntimeError("Unable to locate tycho-indexer binary")
|
||||
|
||||
|
||||
binary_path = find_binary_file("tycho-indexer")
|
||||
|
||||
|
||||
class TychoRPCClient:
|
||||
def __init__(self, rpc_url: str = "http://0.0.0.0:4242"):
|
||||
self.rpc_url = rpc_url
|
||||
|
||||
def get_protocol_components(self) -> dict:
|
||||
"""Retrieve protocol components from the RPC server."""
|
||||
url = self.rpc_url + "/v1/ethereum/protocol_components"
|
||||
headers = {"accept": "application/json", "Content-Type": "application/json"}
|
||||
data = {"protocol_system": "test_protocol"}
|
||||
|
||||
response = requests.post(url, headers=headers, json=data)
|
||||
return response.json()
|
||||
|
||||
def get_protocol_state(self) -> dict:
|
||||
"""Retrieve protocol state from the RPC server."""
|
||||
url = self.rpc_url + "/v1/ethereum/protocol_state"
|
||||
headers = {"accept": "application/json", "Content-Type": "application/json"}
|
||||
data = {}
|
||||
|
||||
response = requests.post(url, headers=headers, json=data)
|
||||
return response.json()
|
||||
|
||||
def get_contract_state(self) -> dict:
|
||||
"""Retrieve contract state from the RPC server."""
|
||||
url = self.rpc_url + "/v1/ethereum/contract_state?include_balances=false"
|
||||
headers = {"accept": "application/json", "Content-Type": "application/json"}
|
||||
data = {}
|
||||
|
||||
response = requests.post(url, headers=headers, json=data)
|
||||
return response.json()
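# Example: with the indexer's RPC server running locally, the client can be used
# directly; the response keys below are the ones the test runner consumes.
#
#   client = TychoRPCClient()
#   components = client.get_protocol_components()["protocol_components"]
#   states = client.get_protocol_state()["states"]
#   print(f"{len(components)} components, {len(states)} states indexed")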
|
||||
|
||||
|
||||
class TychoRunner:
|
||||
def __init__(self, with_binary_logs: bool = False):
|
||||
def __init__(self, db_url: str, with_binary_logs: bool = False, initialized_accounts: list[str] = None):
|
||||
self.with_binary_logs = with_binary_logs
|
||||
self._db_url = db_url
|
||||
self._initialized_accounts = initialized_accounts or []
|
||||
|
||||
def run_tycho(
|
||||
self,
|
||||
spkg_path: str,
|
||||
start_block: int,
|
||||
end_block: int,
|
||||
protocol_type_names: list,
|
||||
self,
|
||||
spkg_path: str,
|
||||
start_block: int,
|
||||
end_block: int,
|
||||
protocol_type_names: list,
|
||||
initialized_accounts: list,
|
||||
) -> None:
|
||||
"""Run the Tycho indexer with the specified SPKG and block range."""
|
||||
|
||||
env = os.environ.copy()
|
||||
env["RUST_LOG"] = "info"
|
||||
env["RUST_LOG"] = "tycho_indexer=info"
|
||||
|
||||
all_accounts = self._initialized_accounts + initialized_accounts
|
||||
|
||||
try:
|
||||
process = subprocess.Popen(
|
||||
[
|
||||
binary_path,
|
||||
"--database-url",
|
||||
self._db_url,
|
||||
"run",
|
||||
"--spkg",
|
||||
spkg_path,
|
||||
@@ -40,15 +108,16 @@ class TychoRunner:
|
||||
"--start-block",
|
||||
str(start_block),
|
||||
"--stop-block",
|
||||
str(end_block + 2),
|
||||
], # +2 is to make up for the cache in the index side.
|
||||
# +2 is to make up for the cache in the index side.
|
||||
str(end_block + 2)
|
||||
] + (["--initialized-accounts", ",".join(all_accounts)] if all_accounts else []),
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
text=True,
|
||||
bufsize=1,
|
||||
env=env,
|
||||
)
|
||||
|
||||
)
|
||||
|
||||
with process.stdout:
|
||||
for line in iter(process.stdout.readline, ""):
|
||||
if line and self.with_binary_logs:
|
||||
@@ -80,7 +149,12 @@ class TychoRunner:
|
||||
env["RUST_LOG"] = "info"
|
||||
|
||||
process = subprocess.Popen(
|
||||
[binary_path, "rpc"],
|
||||
[
|
||||
binary_path,
|
||||
"--database-url",
|
||||
self._db_url,
|
||||
"rpc"
|
||||
],
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
text=True,
|
||||
@@ -126,36 +200,16 @@ class TychoRunner:
|
||||
if rpc_thread.is_alive():
|
||||
rpc_thread.join()
|
||||
|
||||
@staticmethod
|
||||
def get_protocol_components() -> dict:
|
||||
"""Retrieve protocol components from the RPC server."""
|
||||
url = "http://0.0.0.0:4242/v1/ethereum/protocol_components"
|
||||
headers = {"accept": "application/json", "Content-Type": "application/json"}
|
||||
data = {"protocol_system": "test_protocol"}
|
||||
|
||||
response = requests.post(url, headers=headers, json=data)
|
||||
return response.json()
|
||||
|
||||
@staticmethod
|
||||
def get_protocol_state() -> dict:
|
||||
"""Retrieve protocol state from the RPC server."""
|
||||
url = "http://0.0.0.0:4242/v1/ethereum/protocol_state"
|
||||
headers = {"accept": "application/json", "Content-Type": "application/json"}
|
||||
data = {}
|
||||
|
||||
response = requests.post(url, headers=headers, json=data)
|
||||
return response.json()
|
||||
|
||||
@staticmethod
|
||||
def empty_database(db_url: str) -> None:
|
||||
"""Drop and recreate the Tycho indexer database."""
|
||||
try:
|
||||
conn = psycopg2.connect(db_url)
|
||||
conn = psycopg2.connect(db_url[:db_url.rfind('/')])
|
||||
conn.autocommit = True
|
||||
cursor = conn.cursor()
|
||||
|
||||
cursor.execute(
|
||||
sql.SQL("DROP DATABASE IF EXISTS {}").format(
|
||||
sql.SQL("DROP DATABASE IF EXISTS {} WITH (FORCE)").format(
|
||||
sql.Identifier("tycho_indexer_0")
|
||||
)
|
||||
)
|
||||
3
testing/tycho-client/MANIFEST.in
Normal file
@@ -0,0 +1,3 @@
|
||||
include wheels/*.whl
|
||||
include tycho_client/assets/*
|
||||
include tycho_client/bins/*
|
||||
38
testing/tycho-client/README.md
Normal file
@@ -0,0 +1,38 @@
|
||||
# Tycho Adapter

This repository contains the Tycho Adapter, a tool for interacting with the Tycho API.

## Installation

### Prerequisites

- Python 3.9

### Install with pip

```shell
# Create conda environment
conda create -n tycho pip python=3.9
# Activate environment
conda activate tycho
# Install packages
pip install -r requirements.txt
```

## Usage

```python
from tycho_client.decoders import ThirdPartyPoolTychoDecoder
from tycho_client.models import Blockchain
from tycho_client.tycho_adapter import TychoPoolStateStreamAdapter

decoder = ThirdPartyPoolTychoDecoder(
    "MyProtocolSwapAdapter.evm.runtime", minimum_gas=0, trace=False
)
stream_adapter = TychoPoolStateStreamAdapter(
    tycho_url="0.0.0.0:4242",
    protocol="my_protocol",
    decoder=decoder,
    blockchain=Blockchain.ethereum,
)
```
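The stream adapter is typically fed snapshot data fetched from a running Tycho RPC server. The sketch below mirrors how the integration test runner wires this up; the RPC payloads (`protocol_components`, `protocol_states`, `contract_states`) and the `block` object are assumed to be fetched elsewhere.

```python
snapshot_message = stream_adapter.build_snapshot_message(
    protocol_components, protocol_states, contract_states
)
decoded = stream_adapter.process_snapshot(block, snapshot_message)
for pool_state in decoded.pool_states.values():
    print(pool_state.id_, pool_state.balances)
```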
6
testing/tycho-client/requirements.txt
Normal file
@@ -0,0 +1,6 @@
|
||||
requests==2.32.2
|
||||
eth-abi==2.2.0
|
||||
eth-typing==2.3.0
|
||||
eth-utils==1.9.5
|
||||
hexbytes==0.3.1
|
||||
pydantic==2.8.2
|
||||
36
testing/tycho-client/setup.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from setuptools import setup, find_packages
|
||||
|
||||
|
||||
def read_requirements():
|
||||
with open("requirements.txt") as req:
|
||||
content = req.read()
|
||||
requirements = content.split("\n")
|
||||
return [req for req in requirements if req and not req.startswith("#")]
|
||||
|
||||
|
||||
setup(
|
||||
name="tycho-client",
|
||||
version="0.1.0",
|
||||
author="Propeller Heads",
|
||||
description="A package for interacting with the Tycho API.",
|
||||
long_description=open("README.md").read(),
|
||||
long_description_content_type="text/markdown",
|
||||
packages=find_packages(),
|
||||
classifiers=[
|
||||
"Programming Language :: Python :: 3",
|
||||
"License :: OSI Approved :: MIT License",
|
||||
"Operating System :: OS Independent",
|
||||
],
|
||||
python_requires="~=3.9",
|
||||
install_requires=[
|
||||
"requests==2.32.2",
|
||||
"eth-abi==2.2.0",
|
||||
"eth-typing==2.3.0",
|
||||
"eth-utils==1.9.5",
|
||||
"hexbytes==0.3.1",
|
||||
"pydantic==2.8.2",
|
||||
"protosim_py==0.4.11",
|
||||
],
|
||||
package_data={"tycho-client": ["../wheels/*", "./assets/*", "./bins/*"]},
|
||||
include_package_data=True,
|
||||
)
|
||||
0
testing/tycho-client/tycho_client/__init__.py
Normal file
211
testing/tycho-client/tycho_client/adapter_contract.py
Normal file
@@ -0,0 +1,211 @@
|
||||
import logging
|
||||
import time
|
||||
from decimal import Decimal
|
||||
from fractions import Fraction
|
||||
from typing import Any, Union, NamedTuple
|
||||
|
||||
import eth_abi
|
||||
from eth_abi.exceptions import DecodingError
|
||||
from eth_typing import HexStr
|
||||
from eth_utils import keccak
|
||||
from eth_utils.abi import collapse_if_tuple
|
||||
from hexbytes import HexBytes
|
||||
from protosim_py import (
|
||||
SimulationEngine,
|
||||
SimulationParameters,
|
||||
SimulationResult,
|
||||
StateUpdate,
|
||||
)
|
||||
|
||||
from .constants import EXTERNAL_ACCOUNT
|
||||
from .models import Address, EthereumToken, EVMBlock, Capability
|
||||
from .utils import load_abi, maybe_coerce_error
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
TStateOverwrites = dict[Address, dict[int, int]]
|
||||
|
||||
|
||||
class Trade(NamedTuple):
|
||||
"""
|
||||
Trade represents a simple trading operation with fields:
|
||||
received_amount: Amount received from the trade
|
||||
gas_used: Amount of gas used in the transaction
|
||||
price: Price at which the trade was executed
|
||||
"""
|
||||
|
||||
received_amount: float
|
||||
gas_used: float
|
||||
price: float
|
||||
|
||||
|
||||
class ProtoSimResponse:
|
||||
def __init__(self, return_value: Any, simulation_result: SimulationResult):
|
||||
self.return_value = return_value
|
||||
self.simulation_result = simulation_result
|
||||
|
||||
|
||||
class ProtoSimContract:
|
||||
def __init__(self, address: Address, abi_name: str, engine: SimulationEngine):
|
||||
self.abi = load_abi(abi_name)
|
||||
self.address = address
|
||||
self.engine = engine
|
||||
self._default_tx_env = dict(
|
||||
caller=EXTERNAL_ACCOUNT, to=self.address, value=0, overrides={}
|
||||
)
|
||||
functions = [f for f in self.abi if f["type"] == "function"]
|
||||
self._functions = {f["name"]: f for f in functions}
|
||||
if len(self._functions) != len(functions):
|
||||
raise ValueError(
|
||||
f"ProtoSimContract does not support overloaded function names! "
|
||||
f"Encountered while loading {abi_name}."
|
||||
)
|
||||
|
||||
def _encode_input(self, fname: str, args: list) -> bytearray:
|
||||
func = self._functions[fname]
|
||||
types = [collapse_if_tuple(t) for t in func["inputs"]]
|
||||
selector = keccak(text=f"{fname}({','.join(types)})")[:4]
|
||||
return bytearray(selector + eth_abi.encode(types, args))
|
||||
|
||||
def _decode_output(self, fname: str, encoded: list[int]) -> Any:
|
||||
func = self._functions[fname]
|
||||
types = [collapse_if_tuple(t) for t in func["outputs"]]
|
||||
return eth_abi.decode(types, bytearray(encoded))
|
||||
|
||||
def call(
|
||||
self,
|
||||
fname: str,
|
||||
*args: list[Union[int, str, bool, bytes]],
|
||||
block_number,
|
||||
timestamp: int = None,
|
||||
overrides: TStateOverwrites = None,
|
||||
caller: Address = EXTERNAL_ACCOUNT,
|
||||
value: int = 0,
|
||||
) -> ProtoSimResponse:
|
||||
call_data = self._encode_input(fname, *args)
|
||||
params = SimulationParameters(
|
||||
data=call_data,
|
||||
to=self.address,
|
||||
block_number=block_number,
|
||||
timestamp=timestamp or int(time.time()),
|
||||
overrides=overrides or {},
|
||||
caller=caller,
|
||||
value=value,
|
||||
)
|
||||
sim_result = self._simulate(params)
|
||||
try:
|
||||
output = self._decode_output(fname, sim_result.result)
|
||||
except DecodingError:
|
||||
log.warning("Failed to decode output")
|
||||
output = None
|
||||
return ProtoSimResponse(output, sim_result)
|
||||
|
||||
def _simulate(self, params: SimulationParameters) -> "SimulationResult":
|
||||
"""Run simulation and handle errors.
|
||||
|
||||
It catches a RuntimeError:
|
||||
|
||||
- if it's ``Execution reverted``, re-raises a RuntimeError
|
||||
with a Tenderly link added
|
||||
- if it's ``Out of gas``, re-raises a RecoverableSimulationException
|
||||
- otherwise it just re-raises the original error.
|
||||
"""
|
||||
try:
|
||||
simulation_result = self.engine.run_sim(params)
|
||||
return simulation_result
|
||||
except RuntimeError as err:
|
||||
try:
|
||||
coerced_err = maybe_coerce_error(err, self, params.gas_limit)
|
||||
except Exception:
|
||||
log.exception("Couldn't coerce error. Re-raising the original one.")
|
||||
raise err
|
||||
msg = str(coerced_err)
|
||||
if "Revert!" in msg:
|
||||
raise type(coerced_err)(msg, repr(self)) from err
|
||||
else:
|
||||
raise coerced_err
|
||||
|
||||
|
||||
class AdapterContract(ProtoSimContract):
|
||||
"""
|
||||
The AdapterContract provides an interface to interact with the protocols implemented
|
||||
by third parties using the `propeller-protocol-lib`.
|
||||
"""
|
||||
|
||||
def __init__(self, address: Address, engine: SimulationEngine):
|
||||
super().__init__(address, "ISwapAdapter", engine)
|
||||
|
||||
def price(
|
||||
self,
|
||||
pair_id: HexStr,
|
||||
sell_token: EthereumToken,
|
||||
buy_token: EthereumToken,
|
||||
amounts: list[int],
|
||||
block: EVMBlock,
|
||||
overwrites: TStateOverwrites = None,
|
||||
) -> list[Fraction]:
|
||||
args = [HexBytes(pair_id), sell_token.address, buy_token.address, amounts]
|
||||
res = self.call(
|
||||
"price",
|
||||
args,
|
||||
block_number=block.id,
|
||||
timestamp=int(block.ts.timestamp()),
|
||||
overrides=overwrites,
|
||||
)
|
||||
return list(map(lambda x: Fraction(*x), res.return_value[0]))
|
||||
|
||||
def swap(
|
||||
self,
|
||||
pair_id: HexStr,
|
||||
sell_token: EthereumToken,
|
||||
buy_token: EthereumToken,
|
||||
is_buy: bool,
|
||||
amount: Decimal,
|
||||
block: EVMBlock,
|
||||
overwrites: TStateOverwrites = None,
|
||||
) -> tuple[Trade, dict[str, StateUpdate]]:
|
||||
args = [
|
||||
HexBytes(pair_id),
|
||||
sell_token.address,
|
||||
buy_token.address,
|
||||
int(is_buy),
|
||||
amount,
|
||||
]
|
||||
res = self.call(
|
||||
"swap",
|
||||
args,
|
||||
block_number=block.id,
|
||||
timestamp=int(block.ts.timestamp()),
|
||||
overrides=overwrites,
|
||||
)
|
||||
amount, gas, price = res.return_value[0]
|
||||
return Trade(amount, gas, Fraction(*price)), res.simulation_result.state_updates
|
||||
|
||||
def get_limits(
|
||||
self,
|
||||
pair_id: HexStr,
|
||||
sell_token: EthereumToken,
|
||||
buy_token: EthereumToken,
|
||||
block: EVMBlock,
|
||||
overwrites: TStateOverwrites = None,
|
||||
) -> tuple[int, int]:
|
||||
args = [HexBytes(pair_id), sell_token.address, buy_token.address]
|
||||
res = self.call(
|
||||
"getLimits",
|
||||
args,
|
||||
block_number=block.id,
|
||||
timestamp=int(block.ts.timestamp()),
|
||||
overrides=overwrites,
|
||||
)
|
||||
return res.return_value[0]
|
||||
|
||||
def get_capabilities(
|
||||
self, pair_id: HexStr, sell_token: EthereumToken, buy_token: EthereumToken
|
||||
) -> set[Capability]:
|
||||
args = [HexBytes(pair_id), sell_token.address, buy_token.address]
|
||||
res = self.call("getCapabilities", args, block_number=1)
|
||||
return set(map(Capability, res.return_value[0]))
|
||||
|
||||
def min_gas_usage(self) -> int:
|
||||
res = self.call("minGasUsage", [], block_number=1)
|
||||
return res.return_value[0]
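# Illustrative sketch only: given an already initialised SimulationEngine, two
# EthereumToken objects and an EVMBlock, the adapter can be queried like this
# (`adapter_address` and `pool_id` are placeholders):
#
#   adapter = AdapterContract(adapter_address, engine)
#   max_sell, max_buy = adapter.get_limits(pool_id, sell_token, buy_token, block)
#   trade, state_updates = adapter.swap(
#       pool_id, sell_token, buy_token, is_buy=False, amount=max_sell // 100, block=block
#   )
#   print(trade.received_amount, trade.gas_used)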
|
||||
BIN
testing/tycho-client/tycho_client/assets/ERC20.bin
Normal file
Binary file not shown.
78
testing/tycho-client/tycho_client/assets/IERC20.sol
Normal file
@@ -0,0 +1,78 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
// OpenZeppelin Contracts (last updated v4.9.0) (token/ERC20/IERC20.sol)
|
||||
|
||||
pragma solidity ^0.8.19;
|
||||
|
||||
/**
|
||||
* @dev Interface of the ERC20 standard as defined in the EIP.
|
||||
*/
|
||||
interface IERC20 {
|
||||
/**
|
||||
* @dev Emitted when `value` tokens are moved from one account (`from`) to
|
||||
* another (`to`).
|
||||
*
|
||||
* Note that `value` may be zero.
|
||||
*/
|
||||
event Transfer(address indexed from, address indexed to, uint256 value);
|
||||
|
||||
/**
|
||||
* @dev Emitted when the allowance of a `spender` for an `owner` is set by
|
||||
* a call to {approve}. `value` is the new allowance.
|
||||
*/
|
||||
event Approval(address indexed owner, address indexed spender, uint256 value);
|
||||
|
||||
/**
|
||||
* @dev Returns the amount of tokens in existence.
|
||||
*/
|
||||
function totalSupply() external view returns (uint256);
|
||||
|
||||
/**
|
||||
* @dev Returns the amount of tokens owned by `account`.
|
||||
*/
|
||||
function balanceOf(address account) external view returns (uint256);
|
||||
|
||||
/**
|
||||
* @dev Moves `amount` tokens from the caller's account to `to`.
|
||||
*
|
||||
* Returns a boolean value indicating whether the operation succeeded.
|
||||
*
|
||||
* Emits a {Transfer} event.
|
||||
*/
|
||||
function transfer(address to, uint256 amount) external returns (bool);
|
||||
|
||||
/**
|
||||
* @dev Returns the remaining number of tokens that `spender` will be
|
||||
* allowed to spend on behalf of `owner` through {transferFrom}. This is
|
||||
* zero by default.
|
||||
*
|
||||
* This value changes when {approve} or {transferFrom} are called.
|
||||
*/
|
||||
function allowance(address owner, address spender) external view returns (uint256);
|
||||
|
||||
/**
|
||||
* @dev Sets `amount` as the allowance of `spender` over the caller's tokens.
|
||||
*
|
||||
* Returns a boolean value indicating whether the operation succeeded.
|
||||
*
|
||||
* IMPORTANT: Beware that changing an allowance with this method brings the risk
|
||||
* that someone may use both the old and the new allowance by unfortunate
|
||||
* transaction ordering. One possible solution to mitigate this race
|
||||
* condition is to first reduce the spender's allowance to 0 and set the
|
||||
* desired value afterwards:
|
||||
* https://github.com/ethereum/EIPs/issues/20#issuecomment-263524729
|
||||
*
|
||||
* Emits an {Approval} event.
|
||||
*/
|
||||
function approve(address spender, uint256 amount) external returns (bool);
|
||||
|
||||
/**
|
||||
* @dev Moves `amount` tokens from `from` to `to` using the
|
||||
* allowance mechanism. `amount` is then deducted from the caller's
|
||||
* allowance.
|
||||
*
|
||||
* Returns a boolean value indicating whether the operation succeeded.
|
||||
*
|
||||
* Emits a {Transfer} event.
|
||||
*/
|
||||
function transferFrom(address from, address to, uint256 amount) external returns (bool);
|
||||
}
|
||||
250
testing/tycho-client/tycho_client/assets/ISwapAdapter.abi
Normal file
@@ -0,0 +1,250 @@
|
||||
[
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "limit",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"name": "LimitExceeded",
|
||||
"type": "error"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "string",
|
||||
"name": "reason",
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"name": "NotImplemented",
|
||||
"type": "error"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "string",
|
||||
"name": "reason",
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"name": "Unavailable",
|
||||
"type": "error"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "poolId",
|
||||
"type": "bytes32"
|
||||
},
|
||||
{
|
||||
"internalType": "contract IERC20",
|
||||
"name": "sellToken",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "contract IERC20",
|
||||
"name": "buyToken",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"name": "getCapabilities",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "enum ISwapAdapterTypes.Capability[]",
|
||||
"name": "capabilities",
|
||||
"type": "uint8[]"
|
||||
}
|
||||
],
|
||||
"stateMutability": "nonpayable",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "poolId",
|
||||
"type": "bytes32"
|
||||
},
|
||||
{
|
||||
"internalType": "contract IERC20",
|
||||
"name": "sellToken",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "contract IERC20",
|
||||
"name": "buyToken",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"name": "getLimits",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256[]",
|
||||
"name": "limits",
|
||||
"type": "uint256[]"
|
||||
}
|
||||
],
|
||||
"stateMutability": "nonpayable",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "offset",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "limit",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"name": "getPoolIds",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "bytes32[]",
|
||||
"name": "ids",
|
||||
"type": "bytes32[]"
|
||||
}
|
||||
],
|
||||
"stateMutability": "nonpayable",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "poolId",
|
||||
"type": "bytes32"
|
||||
}
|
||||
],
|
||||
"name": "getTokens",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "contract IERC20[]",
|
||||
"name": "tokens",
|
||||
"type": "address[]"
|
||||
}
|
||||
],
|
||||
"stateMutability": "nonpayable",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "poolId",
|
||||
"type": "bytes32"
|
||||
},
|
||||
{
|
||||
"internalType": "contract IERC20",
|
||||
"name": "sellToken",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "contract IERC20",
|
||||
"name": "buyToken",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256[]",
|
||||
"name": "specifiedAmounts",
|
||||
"type": "uint256[]"
|
||||
}
|
||||
],
|
||||
"name": "price",
|
||||
"outputs": [
|
||||
{
|
||||
"components": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "numerator",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "denominator",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"internalType": "struct ISwapAdapterTypes.Fraction[]",
|
||||
"name": "prices",
|
||||
"type": "tuple[]"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "poolId",
|
||||
"type": "bytes32"
|
||||
},
|
||||
{
|
||||
"internalType": "contract IERC20",
|
||||
"name": "sellToken",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "contract IERC20",
|
||||
"name": "buyToken",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "enum ISwapAdapterTypes.OrderSide",
|
||||
"name": "side",
|
||||
"type": "uint8"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "specifiedAmount",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"name": "swap",
|
||||
"outputs": [
|
||||
{
|
||||
"components": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "calculatedAmount",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "gasUsed",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"components": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "numerator",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "denominator",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"internalType": "struct ISwapAdapterTypes.Fraction",
|
||||
"name": "price",
|
||||
"type": "tuple"
|
||||
}
|
||||
],
|
||||
"internalType": "struct ISwapAdapterTypes.Trade",
|
||||
"name": "trade",
|
||||
"type": "tuple"
|
||||
}
|
||||
],
|
||||
"stateMutability": "nonpayable",
|
||||
"type": "function"
|
||||
}
|
||||
]
|
||||
363
testing/tycho-client/tycho_client/assets/mocked_ERC20.sol
Normal file
@@ -0,0 +1,363 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
// OpenZeppelin Contracts (last updated v4.9.0) (token/ERC20/ERC20.sol)
|
||||
|
||||
pragma solidity ^0.8.19;
|
||||
|
||||
import "./IERC20.sol";
|
||||
|
||||
|
||||
/**
|
||||
* @dev Provides information about the current execution context, including the
|
||||
* sender of the transaction and its data. While these are generally available
|
||||
* via msg.sender and msg.data, they should not be accessed in such a direct
|
||||
* manner, since when dealing with meta-transactions the account sending and
|
||||
* paying for execution may not be the actual sender (as far as an application
|
||||
* is concerned).
|
||||
*
|
||||
* This contract is only required for intermediate, library-like contracts.
|
||||
*/
|
||||
abstract contract Context {
|
||||
function _msgSender() internal view virtual returns (address) {
|
||||
return msg.sender;
|
||||
}
|
||||
|
||||
function _msgData() internal view virtual returns (bytes calldata) {
|
||||
return msg.data;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Interface for the optional metadata functions from the ERC20 standard.
|
||||
*
|
||||
* _Available since v4.1._
|
||||
*/
|
||||
interface IERC20Metadata is IERC20 {
|
||||
/**
|
||||
* @dev Returns the name of the token.
|
||||
*/
|
||||
function name() external view returns (string memory);
|
||||
|
||||
/**
|
||||
* @dev Returns the symbol of the token.
|
||||
*/
|
||||
function symbol() external view returns (string memory);
|
||||
|
||||
/**
|
||||
* @dev Returns the decimals places of the token.
|
||||
*/
|
||||
function decimals() external view returns (uint8);
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Implementation of the {IERC20} interface.
|
||||
*
|
||||
* This implementation is agnostic to the way tokens are created. This means
|
||||
* that a supply mechanism has to be added in a derived contract using {_mint}.
|
||||
*
|
||||
* TIP: For a detailed writeup see our guide
|
||||
* https://forum.openzeppelin.com/t/how-to-implement-erc20-supply-mechanisms/226[How
|
||||
* to implement supply mechanisms].
|
||||
*
|
||||
* The default value of {decimals} is 18. To change this, you should override
|
||||
* this function so it returns a different value.
|
||||
*
|
||||
* We have followed general OpenZeppelin Contracts guidelines: functions revert
|
||||
* instead of returning `false` on failure. This behavior is nonetheless
|
||||
* conventional and does not conflict with the expectations of ERC20
|
||||
* applications.
|
||||
*
|
||||
* Additionally, an {Approval} event is emitted on calls to {transferFrom}.
|
||||
* This allows applications to reconstruct the allowance for all accounts just
|
||||
* by listening to said events. Other implementations of the EIP may not emit
|
||||
* these events, as it isn't required by the specification.
|
||||
*
|
||||
* Finally, the non-standard {decreaseAllowance} and {increaseAllowance}
|
||||
* functions have been added to mitigate the well-known issues around setting
|
||||
* allowances. See {IERC20-approve}.
|
||||
*/
|
||||
contract ERC20 is Context, IERC20, IERC20Metadata {
|
||||
mapping(address => uint256) private _balances;
|
||||
|
||||
mapping(address => mapping(address => uint256)) private _allowances;
|
||||
|
||||
uint256 private _totalSupply;
|
||||
|
||||
string private _name;
|
||||
string private _symbol;
|
||||
uint8 private _decimals;
|
||||
|
||||
/**
|
||||
* @dev Sets the values for {name}, {symbol} and {decimals}.
|
||||
*
|
||||
* All three of these values are immutable: they can only be set once during
|
||||
* construction.
|
||||
*/
|
||||
constructor(string memory name_, string memory symbol_, uint8 decimals_) {
|
||||
_name = name_;
|
||||
_symbol = symbol_;
|
||||
_decimals = decimals_;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Returns the name of the token.
|
||||
*/
|
||||
function name() public view virtual returns (string memory) {
|
||||
return _name;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Returns the symbol of the token, usually a shorter version of the
|
||||
* name.
|
||||
*/
|
||||
function symbol() public view virtual returns (string memory) {
|
||||
return _symbol;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Returns the number of decimals used to get its user representation.
|
||||
* For example, if `decimals` equals `2`, a balance of `505` tokens should
|
||||
* be displayed to a user as `5.05` (`505 / 10 ** 2`).
|
||||
*
|
||||
* Tokens usually opt for a value of 18, imitating the relationship between
|
||||
* Ether and Wei. This is the default value returned by this function, unless
|
||||
* it's overridden.
|
||||
*
|
||||
* NOTE: This information is only used for _display_ purposes: it in
|
||||
* no way affects any of the arithmetic of the contract, including
|
||||
* {IERC20-balanceOf} and {IERC20-transfer}.
|
||||
*/
|
||||
function decimals() public view virtual returns (uint8) {
|
||||
return _decimals;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev See {IERC20-totalSupply}.
|
||||
*/
|
||||
function totalSupply() public view virtual returns (uint256) {
|
||||
return _totalSupply;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev See {IERC20-balanceOf}.
|
||||
*/
|
||||
function balanceOf(address account) public view virtual returns (uint256) {
|
||||
return _balances[account];
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev See {IERC20-transfer}.
|
||||
*
|
||||
* Requirements:
|
||||
*
|
||||
* - `to` cannot be the zero address.
|
||||
* - the caller must have a balance of at least `amount`.
|
||||
*/
|
||||
function transfer(address to, uint256 amount) public virtual returns (bool) {
|
||||
address owner = _msgSender();
|
||||
_transfer(owner, to, amount);
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev See {IERC20-allowance}.
|
||||
*/
|
||||
function allowance(address owner, address spender) public view virtual returns (uint256) {
|
||||
return _allowances[owner][spender];
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev See {IERC20-approve}.
|
||||
*
|
||||
* NOTE: If `amount` is the maximum `uint256`, the allowance is not updated on
|
||||
* `transferFrom`. This is semantically equivalent to an infinite approval.
|
||||
*
|
||||
* Requirements:
|
||||
*
|
||||
* - `spender` cannot be the zero address.
|
||||
*/
|
||||
function approve(address spender, uint256 amount) public virtual returns (bool) {
|
||||
address owner = _msgSender();
|
||||
_approve(owner, spender, amount);
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev See {IERC20-transferFrom}.
|
||||
*
|
||||
* Emits an {Approval} event indicating the updated allowance. This is not
|
||||
* required by the EIP. See the note at the beginning of {ERC20}.
|
||||
*
|
||||
* NOTE: Does not update the allowance if the current allowance
|
||||
* is the maximum `uint256`.
|
||||
*
|
||||
* Requirements:
|
||||
*
|
||||
* - `from` and `to` cannot be the zero address.
|
||||
* - `from` must have a balance of at least `amount`.
|
||||
* - the caller must have allowance for ``from``'s tokens of at least
|
||||
* `amount`.
|
||||
*/
|
||||
function transferFrom(address from, address to, uint256 amount) public virtual returns (bool) {
|
||||
address spender = _msgSender();
|
||||
_spendAllowance(from, spender, amount);
|
||||
_transfer(from, to, amount);
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Atomically increases the allowance granted to `spender` by the caller.
|
||||
*
|
||||
* This is an alternative to {approve} that can be used as a mitigation for
|
||||
* problems described in {IERC20-approve}.
|
||||
*
|
||||
* Emits an {Approval} event indicating the updated allowance.
|
||||
*
|
||||
* Requirements:
|
||||
*
|
||||
* - `spender` cannot be the zero address.
|
||||
*/
|
||||
function increaseAllowance(address spender, uint256 addedValue) public virtual returns (bool) {
|
||||
address owner = _msgSender();
|
||||
_approve(owner, spender, allowance(owner, spender) + addedValue);
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Atomically decreases the allowance granted to `spender` by the caller.
|
||||
*
|
||||
* This is an alternative to {approve} that can be used as a mitigation for
|
||||
* problems described in {IERC20-approve}.
|
||||
*
|
||||
* Emits an {Approval} event indicating the updated allowance.
|
||||
*
|
||||
* Requirements:
|
||||
*
|
||||
* - `spender` cannot be the zero address.
|
||||
* - `spender` must have allowance for the caller of at least
|
||||
* `subtractedValue`.
|
||||
*/
|
||||
function decreaseAllowance(address spender, uint256 subtractedValue) public virtual returns (bool) {
|
||||
address owner = _msgSender();
|
||||
uint256 currentAllowance = allowance(owner, spender);
|
||||
require(currentAllowance >= subtractedValue, "ERC20: decreased allowance below zero");
|
||||
unchecked {
|
||||
_approve(owner, spender, currentAllowance - subtractedValue);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Moves `amount` of tokens from `from` to `to`.
|
||||
*
|
||||
* This internal function is equivalent to {transfer}, and can be used to
|
||||
* e.g. implement automatic token fees, slashing mechanisms, etc.
|
||||
*
|
||||
* Emits a {Transfer} event.
|
||||
*
|
||||
* NOTE: This function is not virtual, {_update} should be overridden instead.
|
||||
*/
|
||||
function _transfer(address from, address to, uint256 amount) internal {
|
||||
require(from != address(0), "ERC20: transfer from the zero address");
|
||||
require(to != address(0), "ERC20: transfer to the zero address");
|
||||
_update(from, to, amount);
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Transfers `amount` of tokens from `from` to `to`, or alternatively mints (or burns) if `from` (or `to`) is
|
||||
* the zero address. All customizations to transfers, mints, and burns should be done by overriding this function.
|
||||
*
|
||||
* Emits a {Transfer} event.
|
||||
*/
|
||||
function _update(address from, address to, uint256 amount) internal virtual {
|
||||
if (from == address(0)) {
|
||||
_totalSupply += amount;
|
||||
} else {
|
||||
uint256 fromBalance = _balances[from];
|
||||
require(fromBalance >= amount, "ERC20: transfer amount exceeds balance");
|
||||
unchecked {
|
||||
// Overflow not possible: amount <= fromBalance <= totalSupply.
|
||||
_balances[from] = fromBalance - amount;
|
||||
}
|
||||
}
|
||||
|
||||
if (to == address(0)) {
|
||||
unchecked {
|
||||
// Overflow not possible: amount <= totalSupply or amount <= fromBalance <= totalSupply.
|
||||
_totalSupply -= amount;
|
||||
}
|
||||
} else {
|
||||
unchecked {
|
||||
// Overflow not possible: balance + amount is at most totalSupply, which we know fits into a uint256.
|
||||
_balances[to] += amount;
|
||||
}
|
||||
}
|
||||
|
||||
emit Transfer(from, to, amount);
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Creates `amount` tokens and assigns them to `account`, by transferring it from address(0).
|
||||
* Relies on the `_update` mechanism
|
||||
*
|
||||
* Emits a {Transfer} event with `from` set to the zero address.
|
||||
*
|
||||
* NOTE: This function is not virtual, {_update} should be overridden instead.
|
||||
*/
|
||||
function _mint(address account, uint256 amount) internal {
|
||||
require(account != address(0), "ERC20: mint to the zero address");
|
||||
_update(address(0), account, amount);
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Destroys `amount` tokens from `account`, by transferring it to address(0).
|
||||
* Relies on the `_update` mechanism.
|
||||
*
|
||||
* Emits a {Transfer} event with `to` set to the zero address.
|
||||
*
|
||||
* NOTE: This function is not virtual, {_update} should be overridden instead
|
||||
*/
|
||||
function _burn(address account, uint256 amount) internal {
|
||||
require(account != address(0), "ERC20: burn from the zero address");
|
||||
_update(account, address(0), amount);
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Sets `amount` as the allowance of `spender` over the `owner` s tokens.
|
||||
*
|
||||
* This internal function is equivalent to `approve`, and can be used to
|
||||
* e.g. set automatic allowances for certain subsystems, etc.
|
||||
*
|
||||
* Emits an {Approval} event.
|
||||
*
|
||||
* Requirements:
|
||||
*
|
||||
* - `owner` cannot be the zero address.
|
||||
* - `spender` cannot be the zero address.
|
||||
*/
|
||||
function _approve(address owner, address spender, uint256 amount) internal virtual {
|
||||
require(owner != address(0), "ERC20: approve from the zero address");
|
||||
require(spender != address(0), "ERC20: approve to the zero address");
|
||||
|
||||
_allowances[owner][spender] = amount;
|
||||
emit Approval(owner, spender, amount);
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Updates `owner` s allowance for `spender` based on spent `amount`.
|
||||
*
|
||||
* Does not update the allowance amount in case of infinite allowance.
|
||||
* Revert if not enough allowance is available.
|
||||
*
|
||||
* Might emit an {Approval} event.
|
||||
*/
|
||||
function _spendAllowance(address owner, address spender, uint256 amount) internal virtual {
|
||||
uint256 currentAllowance = allowance(owner, spender);
|
||||
if (currentAllowance != type(uint256).max) {
|
||||
require(currentAllowance >= amount, "ERC20: insufficient allowance");
|
||||
unchecked {
|
||||
_approve(owner, spender, currentAllowance - amount);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
12
testing/tycho-client/tycho_client/constants.py
Normal file
@@ -0,0 +1,12 @@
|
||||
from pathlib import Path
|
||||
from typing import Final
|
||||
|
||||
ASSETS_FOLDER = Path(__file__).parent / "assets"
|
||||
TYCHO_CLIENT_FOLDER = Path(__file__).parent / "bins"
|
||||
TYCHO_CLIENT_LOG_FOLDER = TYCHO_CLIENT_FOLDER / "logs"
|
||||
|
||||
EXTERNAL_ACCOUNT: Final[str] = "0xf847a638E44186F3287ee9F8cAF73FF4d4B80784"
|
||||
"""This is a dummy address used as a transaction sender"""
|
||||
UINT256_MAX: Final[int] = 2 ** 256 - 1
|
||||
MAX_BALANCE: Final[int] = UINT256_MAX // 2
|
||||
"""0.5 of the maximal possible balance to avoid overflow errors"""
|
||||
164
testing/tycho-client/tycho_client/decoders.py
Normal file
@@ -0,0 +1,164 @@
|
||||
import time
|
||||
from decimal import Decimal
|
||||
from logging import getLogger
|
||||
from typing import Any
|
||||
|
||||
import eth_abi
|
||||
from eth_utils import keccak
|
||||
from protosim_py import SimulationEngine, SimulationParameters, AccountInfo
|
||||
|
||||
from .constants import EXTERNAL_ACCOUNT
|
||||
from .exceptions import TychoDecodeError
|
||||
from .models import EVMBlock, EthereumToken
|
||||
from .pool_state import ThirdPartyPool
|
||||
from .tycho_db import TychoDBSingleton
|
||||
from .utils import decode_tycho_exchange, get_code_for_address
|
||||
|
||||
log = getLogger(__name__)
|
||||
|
||||
|
||||
class ThirdPartyPoolTychoDecoder:
|
||||
"""ThirdPartyPool decoder for protocol messages from the Tycho feed"""
|
||||
|
||||
def __init__(self, adapter_contract: str, minimum_gas: int, trace: bool):
|
||||
self.adapter_contract = adapter_contract
|
||||
self.minimum_gas = minimum_gas
|
||||
self.trace = trace
|
||||
|
||||
def decode_snapshot(
|
||||
self,
|
||||
snapshot: dict[str, Any],
|
||||
block: EVMBlock,
|
||||
tokens: dict[str, EthereumToken],
|
||||
) -> tuple[dict[str, ThirdPartyPool], list[str]]:
|
||||
pools = {}
|
||||
failed_pools = []
|
||||
for snap in snapshot.values():
|
||||
try:
|
||||
pool = self.decode_pool_state(snap, block, tokens)
|
||||
pools[pool.id_] = pool
|
||||
except TychoDecodeError as e:
|
||||
log.error(f"Failed to decode third party snapshot: {e}")
|
||||
failed_pools.append(snap["component"]["id"])
|
||||
continue
|
||||
|
||||
return pools, failed_pools
|
||||
|
||||
def decode_pool_state(
|
||||
self, snap: dict, block: EVMBlock, tokens: dict[str, EthereumToken]
|
||||
) -> ThirdPartyPool:
|
||||
component = snap["component"]
|
||||
exchange, _ = decode_tycho_exchange(component["protocol_system"])
|
||||
|
||||
try:
|
||||
tokens = tuple(tokens[t] for t in component["tokens"])
|
||||
except KeyError as e:
|
||||
raise TychoDecodeError("Unsupported token", pool_id=component["id"]) from e
|
||||
|
||||
balances = self.decode_balances(snap, tokens)
|
||||
optional_attributes = self.decode_optional_attributes(component, snap, block.id)
|
||||
|
||||
return ThirdPartyPool(
|
||||
id_=optional_attributes.pop("pool_id", component["id"]),
|
||||
tokens=tokens,
|
||||
balances=balances,
|
||||
block=block,
|
||||
spot_prices={},
|
||||
trading_fee=Decimal("0"),
|
||||
exchange=exchange,
|
||||
adapter_contract_name=self.adapter_contract,
|
||||
minimum_gas=self.minimum_gas,
|
||||
trace=self.trace,
|
||||
**optional_attributes,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def decode_optional_attributes(component, snap, block_number):
|
||||
# Handle optional state attributes
|
||||
attributes = snap["state"]["attributes"]
|
||||
balance_owner = attributes.get("balance_owner")
|
||||
stateless_contracts = {}
|
||||
static_attributes = snap["component"]["static_attributes"]
|
||||
pool_id = static_attributes.get("pool_id") or component["id"]
|
||||
|
||||
index = 0
|
||||
while f"stateless_contract_addr_{index}" in static_attributes:
|
||||
encoded_address = static_attributes[f"stateless_contract_addr_{index}"]
|
||||
decoded = bytes.fromhex(
|
||||
encoded_address[2:] if encoded_address.startswith('0x') else encoded_address).decode('utf-8')
|
||||
if decoded.startswith("call"):
|
||||
address = ThirdPartyPoolTychoDecoder.get_address_from_call(block_number, decoded)
|
||||
else:
|
||||
address = decoded
|
||||
|
||||
code = static_attributes.get(f"stateless_contract_code_{index}") or get_code_for_address(address)
|
||||
stateless_contracts[address] = code
|
||||
index += 1
|
||||
|
||||
index = 0
|
||||
while f"stateless_contract_addr_{index}" in attributes:
|
||||
address = attributes[f"stateless_contract_addr_{index}"]
|
||||
code = attributes.get(f"stateless_contract_code_{index}") or get_code_for_address(address)
|
||||
stateless_contracts[address] = code
|
||||
index += 1
|
||||
return {
|
||||
"balance_owner": balance_owner,
|
||||
"pool_id": pool_id,
|
||||
"stateless_contracts": stateless_contracts,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def get_address_from_call(block_number, decoded):
|
||||
db = TychoDBSingleton.get_instance()
|
||||
engine = SimulationEngine.new_with_tycho_db(db=db)
|
||||
engine.init_account(
|
||||
address="0x0000000000000000000000000000000000000000",
|
||||
account=AccountInfo(balance=0, nonce=0),
|
||||
mocked=False,
|
||||
permanent_storage=None,
|
||||
)
|
||||
selector = keccak(text=decoded.split(":")[-1])[:4]
|
||||
sim_result = engine.run_sim(SimulationParameters(
|
||||
data=bytearray(selector),
|
||||
to=decoded.split(':')[1],
|
||||
block_number=block_number,
|
||||
timestamp=int(time.time()),
|
||||
overrides={},
|
||||
caller=EXTERNAL_ACCOUNT,
|
||||
value=0,
|
||||
))
|
||||
address = eth_abi.decode(["address"], bytearray(sim_result.result))
|
||||
return address[0]
|
||||
|
||||
@staticmethod
|
||||
def decode_balances(snap, tokens):
|
||||
balances = {}
|
||||
for addr, balance in snap["state"]["balances"].items():
|
||||
checksum_addr = addr
|
||||
token = next(t for t in tokens if t.address == checksum_addr)
|
||||
balances[token.address] = token.from_onchain_amount(
|
||||
int(balance, 16) # balances are big endian encoded
|
||||
)
|
||||
return balances
|
||||
|
||||
@staticmethod
|
||||
def apply_update(
|
||||
pool: ThirdPartyPool,
|
||||
pool_update: dict[str, Any],
|
||||
balance_updates: dict[str, Any],
|
||||
block: EVMBlock,
|
||||
) -> ThirdPartyPool:
|
||||
# check for and apply optional state attributes
|
||||
attributes = pool_update.get("updated_attributes")
|
||||
if attributes:
|
||||
# TODO: handle balance_owner and stateless_contracts updates
|
||||
pass
|
||||
|
||||
for addr, balance_msg in balance_updates.items():
|
||||
token = [t for t in pool.tokens if t.address == addr][0]
|
||||
balance = int(balance_msg["balance"], 16) # balances are big endian encoded
|
||||
pool.balances[token.address] = token.from_onchain_amount(balance)
|
||||
pool.block = block
|
||||
# we clear the simulation cache and overwrites on the pool and trigger a recalculation of spot prices
|
||||
pool.clear_all_cache()
|
||||
return pool
|
||||
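For reference, a short sketch of the `stateless_contract_addr_{n}` convention handled by decode_optional_attributes and get_address_from_call above: the attribute holds a hex-encoded UTF-8 string, and when that string starts with "call" it is interpreted as `call:<contract>:<signature>` and resolved by simulating the call. The contract address and function name below are made-up examples.

# Sketch of decoding a hypothetical "call:" attribute value.
from eth_utils import keccak

encoded = "0x" + b"call:0x7f39C581F595B53c5cb19bD0b3f8dA6c935E2Ca0:getImplementation()".hex()

decoded = bytes.fromhex(encoded[2:]).decode("utf-8")
assert decoded.startswith("call")

target = decoded.split(":")[1]                      # contract to simulate against
selector = keccak(text=decoded.split(":")[-1])[:4]  # 4-byte selector of the signature
print(target, selector.hex())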
59
testing/tycho-client/tycho_client/exceptions.py
Normal file
59
testing/tycho-client/tycho_client/exceptions.py
Normal file
@@ -0,0 +1,59 @@
|
||||
from decimal import Decimal
|
||||
|
||||
|
||||
class TychoDecodeError(Exception):
|
||||
def __init__(self, msg: str, pool_id: str):
|
||||
super().__init__(msg)
|
||||
self.pool_id = pool_id
|
||||
|
||||
|
||||
class APIRequestError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class TradeSimulationException(Exception):
|
||||
def __init__(self, message, pool_id: str):
|
||||
self.pool_id = pool_id
|
||||
super().__init__(message)
|
||||
|
||||
|
||||
class RecoverableSimulationException(TradeSimulationException):
|
||||
"""Marks that the simulation could not fully fulfill the requested order.
|
||||
|
||||
Provides a partial trade that is valid but does not fully fulfill the conditions
|
||||
requested.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
message
|
||||
Error message
|
||||
pool_id
|
||||
ID of a pool that caused the error
|
||||
partial_trade
|
||||
A tuple of (bought_amount, gas_used, new_pool_state, sold_amount)
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
message,
|
||||
pool_id: str,
|
||||
partial_trade: tuple[Decimal, int, "ThirdPartyPool", Decimal] = None,
|
||||
):
|
||||
super().__init__(message, pool_id)
|
||||
self.partial_trade = partial_trade
|
||||
|
||||
|
||||
class OutOfGas(RecoverableSimulationException):
|
||||
"""This exception indicates that the underlying VM **likely** ran out of gas.
|
||||
|
||||
It is not easy to judge whether it was really due to out of gas, as the details
|
||||
of the SC being called might be hiding this. E.g. out of gas may happen while
|
||||
calling an external contract, which might show as the external call failing, although
|
||||
it was due to a lack of gas.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class TychoClientException(Exception):
|
||||
pass
|
||||
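A minimal sketch of how a caller might use RecoverableSimulationException; `pool`, `sell_token` and `buy_token` are hypothetical stand-ins for a ThirdPartyPool and two EthereumToken instances, and the package is assumed to be importable as `tycho_client`:

from decimal import Decimal

from tycho_client.exceptions import RecoverableSimulationException


def swap_with_fallback(pool, sell_token, buy_token, amount: Decimal):
    try:
        return pool.get_amount_out(sell_token, amount, buy_token)
    except RecoverableSimulationException as e:
        # The order could not be fully filled; fall back to the partial trade:
        # (bought_amount, gas_used, new_pool_state, sold_amount).
        bought_amount, gas_used, new_state, sold_amount = e.partial_trade
        return bought_amount, gas_used, new_state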
127
testing/tycho-client/tycho_client/models.py
Normal file
127
testing/tycho-client/tycho_client/models.py
Normal file
@@ -0,0 +1,127 @@
|
||||
import datetime
|
||||
from decimal import Decimal, localcontext, Context, ROUND_FLOOR, InvalidOperation
|
||||
from enum import Enum, IntEnum, auto
|
||||
from fractions import Fraction
|
||||
from logging import getLogger
|
||||
from typing import Union
|
||||
|
||||
from pydantic import BaseModel, Field, PrivateAttr
|
||||
|
||||
Address = str
|
||||
|
||||
log = getLogger(__name__)
|
||||
|
||||
|
||||
class Blockchain(Enum):
|
||||
ethereum = "ethereum"
|
||||
arbitrum = "arbitrum"
|
||||
polygon = "polygon"
|
||||
zksync = "zksync"
|
||||
|
||||
|
||||
class EVMBlock(BaseModel):
|
||||
id: int
|
||||
ts: datetime.datetime = Field(default_factory=datetime.datetime.utcnow)
|
||||
hash_: str
|
||||
|
||||
|
||||
class EthereumToken(BaseModel):
|
||||
symbol: str
|
||||
address: str
|
||||
decimals: int
|
||||
gas: Union[int, list[int]] = 29000
|
||||
_hash: int = PrivateAttr(default=None)
|
||||
|
||||
def to_onchain_amount(self, amount: Union[float, Decimal, str]) -> int:
|
||||
"""Converts floating-point numerals to an integer, by shifting right by the
|
||||
token's maximum amount of decimals (e.g.: 1.000000 becomes 1000000).
|
||||
For the reverse operation please see self.from_onchain_amount
|
||||
"""
|
||||
if not isinstance(amount, Decimal):
|
||||
log.warning(f"Expected variable of type Decimal. Got {type(amount)}.")
|
||||
|
||||
with localcontext(Context(rounding=ROUND_FLOOR, prec=256)):
|
||||
amount = Decimal(str(amount)) * (10 ** self.decimals)
|
||||
try:
|
||||
amount = amount.quantize(Decimal("1.0"))
|
||||
except InvalidOperation:
|
||||
log.error(
|
||||
f"Quantize failed for {self.symbol}, {amount}, {self.decimals}"
|
||||
)
|
||||
return int(amount)
|
||||
|
||||
def from_onchain_amount(
|
||||
self, onchain_amount: Union[int, Fraction], quantize: bool = True
|
||||
) -> Decimal:
|
||||
"""Converts an Integer to a quantized decimal, by shifting left by the token's
|
||||
maximum amount of decimals (e.g.: 1000000 becomes 1.000000 for a 6-decimal token).
|
||||
For the reverse operation please see self.to_onchain_amount
|
||||
|
||||
If the onchain_amount is too low, then using quantize can underflow without
|
||||
raising and the offchain amount returned is 0.
|
||||
See _decimal.Decimal.quantize docstrings for details.
|
||||
|
||||
Quantize is needed for UniswapV2.
|
||||
"""
|
||||
with localcontext(Context(rounding=ROUND_FLOOR, prec=256)):
|
||||
if isinstance(onchain_amount, Fraction):
|
||||
return (
|
||||
Decimal(onchain_amount.numerator)
|
||||
/ Decimal(onchain_amount.denominator)
|
||||
/ Decimal(10 ** self.decimals)
|
||||
).quantize(Decimal(f"{1 / 10 ** self.decimals}"))
|
||||
if quantize is True:
|
||||
try:
|
||||
amount = (
|
||||
Decimal(str(onchain_amount)) / 10 ** self.decimals
|
||||
).quantize(Decimal(f"{1 / 10 ** self.decimals}"))
|
||||
except InvalidOperation:
|
||||
amount = Decimal(str(onchain_amount)) / Decimal(10 ** self.decimals)
|
||||
else:
|
||||
amount = Decimal(str(onchain_amount)) / Decimal(10 ** self.decimals)
|
||||
return amount
|
||||
|
||||
def __repr__(self):
|
||||
return self.symbol
|
||||
|
||||
def __str__(self):
|
||||
return self.symbol
|
||||
|
||||
def __eq__(self, other) -> bool:
|
||||
# this is faster than calling custom __hash__, due to cache check
|
||||
return other.address == self.address
|
||||
|
||||
def __hash__(self) -> int:
|
||||
if self._hash is None:
|
||||
# caching the hash saves time during graph search
|
||||
self._hash = hash(self.address)
|
||||
return self._hash
|
||||
|
||||
|
||||
class DatabaseType(Enum):
|
||||
# Makes a call to the node each time it needs a storage slot (unless cached from a previous call).
|
||||
rpc_reader = "rpc_reader"
|
||||
# Connects to Tycho and caches the whole state of a target contract; the state is continuously updated by Tycho.
|
||||
# To use this we need Tycho to be configured to index the target contract state.
|
||||
tycho = "tycho"
|
||||
|
||||
|
||||
class Capability(IntEnum):
|
||||
SellSide = auto()
|
||||
BuySide = auto()
|
||||
PriceFunction = auto()
|
||||
FeeOnTransfer = auto()
|
||||
ConstantPrice = auto()
|
||||
TokenBalanceIndependent = auto()
|
||||
ScaledPrice = auto()
|
||||
HardLimits = auto()
|
||||
MarginalPrice = auto()
|
||||
|
||||
|
||||
class SynchronizerState(Enum):
|
||||
started = "started"
|
||||
ready = "ready"
|
||||
stale = "stale"
|
||||
delayed = "delayed"
|
||||
advanced = "advanced"
|
||||
ended = "ended"
|
||||
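A minimal round-trip sketch of the amount conversions defined on EthereumToken above; the token itself is a dummy and the package is assumed to be importable as `tycho_client`:

from decimal import Decimal

from tycho_client.models import EthereumToken

# Dummy 6-decimal token, e.g. a USDC-like asset.
token = EthereumToken(symbol="TKN", address="0x0000000000000000000000000000000000000001", decimals=6)

onchain = token.to_onchain_amount(Decimal("1.5"))    # 1500000
offchain = token.from_onchain_amount(onchain)        # Decimal("1.500000")

assert onchain == 1_500_000
assert offchain == Decimal("1.5")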
347
testing/tycho-client/tycho_client/pool_state.py
Normal file
347
testing/tycho-client/tycho_client/pool_state.py
Normal file
@@ -0,0 +1,347 @@
|
||||
import functools
|
||||
import itertools
|
||||
from collections import defaultdict
|
||||
from copy import deepcopy
|
||||
from decimal import Decimal
|
||||
from fractions import Fraction
|
||||
from logging import getLogger
|
||||
from typing import Optional, cast, TypeVar, Annotated
|
||||
|
||||
from eth_typing import HexStr
|
||||
from protosim_py import SimulationEngine, AccountInfo
|
||||
from pydantic import BaseModel, PrivateAttr, Field
|
||||
|
||||
from .adapter_contract import AdapterContract
|
||||
from .constants import MAX_BALANCE, EXTERNAL_ACCOUNT
|
||||
from .exceptions import RecoverableSimulationException
|
||||
from .models import EVMBlock, Capability, Address, EthereumToken
|
||||
from .utils import (
|
||||
create_engine,
|
||||
get_contract_bytecode,
|
||||
frac_to_decimal,
|
||||
ERC20OverwriteFactory,
|
||||
)
|
||||
|
||||
ADAPTER_ADDRESS = "0xA2C5C98A892fD6656a7F39A2f63228C0Bc846270"
|
||||
|
||||
log = getLogger(__name__)
|
||||
TPoolState = TypeVar("TPoolState", bound="ThirdPartyPool")
|
||||
|
||||
|
||||
class ThirdPartyPool(BaseModel):
|
||||
id_: str
|
||||
tokens: tuple[EthereumToken, ...]
|
||||
balances: dict[Address, Decimal]
|
||||
block: EVMBlock
|
||||
spot_prices: dict[tuple[EthereumToken, EthereumToken], Decimal]
|
||||
trading_fee: Decimal
|
||||
exchange: str
|
||||
minimum_gas: int
|
||||
|
||||
_engine: SimulationEngine = PrivateAttr(default=None)
|
||||
|
||||
adapter_contract_name: str
|
||||
"""The adapters contract name. Used to look up the byte code for the adapter."""
|
||||
_adapter_contract: AdapterContract = PrivateAttr(default=None)
|
||||
|
||||
stateless_contracts: dict[str, bytes] = {}
|
||||
"""The address to bytecode map of all stateless contracts used by the protocol for simulations."""
|
||||
|
||||
capabilities: set[Capability] = Field(default_factory=lambda: {Capability.SellSide})
|
||||
"""The supported capabilities of this pool."""
|
||||
|
||||
balance_owner: Optional[str] = None
|
||||
"""The contract address for where protocol balances are stored (i.e. a vault contract).
|
||||
If given, balances will be overwritten here instead of on the pool contract during simulations."""
|
||||
|
||||
block_lasting_overwrites: defaultdict[
|
||||
Address,
|
||||
Annotated[dict[int, int], Field(default_factory=lambda: defaultdict(dict))],
|
||||
] = Field(default_factory=lambda: defaultdict(dict))
|
||||
|
||||
"""Storage overwrites that will be applied to all simulations. They will be cleared
|
||||
when ``clear_all_cache`` is called, i.e. usually at each block. Hence the name."""
|
||||
|
||||
trace: bool = False
|
||||
|
||||
def __init__(self, **data):
|
||||
super().__init__(**data)
|
||||
self._set_engine(data.get("engine", None))
|
||||
self.balance_owner = data.get("balance_owner", None)
|
||||
self._adapter_contract = AdapterContract(ADAPTER_ADDRESS, self._engine)
|
||||
self._set_capabilities()
|
||||
if len(self.spot_prices) == 0:
|
||||
self._set_spot_prices()
|
||||
|
||||
def _set_engine(self, engine: Optional[SimulationEngine]):
|
||||
"""Set instance's simulation engine. If no engine given, make a default one.
|
||||
|
||||
If engine is already set, this is a noop.
|
||||
|
||||
The engine will have the specified adapter contract mocked, as well as the
|
||||
tokens used by the pool.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
engine
|
||||
Optional simulation engine instance.
|
||||
"""
|
||||
if self._engine is not None:
|
||||
return
|
||||
else:
|
||||
engine = create_engine([t.address for t in self.tokens], trace=self.trace)
|
||||
engine.init_account(
|
||||
address="0x0000000000000000000000000000000000000000",
|
||||
account=AccountInfo(balance=0, nonce=0),
|
||||
mocked=False,
|
||||
permanent_storage=None,
|
||||
)
|
||||
engine.init_account(
|
||||
address="0x0000000000000000000000000000000000000004",
|
||||
account=AccountInfo(balance=0, nonce=0),
|
||||
mocked=False,
|
||||
permanent_storage=None,
|
||||
)
|
||||
engine.init_account(
|
||||
address=ADAPTER_ADDRESS,
|
||||
account=AccountInfo(
|
||||
balance=MAX_BALANCE,
|
||||
nonce=0,
|
||||
code=get_contract_bytecode(self.adapter_contract_name),
|
||||
),
|
||||
mocked=False,
|
||||
permanent_storage=None,
|
||||
)
|
||||
for addr, bytecode in self.stateless_contracts.items():
|
||||
engine.init_account(
|
||||
address=addr,
|
||||
account=AccountInfo(balance=0, nonce=0, code=bytecode),
|
||||
mocked=False,
|
||||
permanent_storage=None,
|
||||
)
|
||||
self._engine = engine
|
||||
|
||||
def _set_spot_prices(self):
|
||||
"""Set the spot prices for this pool.
|
||||
|
||||
We currently require the price function capability.
|
||||
"""
|
||||
self._ensure_capability(Capability.PriceFunction)
|
||||
for t0, t1 in itertools.permutations(self.tokens, 2):
|
||||
sell_amount = t0.to_onchain_amount(
|
||||
self.get_sell_amount_limit(t0, t1) * Decimal("0.01")
|
||||
)
|
||||
frac = self._adapter_contract.price(
|
||||
cast(HexStr, self.id_),
|
||||
t0,
|
||||
t1,
|
||||
[sell_amount],
|
||||
block=self.block,
|
||||
overwrites=self.block_lasting_overwrites,
|
||||
)[0]
|
||||
if Capability.ScaledPrice in self.capabilities:
|
||||
self.spot_prices[(t0, t1)] = frac_to_decimal(frac)
|
||||
else:
|
||||
scaled = frac * Fraction(10 ** t0.decimals, 10 ** t1.decimals)
|
||||
self.spot_prices[(t0, t1)] = frac_to_decimal(scaled)
|
||||
|
||||
def _ensure_capability(self, capability: Capability):
|
||||
"""Ensures the protocol/adapter implement a certain capability."""
|
||||
if capability not in self.capabilities:
|
||||
raise NotImplementedError(f"{capability} not available!")
|
||||
|
||||
def _set_capabilities(self):
|
||||
"""Sets capabilities of the pool."""
|
||||
capabilities = []
|
||||
for t0, t1 in itertools.permutations(self.tokens, 2):
|
||||
capabilities.append(
|
||||
self._adapter_contract.get_capabilities(cast(HexStr, self.id_), t0, t1)
|
||||
)
|
||||
max_capabilities = max(map(len, capabilities))
|
||||
self.capabilities = functools.reduce(set.intersection, capabilities)
|
||||
if len(self.capabilities) < max_capabilities:
|
||||
log.warning(
|
||||
f"Pool {self.id_} hash different capabilities depending on the token pair!"
|
||||
)
|
||||
|
||||
def get_amount_out(
|
||||
self: TPoolState,
|
||||
sell_token: EthereumToken,
|
||||
sell_amount: Decimal,
|
||||
buy_token: EthereumToken,
|
||||
) -> tuple[Decimal, int, TPoolState]:
|
||||
# if the pool has a hard limit and the sell amount exceeds that, simulate and
|
||||
# raise a partial trade
|
||||
if Capability.HardLimits in self.capabilities:
|
||||
sell_limit = self.get_sell_amount_limit(sell_token, buy_token)
|
||||
if sell_amount > sell_limit:
|
||||
partial_trade = self._get_amount_out(sell_token, sell_limit, buy_token)
|
||||
raise RecoverableSimulationException(
|
||||
"Sell amount exceeds sell limit",
|
||||
repr(self),
|
||||
partial_trade + (sell_limit,),
|
||||
)
|
||||
|
||||
return self._get_amount_out(sell_token, sell_amount, buy_token)
|
||||
|
||||
def _get_amount_out(
|
||||
self: TPoolState,
|
||||
sell_token: EthereumToken,
|
||||
sell_amount: Decimal,
|
||||
buy_token: EthereumToken,
|
||||
) -> tuple[Decimal, int, TPoolState]:
|
||||
trade, state_changes = self._adapter_contract.swap(
|
||||
cast(HexStr, self.id_),
|
||||
sell_token,
|
||||
buy_token,
|
||||
False,
|
||||
sell_token.to_onchain_amount(sell_amount),
|
||||
block=self.block,
|
||||
overwrites=self._get_overwrites(sell_token, buy_token),
|
||||
)
|
||||
new_state = self._duplicate()
|
||||
for address, state_update in state_changes.items():
|
||||
for slot, value in state_update.storage.items():
|
||||
new_state.block_lasting_overwrites[address][slot] = value
|
||||
|
||||
new_price = frac_to_decimal(trade.price)
|
||||
if new_price != Decimal(0):
|
||||
new_state.spot_prices = {
|
||||
(sell_token, buy_token): new_price,
|
||||
(buy_token, sell_token): Decimal(1) / new_price,
|
||||
}
|
||||
|
||||
buy_amount = buy_token.from_onchain_amount(trade.received_amount)
|
||||
|
||||
return buy_amount, trade.gas_used, new_state
|
||||
|
||||
def _get_overwrites(
|
||||
self, sell_token: EthereumToken, buy_token: EthereumToken, **kwargs
|
||||
) -> dict[Address, dict[int, int]]:
|
||||
"""Get an overwrites dictionary to use in a simulation.
|
||||
|
||||
The returned overwrites include block-lasting overwrites set on the instance
|
||||
level, and token-specific overwrites that depend on passed tokens.
|
||||
"""
|
||||
token_overwrites = self._get_token_overwrites(sell_token, buy_token, **kwargs)
|
||||
return _merge(self.block_lasting_overwrites, token_overwrites)
|
||||
|
||||
def _get_token_overwrites(
|
||||
self, sell_token: EthereumToken, buy_token: EthereumToken, max_amount=None
|
||||
) -> dict[Address, dict[int, int]]:
|
||||
"""Creates overwrites for a token.
|
||||
|
||||
Funds external account with enough tokens to execute swaps. Also creates a
|
||||
corresponding approval to the adapter contract.
|
||||
|
||||
If the protocol reads its own token balances, the balances for the underlying
|
||||
pool contract will also be overwritten.
|
||||
"""
|
||||
res = []
|
||||
if Capability.TokenBalanceIndependent not in self.capabilities:
|
||||
res = [self._get_balance_overwrites()]
|
||||
|
||||
# avoids recursion if using this method with get_sell_amount_limit
|
||||
if max_amount is None:
|
||||
max_amount = sell_token.to_onchain_amount(
|
||||
self.get_sell_amount_limit(sell_token, buy_token)
|
||||
)
|
||||
overwrites = ERC20OverwriteFactory(sell_token)
|
||||
overwrites.set_balance(max_amount, EXTERNAL_ACCOUNT)
|
||||
overwrites.set_allowance(
|
||||
allowance=max_amount, owner=EXTERNAL_ACCOUNT, spender=ADAPTER_ADDRESS
|
||||
)
|
||||
res.append(overwrites.get_protosim_overwrites())
|
||||
|
||||
# we need to merge the dictionaries because balance overwrites may target
|
||||
# the same token address.
|
||||
res = functools.reduce(_merge, res)
|
||||
return res
|
||||
|
||||
def _get_balance_overwrites(self) -> dict[Address, dict[int, int]]:
|
||||
balance_overwrites = {}
|
||||
address = self.balance_owner or self.id_
|
||||
for t in self.tokens:
|
||||
overwrites = ERC20OverwriteFactory(t)
|
||||
overwrites.set_balance(
|
||||
t.to_onchain_amount(self.balances[t.address]), address
|
||||
)
|
||||
balance_overwrites.update(overwrites.get_protosim_overwrites())
|
||||
return balance_overwrites
|
||||
|
||||
def _duplicate(self: type["ThirdPartyPool"]) -> "ThirdPartyPool":
|
||||
"""Make a new instance identical to self that shares the same simulation engine.
|
||||
|
||||
Note that the new and current state become coupled in a way that they must
|
||||
simulate the same block. This is fine, see
|
||||
https://datarevenue.atlassian.net/browse/ROC-1301
|
||||
|
||||
This method is not named _copy to avoid confusion with Pydantic's .copy method.
|
||||
"""
|
||||
return type(self)(
|
||||
exchange=self.exchange,
|
||||
adapter_contract_name=self.adapter_contract_name,
|
||||
block=self.block,
|
||||
id_=self.id_,
|
||||
tokens=self.tokens,
|
||||
spot_prices=self.spot_prices.copy(),
|
||||
trading_fee=self.trading_fee,
|
||||
block_lasting_overwrites=deepcopy(self.block_lasting_overwrites),
|
||||
engine=self._engine,
|
||||
balances=self.balances,
|
||||
minimum_gas=self.minimum_gas,
|
||||
balance_owner=self.balance_owner,
|
||||
stateless_contracts=self.stateless_contracts,
|
||||
)
|
||||
|
||||
def get_sell_amount_limit(
|
||||
self, sell_token: EthereumToken, buy_token: EthereumToken
|
||||
) -> Decimal:
|
||||
"""
|
||||
Retrieves the sell amount of the given token.
|
||||
|
||||
For pools with more than 2 tokens, the sell limit is obtained for all possible buy token
|
||||
combinations and the minimum is returned.
|
||||
"""
|
||||
limit = self._adapter_contract.get_limits(
|
||||
cast(HexStr, self.id_),
|
||||
sell_token,
|
||||
buy_token,
|
||||
block=self.block,
|
||||
overwrites=self._get_overwrites(
|
||||
sell_token, buy_token, max_amount=MAX_BALANCE // 100
|
||||
),
|
||||
)[0]
|
||||
return sell_token.from_onchain_amount(limit)
|
||||
|
||||
def clear_all_cache(self):
|
||||
self._engine.clear_temp_storage()
|
||||
self.block_lasting_overwrites = defaultdict(dict)
|
||||
self._set_spot_prices()
|
||||
|
||||
|
||||
def _merge(a: dict, b: dict, path=None):
|
||||
"""
|
||||
Merges two dictionaries (a and b) deeply. This means it will traverse and combine
|
||||
their nested dictionaries too if present.
|
||||
|
||||
Parameters:
|
||||
a (dict): The first dictionary to merge.
|
||||
b (dict): The second dictionary to merge into the first one.
|
||||
path (list, optional): An internal parameter used during recursion
|
||||
to keep track of the ancestry of nested dictionaries.
|
||||
|
||||
Returns:
|
||||
a (dict): The merged dictionary which includes all key-value pairs from `b`
|
||||
added into `a`. If they have nested dictionaries with same keys, those are also merged.
|
||||
On key conflicts, preference is given to values from b.
|
||||
"""
|
||||
if path is None:
|
||||
path = []
|
||||
for key in b:
|
||||
if key in a:
|
||||
if isinstance(a[key], dict) and isinstance(b[key], dict):
|
||||
_merge(a[key], b[key], path + [str(key)])
|
||||
else:
|
||||
a[key] = b[key]
|
||||
return a
|
||||
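A small sketch of the deep-merge behaviour of _merge above: nested dictionaries are combined recursively, conflicting leaf keys take the value from `b`, and the merge happens in place on `a` (the package is assumed to be importable as `tycho_client`):

from tycho_client.pool_state import _merge

a = {"0xpool": {1: 10, 2: 20}}
b = {"0xpool": {2: 99}, "0xtoken": {0: 7}}

merged = _merge(a, b)
assert merged == {"0xpool": {1: 10, 2: 99}, "0xtoken": {0: 7}}
assert merged is a  # `a` is mutated and returned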
345
testing/tycho-client/tycho_client/tycho_adapter.py
Normal file
345
testing/tycho-client/tycho_client/tycho_adapter.py
Normal file
@@ -0,0 +1,345 @@
|
||||
import asyncio
|
||||
import json
|
||||
import platform
|
||||
import time
|
||||
from asyncio.subprocess import STDOUT, PIPE
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
from http.client import HTTPException
|
||||
from logging import getLogger
|
||||
from typing import Any, Optional, Dict
|
||||
|
||||
import requests
|
||||
from protosim_py import AccountUpdate, AccountInfo, BlockHeader
|
||||
|
||||
from .constants import TYCHO_CLIENT_LOG_FOLDER, TYCHO_CLIENT_FOLDER
|
||||
from .decoders import ThirdPartyPoolTychoDecoder
|
||||
from .exceptions import APIRequestError, TychoClientException
|
||||
from .models import Blockchain, EVMBlock, EthereumToken, SynchronizerState, Address
|
||||
from .pool_state import ThirdPartyPool
|
||||
from .tycho_db import TychoDBSingleton
|
||||
from .utils import create_engine
|
||||
|
||||
log = getLogger(__name__)
|
||||
|
||||
|
||||
class TokenLoader:
|
||||
def __init__(
|
||||
self,
|
||||
tycho_url: str,
|
||||
blockchain: Blockchain,
|
||||
min_token_quality: Optional[int] = 0,
|
||||
):
|
||||
self.tycho_url = tycho_url
|
||||
self.blockchain = blockchain
|
||||
self.min_token_quality = min_token_quality
|
||||
self.endpoint = "/v1/{}/tokens"
|
||||
self._token_limit = 10000
|
||||
|
||||
def get_tokens(self) -> dict[str, EthereumToken]:
|
||||
"""Loads all tokens from Tycho RPC"""
|
||||
url = self.tycho_url + self.endpoint.format(self.blockchain.value)
|
||||
page = 0
|
||||
|
||||
start = time.monotonic()
|
||||
all_tokens = []
|
||||
while data := self._get_all_with_pagination(
|
||||
url=url,
|
||||
page=page,
|
||||
limit=self._token_limit,
|
||||
params={"min_quality": self.min_token_quality},
|
||||
):
|
||||
all_tokens.extend(data)
|
||||
page += 1
|
||||
if len(data) < self._token_limit:
|
||||
break
|
||||
|
||||
log.info(f"Loaded {len(all_tokens)} tokens in {time.monotonic() - start:.2f}s")
|
||||
|
||||
formatted_tokens = dict()
|
||||
|
||||
for token in all_tokens:
|
||||
formatted = EthereumToken(**token)
|
||||
formatted_tokens[formatted.address] = formatted
|
||||
|
||||
return formatted_tokens
|
||||
|
||||
def get_token_subset(self, addresses: list[str]) -> dict[str, EthereumToken]:
|
||||
"""Loads a subset of tokens from Tycho RPC"""
|
||||
url = self.tycho_url + self.endpoint.format(self.blockchain.value)
|
||||
page = 0
|
||||
|
||||
start = time.monotonic()
|
||||
all_tokens = []
|
||||
while data := self._get_all_with_pagination(
|
||||
url=url,
|
||||
page=page,
|
||||
limit=self._token_limit,
|
||||
params={"min_quality": self.min_token_quality, "addresses": addresses},
|
||||
):
|
||||
all_tokens.extend(data)
|
||||
page += 1
|
||||
if len(data) < self._token_limit:
|
||||
break
|
||||
|
||||
log.info(f"Loaded {len(all_tokens)} tokens in {time.monotonic() - start:.2f}s")
|
||||
|
||||
formatted_tokens = dict()
|
||||
|
||||
for token in all_tokens:
|
||||
formatted = EthereumToken(**token)
|
||||
formatted_tokens[formatted.address] = formatted
|
||||
|
||||
return formatted_tokens
|
||||
|
||||
@staticmethod
|
||||
def _get_all_with_pagination(
|
||||
url: str, params: Optional[Dict] = None, page: int = 0, limit: int = 50
|
||||
) -> Dict:
|
||||
if params is None:
|
||||
params = {}
|
||||
|
||||
params["pagination"] = {"page": page, "page_size": limit}
|
||||
r = requests.post(url, json=params)
|
||||
try:
|
||||
r.raise_for_status()
|
||||
except requests.HTTPError as e:
|
||||
log.error(f"Request status {r.status_code} with content {r.json()}")
|
||||
raise APIRequestError("Failed to load token configurations")
|
||||
return r.json()["tokens"]
|
||||
|
||||
|
||||
@dataclass(repr=False)
|
||||
class BlockProtocolChanges:
|
||||
block: EVMBlock
|
||||
pool_states: dict[Address, ThirdPartyPool]
|
||||
"""All updated pools"""
|
||||
removed_pools: set[Address]
|
||||
deserialization_time: float
|
||||
"""The time it took to deserialize the pool states from the tycho feed message"""
|
||||
|
||||
|
||||
class TychoPoolStateStreamAdapter:
|
||||
def __init__(
|
||||
self,
|
||||
tycho_url: str,
|
||||
protocol: str,
|
||||
decoder: ThirdPartyPoolTychoDecoder,
|
||||
blockchain: Blockchain,
|
||||
min_tvl: Optional[Decimal] = 10,
|
||||
min_token_quality: Optional[int] = 0,
|
||||
include_state=True,
|
||||
):
|
||||
"""
|
||||
:param tycho_url: URL to connect to Tycho DB
|
||||
:param protocol: Name of the protocol that you're testing
:param decoder: Decoder used to turn Tycho feed messages into ThirdPartyPool states
|
||||
:param blockchain: Blockchain enum
|
||||
:param min_tvl: Minimum TVL to consider a pool
|
||||
:param min_token_quality: Minimum token quality to consider a token
|
||||
:param include_state: Include state in the stream
|
||||
"""
|
||||
self.min_token_quality = min_token_quality
|
||||
self.tycho_url = tycho_url
|
||||
self.min_tvl = min_tvl
|
||||
self.tycho_client = None
|
||||
self.protocol = f"vm:{protocol}"
|
||||
self._include_state = include_state
|
||||
self._blockchain = blockchain
|
||||
self._decoder = decoder
|
||||
|
||||
# Create engine
|
||||
# TODO: This should be initialized outside the adapter?
|
||||
TychoDBSingleton.initialize(tycho_http_url=self.tycho_url)
|
||||
self._engine = create_engine([], trace=False)
|
||||
|
||||
# Loads tokens from Tycho
|
||||
self._tokens: dict[str, EthereumToken] = TokenLoader(
|
||||
tycho_url=f"http://{self.tycho_url}",
|
||||
blockchain=self._blockchain,
|
||||
min_token_quality=self.min_token_quality,
|
||||
).get_tokens()
|
||||
|
||||
async def start(self):
|
||||
"""Start the tycho-client Rust binary through subprocess"""
|
||||
# stdout=PIPE means that the output is piped directly to this Python process
|
||||
# stderr=STDOUT combines the stderr and stdout streams
|
||||
bin_path = self._get_binary_path()
|
||||
|
||||
cmd = [
|
||||
"--log-folder",
|
||||
str(TYCHO_CLIENT_LOG_FOLDER),
|
||||
"--tycho-url",
|
||||
self.tycho_url,
|
||||
"--min-tvl",
|
||||
str(self.min_tvl),
|
||||
]
|
||||
if not self._include_state:
|
||||
cmd.append("--no-state")
|
||||
cmd.append("--exchange")
|
||||
cmd.append(self.protocol)
|
||||
|
||||
log.debug(f"Starting tycho-client binary at {bin_path}. CMD: {cmd}")
|
||||
self.tycho_client = await asyncio.create_subprocess_exec(
|
||||
str(bin_path), *cmd, stdout=PIPE, stderr=STDOUT, limit=2 ** 64
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _get_binary_path():
|
||||
"""Determines the correct binary path based on the OS and architecture."""
|
||||
os_name = platform.system()
|
||||
if os_name == "Linux":
|
||||
architecture = platform.machine()
|
||||
if architecture == "aarch64":
|
||||
return TYCHO_CLIENT_FOLDER / "tycho-client-linux-arm64"
|
||||
else:
|
||||
return TYCHO_CLIENT_FOLDER / "tycho-client-linux-x64"
|
||||
elif os_name == "Darwin":
|
||||
architecture = platform.machine()
|
||||
if architecture == "arm64":
|
||||
return TYCHO_CLIENT_FOLDER / "tycho-client-mac-arm64"
|
||||
else:
|
||||
return TYCHO_CLIENT_FOLDER / "tycho-client-mac-x64"
|
||||
else:
|
||||
raise ValueError(f"Unsupported OS: {os_name}")
|
||||
|
||||
def __aiter__(self):
|
||||
return self
|
||||
|
||||
async def __anext__(self) -> BlockProtocolChanges:
|
||||
if self.tycho_client.stdout.at_eof():
|
||||
raise StopAsyncIteration
|
||||
line = await self.tycho_client.stdout.readline()
|
||||
|
||||
try:
|
||||
if not line:
|
||||
exit_code = await self.tycho_client.wait()
|
||||
if exit_code == 0:
|
||||
# Clean exit, handle accordingly, possibly without raising an error
|
||||
log.debug("Tycho client exited cleanly.")
|
||||
raise StopAsyncIteration
|
||||
else:
|
||||
line = f"Tycho client failed with exit code: {exit_code}"
|
||||
# Non-zero exit code, handle accordingly, possibly by raising an error
|
||||
raise TychoClientException(line)
|
||||
|
||||
msg = json.loads(line.decode("utf-8"))
|
||||
except (json.JSONDecodeError, TychoClientException):
|
||||
# Read the last 10 lines from the log file available under TYCHO_CLIENT_LOG_FOLDER
|
||||
# and include them in the raised exception message
|
||||
error_msg = f"Invalid JSON output on tycho. Original line: {line}."
|
||||
with open(TYCHO_CLIENT_LOG_FOLDER / "dev_logs.log", "r") as f:
|
||||
lines = f.readlines()
|
||||
last_lines = lines[-10:]
|
||||
error_msg += f" Tycho logs: {last_lines}"
|
||||
log.exception(error_msg)
|
||||
raise Exception("Tycho-client failed.")
|
||||
return self.process_tycho_message(msg)
|
||||
|
||||
@staticmethod
|
||||
def build_snapshot_message(
|
||||
protocol_components: dict, protocol_states: dict, contract_states: dict
|
||||
) -> dict[str, ThirdPartyPool]:
|
||||
vm_states = {state["address"]: state for state in contract_states["accounts"]}
|
||||
states = {}
|
||||
for component in protocol_components["protocol_components"]:
|
||||
pool_id = component["id"]
|
||||
states[pool_id] = {"component": component}
|
||||
for state in protocol_states["states"]:
|
||||
pool_id = state["component_id"]
|
||||
if pool_id not in states:
|
||||
log.debug(f"{pool_id} was present in snapshot but not in components")
|
||||
continue
|
||||
states[pool_id]["state"] = state
|
||||
snapshot = {"vm_storage": vm_states, "states": states}
|
||||
|
||||
return snapshot
|
||||
|
||||
def process_tycho_message(self, msg) -> BlockProtocolChanges:
|
||||
self._validate_sync_states(msg)
|
||||
|
||||
state_msg = msg["state_msgs"][self.protocol]
|
||||
|
||||
block = EVMBlock(
|
||||
id=msg["block"]["id"],
|
||||
ts=datetime.fromtimestamp(msg["block"]["timestamp"]),
|
||||
hash_=msg["block"]["hash"],
|
||||
)
|
||||
|
||||
return self.process_snapshot(block, state_msg["snapshot"])
|
||||
|
||||
def process_snapshot(
|
||||
self, block: EVMBlock, state_msg: dict
|
||||
) -> BlockProtocolChanges:
|
||||
start = time.monotonic()
|
||||
removed_pools = set()
|
||||
decoded_count = 0
|
||||
failed_count = 0
|
||||
|
||||
self._process_vm_storage(state_msg["vm_storage"], block)
|
||||
|
||||
# decode new components
|
||||
decoded_pools, failed_pools = self._decoder.decode_snapshot(
|
||||
state_msg["states"], block, self._tokens
|
||||
)
|
||||
|
||||
decoded_count += len(decoded_pools)
|
||||
failed_count += len(failed_pools)
|
||||
|
||||
decoded_pools = {
|
||||
p.id_: p for p in decoded_pools.values()
|
||||
} # remap pools to their pool ids
|
||||
deserialization_time = time.monotonic() - start
|
||||
total = decoded_count + failed_count
|
||||
log.debug(
|
||||
f"Received {total} snapshots. n_decoded: {decoded_count}, n_failed: {failed_count}"
|
||||
)
|
||||
if failed_count > 0:
|
||||
log.info(f"Could not to decode {failed_count}/{total} pool snapshots")
|
||||
|
||||
return BlockProtocolChanges(
|
||||
block=block,
|
||||
pool_states=decoded_pools,
|
||||
removed_pools=removed_pools,
|
||||
deserialization_time=round(deserialization_time, 3),
|
||||
)
|
||||
|
||||
def _validate_sync_states(self, msg):
|
||||
try:
|
||||
sync_state = msg["sync_states"][self.protocol]
|
||||
log.info(f"Received sync state for {self.protocol}: {sync_state}")
|
||||
if not sync_state["status"] != SynchronizerState.ready.value:
|
||||
raise ValueError("Tycho-indexer is not synced")
|
||||
except KeyError:
|
||||
raise ValueError("Invalid message received from tycho-client.")
|
||||
|
||||
def _process_vm_storage(self, storage: dict[str, Any], block: EVMBlock):
|
||||
vm_updates = []
|
||||
for storage_update in storage.values():
|
||||
address = storage_update["address"]
|
||||
balance = int(storage_update["native_balance"], 16)
|
||||
code = bytearray.fromhex(storage_update["code"][2:])
|
||||
|
||||
# init accounts
|
||||
self._engine.init_account(
|
||||
address=address,
|
||||
account=AccountInfo(balance=balance, nonce=0, code=code),
|
||||
mocked=False,
|
||||
permanent_storage=None,
|
||||
)
|
||||
|
||||
# apply account updates
|
||||
slots = {int(k, 16): int(v, 16) for k, v in storage_update["slots"].items()}
|
||||
vm_updates.append(
|
||||
AccountUpdate(
|
||||
address=address,
|
||||
chain=storage_update["chain"],
|
||||
slots=slots,
|
||||
balance=balance,
|
||||
code=code,
|
||||
change="Update",
|
||||
)
|
||||
)
|
||||
|
||||
block_header = BlockHeader(block.id, block.hash_, int(block.ts.timestamp()))
|
||||
TychoDBSingleton.get_instance().update(vm_updates, block_header)
|
||||
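A minimal usage sketch of the stream adapter above. The URL, protocol name, adapter contract name and gas value are hypothetical, and a Tycho instance must be reachable at that URL for the constructor and the stream to work; the package is assumed to be importable as `tycho_client`:

import asyncio

from tycho_client.decoders import ThirdPartyPoolTychoDecoder
from tycho_client.models import Blockchain
from tycho_client.tycho_adapter import TychoPoolStateStreamAdapter


async def main():
    decoder = ThirdPartyPoolTychoDecoder(
        adapter_contract="BalancerV2SwapAdapter", minimum_gas=100_000, trace=False
    )
    stream = TychoPoolStateStreamAdapter(
        tycho_url="localhost:4242",
        protocol="balancer_v2",
        decoder=decoder,
        blockchain=Blockchain.ethereum,
    )
    await stream.start()
    async for changes in stream:
        # `changes` is a BlockProtocolChanges instance with the decoded pools.
        print(changes.block.id, len(changes.pool_states))


# asyncio.run(main())  # requires a running tycho-indexer and the tycho-client binary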
48
testing/tycho-client/tycho_client/tycho_db.py
Normal file
48
testing/tycho-client/tycho_client/tycho_db.py
Normal file
@@ -0,0 +1,48 @@
|
||||
from protosim_py import TychoDB
|
||||
|
||||
|
||||
class TychoDBSingleton:
|
||||
"""
|
||||
A singleton wrapper around the TychoDB class.
|
||||
|
||||
This class ensures that there is only one instance of TychoDB throughout the lifetime of the program,
|
||||
avoiding the overhead of creating multiple instances.
|
||||
"""
|
||||
|
||||
_instance = None
|
||||
|
||||
@classmethod
|
||||
def initialize(cls, tycho_http_url: str):
|
||||
"""
|
||||
Initialize the TychoDB instance with the given URLs.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
tycho_http_url : str
|
||||
The URL of the Tycho HTTP server.
|
||||
|
||||
"""
|
||||
cls._instance = TychoDB(tycho_http_url=tycho_http_url)
|
||||
|
||||
@classmethod
|
||||
def get_instance(cls) -> TychoDB:
|
||||
"""
|
||||
Retrieve the singleton instance of TychoDB.
|
||||
|
||||
If the TychoDB instance has not been initialized, a ValueError is raised.
|
||||
Otherwise, the existing instance is returned.
|
||||
|
||||
Returns
|
||||
-------
|
||||
TychoDB
|
||||
The singleton instance of TychoDB.
|
||||
"""
|
||||
if cls._instance is None:
|
||||
raise ValueError(
|
||||
"TychoDB instance not initialized. Call initialize() first."
|
||||
)
|
||||
return cls._instance
|
||||
|
||||
@classmethod
|
||||
def clear_instance(cls):
|
||||
cls._instance = None
|
||||
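The intended lifecycle of the singleton, as a short sketch; the URL is hypothetical, and constructing TychoDB requires protosim_py and a reachable Tycho HTTP endpoint:

from tycho_client.tycho_db import TychoDBSingleton

TychoDBSingleton.initialize(tycho_http_url="http://localhost:4242")

db = TychoDBSingleton.get_instance()        # same instance everywhere afterwards
assert db is TychoDBSingleton.get_instance()

TychoDBSingleton.clear_instance()           # e.g. to reset state between test cases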
355
testing/tycho-client/tycho_client/utils.py
Normal file
355
testing/tycho-client/tycho_client/utils.py
Normal file
@@ -0,0 +1,355 @@
|
||||
import json
|
||||
import os
|
||||
from decimal import Decimal
|
||||
from fractions import Fraction
|
||||
from functools import lru_cache
|
||||
from logging import getLogger
|
||||
from pathlib import Path
|
||||
from typing import Final, Any
|
||||
|
||||
import eth_abi
|
||||
from eth_typing import HexStr
|
||||
from hexbytes import HexBytes
|
||||
from protosim_py import SimulationEngine, AccountInfo
|
||||
import requests
|
||||
from web3 import Web3
|
||||
|
||||
from .constants import EXTERNAL_ACCOUNT, MAX_BALANCE, ASSETS_FOLDER
|
||||
from .exceptions import OutOfGas
|
||||
from .models import Address, EthereumToken
|
||||
from .tycho_db import TychoDBSingleton
|
||||
|
||||
log = getLogger(__name__)
|
||||
|
||||
|
||||
def decode_tycho_exchange(exchange: str) -> tuple[str, bool]:
|
||||
# removes the vm prefix if present; the second return value is False for vm protocols and True for native protocols
|
||||
return (exchange.split(":")[1], False) if "vm:" in exchange else (exchange, True)
|
||||
|
||||
|
||||
def create_engine(
|
||||
mocked_tokens: list[Address], trace: bool = False
|
||||
) -> SimulationEngine:
|
||||
"""Create a simulation engine with a mocked ERC20 contract at given addresses.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
mocked_tokens
|
||||
A list of addresses at which a mocked ERC20 contract should be inserted.
|
||||
|
||||
trace
|
||||
Whether to trace calls. Only meant for debugging purposes; might print a lot of
|
||||
data to stdout.
|
||||
"""
|
||||
|
||||
db = TychoDBSingleton.get_instance()
|
||||
engine = SimulationEngine.new_with_tycho_db(db=db, trace=trace)
|
||||
|
||||
for t in mocked_tokens:
|
||||
info = AccountInfo(
|
||||
balance=0, nonce=0, code=get_contract_bytecode(ASSETS_FOLDER / "ERC20.bin")
|
||||
)
|
||||
engine.init_account(
|
||||
address=t, account=info, mocked=True, permanent_storage=None
|
||||
)
|
||||
engine.init_account(
|
||||
address=EXTERNAL_ACCOUNT,
|
||||
account=AccountInfo(balance=MAX_BALANCE, nonce=0, code=None),
|
||||
mocked=False,
|
||||
permanent_storage=None,
|
||||
)
|
||||
|
||||
return engine
|
||||
|
||||
|
||||
class ERC20OverwriteFactory:
|
||||
def __init__(self, token: EthereumToken):
|
||||
"""
|
||||
Initialize the ERC20OverwriteFactory.
|
||||
|
||||
Parameters:
|
||||
token: The token object.
|
||||
"""
|
||||
self._token = token
|
||||
self._overwrites = dict()
|
||||
self._balance_slot: Final[int] = 0
|
||||
self._allowance_slot: Final[int] = 1
|
||||
self._total_supply_slot: Final[int] = 2
|
||||
|
||||
def set_balance(self, balance: int, owner: Address):
|
||||
"""
|
||||
Set the balance for a given owner.
|
||||
|
||||
Parameters:
|
||||
balance: The balance value.
|
||||
owner: The owner's address.
|
||||
"""
|
||||
storage_index = get_storage_slot_at_key(HexStr(owner), self._balance_slot)
|
||||
self._overwrites[storage_index] = balance
|
||||
log.log(
|
||||
5,
|
||||
f"Override balance: token={self._token.address} owner={owner}"
|
||||
f"value={balance} slot={storage_index}",
|
||||
)
|
||||
|
||||
def set_allowance(self, allowance: int, spender: Address, owner: Address):
|
||||
"""
|
||||
Set the allowance for a given spender and owner.
|
||||
|
||||
Parameters:
|
||||
allowance: The allowance value.
|
||||
spender: The spender's address.
|
||||
owner: The owner's address.
|
||||
"""
|
||||
storage_index = get_storage_slot_at_key(
|
||||
HexStr(spender),
|
||||
get_storage_slot_at_key(HexStr(owner), self._allowance_slot),
|
||||
)
|
||||
self._overwrites[storage_index] = allowance
|
||||
log.log(
|
||||
5,
|
||||
f"Override allowance: token={self._token.address} owner={owner}"
|
||||
f"spender={spender} value={allowance} slot={storage_index}",
|
||||
)
|
||||
|
||||
def set_total_supply(self, supply: int):
|
||||
"""
|
||||
Set the total supply of the token.
|
||||
|
||||
Parameters:
|
||||
supply: The total supply value.
|
||||
"""
|
||||
self._overwrites[self._total_supply_slot] = supply
|
||||
log.log(
|
||||
5,
|
||||
f"Override total supply: token={self._token.address} supply={supply}"
|
||||
)
|
||||
|
||||
def get_protosim_overwrites(self) -> dict[Address, dict[int, int]]:
|
||||
"""
|
||||
Get the overwrites dictionary of previously collected values.
|
||||
|
||||
Returns:
|
||||
dict[Address, dict]: A dictionary containing the token's address
|
||||
and the overwrites.
|
||||
"""
|
||||
# Protosim returns lowercase addresses in state updates returned from simulation
|
||||
|
||||
return {self._token.address.lower(): self._overwrites}
|
||||
|
||||
def get_geth_overwrites(self) -> dict[Address, dict[int, int]]:
|
||||
"""
|
||||
Get the overwrites dictionary of previously collected values.
|
||||
|
||||
Returns:
|
||||
dict[Address, dict]: A dictionary containing the token's address
|
||||
and the overwrites.
|
||||
"""
|
||||
formatted_overwrites = {
|
||||
HexBytes(key).hex(): "0x" + HexBytes(val).hex().lstrip("0x").zfill(64)
|
||||
for key, val in self._overwrites.items()
|
||||
}
|
||||
|
||||
code = "0x" + get_contract_bytecode(ASSETS_FOLDER / "ERC20.bin").hex()
|
||||
return {self._token.address: {"stateDiff": formatted_overwrites, "code": code}}
|
||||
|
||||
|
||||
def get_storage_slot_at_key(key: Address, mapping_slot: int) -> int:
|
||||
"""Get storage slot index of a value stored at a certain key in a mapping
|
||||
|
||||
Parameters
|
||||
----------
|
||||
key
|
||||
Key in a mapping. This function is meant to work with ethereum addresses
|
||||
and accepts only strings.
|
||||
mapping_slot
|
||||
Storage slot at which the mapping itself is stored. See the examples for more
|
||||
explanation.
|
||||
|
||||
Returns
|
||||
-------
|
||||
slot
|
||||
An index of a storage slot where the value at the given key is stored.
|
||||
|
||||
Examples
|
||||
--------
|
||||
If a mapping is declared as a first variable in solidity code, its storage slot
|
||||
is 0 (e.g. ``balances`` in our mocked ERC20 contract). Here's how to compute
|
||||
a storage slot where balance of a given account is stored::
|
||||
|
||||
get_storage_slot_at_key("0xC63135E4bF73F637AF616DFd64cf701866BB2628", 0)
|
||||
|
||||
For nested mappings, we need to apply the function twice. An example of this is
|
||||
``allowances`` in ERC20. It is a mapping of form:
|
||||
``dict[owner, dict[spender, value]]``. In our mocked ERC20 contract, ``allowances``
|
||||
is a second variable, so it is stored at slot 1. Here's how to get a storage slot
|
||||
where an allowance of ``0xspender`` to spend ``0xowner``'s money is stored::
|
||||
|
||||
get_storage_slot_at_key("0xspender", get_storage_slot_at_key("0xowner", 1)))
|
||||
|
||||
See Also
|
||||
--------
|
||||
`Solidity Storage Layout documentation
|
||||
<https://docs.soliditylang.org/en/v0.8.13/internals/layout_in_storage.html#mappings-and-dynamic-arrays>`_
|
||||
"""
|
||||
key_bytes = bytes.fromhex(key[2:]).rjust(32, b"\0")
|
||||
mapping_slot_bytes = int.to_bytes(mapping_slot, 32, "big")
|
||||
slot_bytes = Web3.keccak(key_bytes + mapping_slot_bytes)
|
||||
return int.from_bytes(slot_bytes, "big")
|
||||
|
||||
|
||||
@lru_cache
|
||||
def get_contract_bytecode(path: str) -> bytes:
|
||||
"""Load contract bytecode from a file given an absolute path"""
|
||||
with open(path, "rb") as fh:
|
||||
code = fh.read()
|
||||
return code
|
||||
|
||||
|
||||
def frac_to_decimal(frac: Fraction) -> Decimal:
|
||||
return Decimal(frac.numerator) / Decimal(frac.denominator)
|
||||
|
||||
|
||||
def load_abi(name_or_path: str) -> dict:
|
||||
if os.path.exists(abspath := os.path.abspath(name_or_path)):
|
||||
path = abspath
|
||||
else:
|
||||
path = f"{os.path.dirname(os.path.abspath(__file__))}/assets/{name_or_path}.abi"
|
||||
try:
|
||||
with open(os.path.abspath(path)) as f:
|
||||
abi: dict = json.load(f)
|
||||
except FileNotFoundError:
|
||||
search_dir = f"{os.path.dirname(os.path.abspath(__file__))}/assets/"
|
||||
|
||||
# List all files in search dir and subdirs suggest them to the user in an error message
|
||||
available_files = []
|
||||
for dirpath, dirnames, filenames in os.walk(search_dir):
|
||||
for filename in filenames:
|
||||
# Make paths relative to search_dir
|
||||
relative_path = os.path.relpath(
|
||||
os.path.join(dirpath, filename), search_dir
|
||||
)
|
||||
available_files.append(relative_path.replace(".abi", ""))
|
||||
|
||||
raise FileNotFoundError(
|
||||
f"File {name_or_path} not found. "
|
||||
f"Did you mean one of these? {', '.join(available_files)}"
|
||||
)
|
||||
return abi
|
||||
|
||||
|
||||
# https://docs.soliditylang.org/en/latest/control-structures.html#panic-via-assert-and-error-via-require
|
||||
solidity_panic_codes = {
|
||||
0: "GenericCompilerPanic",
|
||||
1: "AssertionError",
|
||||
17: "ArithmeticOver/Underflow",
|
||||
18: "ZeroDivisionError",
|
||||
33: "UnkownEnumMember",
|
||||
34: "BadStorageByteArrayEncoding",
|
||||
51: "EmptyArray",
|
||||
0x32: "OutOfBounds",
|
||||
0x41: "OutOfMemory",
|
||||
0x51: "BadFunctionPointer",
|
||||
}
|
||||
|
||||
|
||||
def parse_solidity_error_message(data) -> str:
|
||||
data_bytes = HexBytes(data)
|
||||
error_string = f"Failed to decode: {data}"
|
||||
# data is encoded as Error(string)
|
||||
if data_bytes[:4] == HexBytes("0x08c379a0"):
|
||||
(error_string,) = eth_abi.decode(["string"], data_bytes[4:])
|
||||
return error_string
|
||||
elif data_bytes[:4] == HexBytes("0x4e487b71"):
|
||||
(error_code,) = eth_abi.decode(["uint256"], data_bytes[4:])
|
||||
return solidity_panic_codes.get(error_code, f"Panic({error_code})")
|
||||
# old solidity: revert 'some string' case
|
||||
try:
|
||||
(error_string,) = eth_abi.decode(["string"], data_bytes)
|
||||
return error_string
|
||||
except Exception:
|
||||
pass
|
||||
# some custom error; maybe it carries a string payload?
|
||||
try:
|
||||
(error_string,) = eth_abi.decode(["string"], data_bytes[4:])
|
||||
return error_string
|
||||
except Exception:
|
||||
pass
|
||||
return error_string
|
||||
|
||||
|
||||
def maybe_coerce_error(
|
||||
err: RuntimeError, pool_state: Any, gas_limit: int = None
|
||||
) -> Exception:
|
||||
details = err.args[0]
|
||||
# we got bytes as data, so this was a revert
|
||||
if details.data.startswith("0x"):
|
||||
err = RuntimeError(
|
||||
f"Revert! Reason: {parse_solidity_error_message(details.data)}"
|
||||
)
|
||||
# we have gas information; check if this is likely an out-of-gas error.
|
||||
if gas_limit is not None and details.gas_used is not None:
|
||||
# if we used up 97% or more, issue an OutOfGas error.
|
||||
usage = details.gas_used / gas_limit
|
||||
if usage >= 0.97:
|
||||
return OutOfGas(
|
||||
f"SimulationError: Likely out-of-gas. "
|
||||
f"Used: {usage * 100:.2f}% of gas limit. "
|
||||
f"Original error: {err}",
|
||||
repr(pool_state),
|
||||
)
|
||||
elif "OutOfGas" in details.data:
|
||||
if gas_limit is not None:
|
||||
usage = details.gas_used / gas_limit
|
||||
usage_msg = f"Used: {usage * 100:.2f}% of gas limit. "
|
||||
else:
|
||||
usage_msg = ""
|
||||
return OutOfGas(
|
||||
f"SimulationError: out-of-gas. {usage_msg}Original error: {details.data}",
|
||||
repr(pool_state),
|
||||
)
|
||||
return err
|
||||
|
||||
|
||||
def exec_rpc_method(url, method, params, timeout=240) -> dict:
|
||||
payload = {"jsonrpc": "2.0", "method": method, "params": params, "id": 1}
|
||||
headers = {"Content-Type": "application/json"}
|
||||
|
||||
r = requests.post(url, data=json.dumps(payload), headers=headers, timeout=timeout)
|
||||
|
||||
if r.status_code >= 400:
|
||||
raise RuntimeError(
|
||||
"RPC failed: status_code not ok. (method {}: {})".format(
|
||||
method, r.status_code
|
||||
)
|
||||
)
|
||||
data = r.json()
|
||||
|
||||
if "result" in data:
|
||||
return data["result"]
|
||||
elif "error" in data:
|
||||
raise RuntimeError(
|
||||
"RPC failed with Error {} - {}".format(data["error"], method)
|
||||
)
|
||||
|
||||
|
||||
def get_code_for_address(address: str, connection_string: str = None):
|
||||
if connection_string is None:
|
||||
connection_string = os.getenv("RPC_URL")
|
||||
if connection_string is None:
|
||||
raise EnvironmentError("RPC_URL environment variable is not set")
|
||||
|
||||
method = "eth_getCode"
|
||||
params = [address, "latest"]
|
||||
|
||||
try:
|
||||
code = exec_rpc_method(connection_string, method, params)
|
||||
return bytes.fromhex(code[2:])
|
||||
except RuntimeError as e:
|
||||
print(f"Error fetching code for address {address}: {e}")
|
||||
return None
|
||||
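Finally, a minimal sketch of preparing ERC20 storage overwrites for a simulation with the factory above. The token is a dummy, the spender is the adapter address hard-coded in pool_state.py, and the package is assumed to be importable as `tycho_client`:

from decimal import Decimal

from tycho_client.constants import EXTERNAL_ACCOUNT
from tycho_client.models import EthereumToken
from tycho_client.utils import ERC20OverwriteFactory

token = EthereumToken(symbol="TKN", address="0x0000000000000000000000000000000000000002", decimals=18)

factory = ERC20OverwriteFactory(token)
factory.set_balance(token.to_onchain_amount(Decimal("100")), EXTERNAL_ACCOUNT)
factory.set_allowance(
    allowance=token.to_onchain_amount(Decimal("100")),
    owner=EXTERNAL_ACCOUNT,
    spender="0xA2C5C98A892fD6656a7F39A2f63228C0Bc846270",  # ADAPTER_ADDRESS from pool_state.py
)

overwrites = factory.get_protosim_overwrites()  # {token_address.lower(): {slot: value}}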