Merge pull request #54 from propeller-heads/ah/balancer-misc-updates

feat(balancer): Add update markers & rate providers
This commit is contained in:
Alan Höng
2024-08-02 11:01:02 +02:00
committed by GitHub
45 changed files with 50290 additions and 57300 deletions

View File

@@ -169,12 +169,26 @@ contract BalancerV2SwapAdapter is ISwapAdapter {
returns (uint256[] memory limits)
{
limits = new uint256[](2);
address pool;
(pool,) = vault.getPool(poolId);
uint256 bptIndex = maybeGetBptTokenIndex(pool);
uint256 circulatingSupply = getBptCirculatingSupply(pool);
(address[] memory tokens, uint256[] memory balances,) =
vault.getPoolTokens(poolId);
for (uint256 i = 0; i < tokens.length; i++) {
if (tokens[i] == sellToken) {
limits[0] = balances[i] * RESERVE_LIMIT_FACTOR / 10;
if (i == bptIndex) {
// Some pools pre-mint the bpt tokens and keep the balance
// on the
// pool we can't sell more than the circulating supply
// though,
// else we get an underflow error.
limits[0] = circulatingSupply;
} else {
limits[0] = balances[i] * RESERVE_LIMIT_FACTOR / 10;
}
}
if (tokens[i] == buyToken) {
limits[1] = balances[i] * RESERVE_LIMIT_FACTOR / 10;
@@ -182,6 +196,35 @@ contract BalancerV2SwapAdapter is ISwapAdapter {
}
}
/// @dev Returns the index of the pool's own BPT token within the pool's
///      token list, or type(uint256).max when the pool does not expose
///      getBptIndex() (i.e. it has no pre-minted BPT).
/// @param poolAddress The address of the Balancer pool to query.
function maybeGetBptTokenIndex(address poolAddress)
internal
view
returns (uint256)
{
IPool pool = IPool(poolAddress);
// Not every pool type implements getBptIndex(); a revert is treated as
// "this pool has no BPT index" via the max-uint sentinel.
try pool.getBptIndex() returns (uint256 index) {
return index;
} catch {
return type(uint256).max;
}
}
/// @dev Returns the circulating supply of a pool's BPT token.
///      Tries getActualSupply() first, then falls back to
///      getVirtualSupply(); if the pool implements neither, returns
///      type(uint256).max as a sentinel.
/// @param poolAddress The address of the Balancer pool to query.
function getBptCirculatingSupply(address poolAddress)
internal
view
returns (uint256)
{
IPool pool = IPool(poolAddress);
// Different pool types expose different supply getters — probe both.
try pool.getActualSupply() returns (uint256 supply) {
return supply;
} catch {}
try pool.getVirtualSupply() returns (uint256 supply) {
return supply;
} catch {}
return type(uint256).max;
}
function getCapabilities(bytes32, address, address)
external
pure
@@ -486,3 +529,11 @@ interface IVault {
GIVEN_OUT
}
}
/// @dev Minimal interface for optional Balancer pool functions used to detect
///      a pre-minted BPT token and its circulating supply. Not every pool
///      implements all of these, so callers wrap them in try/catch.
interface IPool {
function getBptIndex() external view returns (uint256);
function getActualSupply() external view returns (uint256);
function getVirtualSupply() external view returns (uint256);
}

14
substreams/Cargo.lock generated
View File

@@ -857,18 +857,18 @@ checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
[[package]]
name = "serde"
version = "1.0.197"
version = "1.0.204"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2"
checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.197"
version = "1.0.204"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b"
checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222"
dependencies = [
"proc-macro2",
"quote",
@@ -877,9 +877,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.114"
version = "1.0.120"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0"
checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5"
dependencies = [
"itoa",
"ryu",
@@ -1104,6 +1104,8 @@ dependencies = [
"itertools 0.12.1",
"num-bigint",
"prost 0.11.9",
"serde",
"serde_json",
"substreams",
"substreams-ethereum",
]

View File

@@ -12,6 +12,8 @@ hex-literal = "0.4.1"
hex = "0.4.3"
ethabi = "18.0.0"
tycho-substreams = { path = "crates/tycho-substreams" }
serde = "1.0.204"
serde_json = "1.0.120"
[profile.release]
lto = true

View File

@@ -11,3 +11,5 @@ hex.workspace = true
itertools = "0.12.0"
ethabi.workspace = true
num-bigint = "0.4.4"
serde.workspace = true
serde_json.workspace = true

View File

@@ -0,0 +1,46 @@
use std::fmt::Debug;
use substreams::prelude::BigInt;
/// Encodes a value to bytes using json.
///
/// ## Panics
/// In case the serialisation to json fails.
pub fn json_serialize_value<T: serde::Serialize + Debug>(v: T) -> Vec<u8> {
    serde_json::to_value(v)
        .unwrap_or_else(|e| panic!("Failed to encode value as json {e}"))
        .to_string()
        // `into_bytes` reuses the String's buffer instead of copying it
        // again as `.as_bytes().to_vec()` did.
        .into_bytes()
}
/// Encodes a list of addresses (in byte representation) into json.
///
/// Converts each address to a 0x prefixed hex string and then serializes
/// the list of strings as a json.
///
/// ## Panics
/// In case the serialisation to json fails.
pub fn json_serialize_address_list(addresses: &[Vec<u8>]) -> Vec<u8> {
    let mut hex_strings = Vec::with_capacity(addresses.len());
    for address in addresses {
        hex_strings.push(format!("0x{}", hex::encode(address)));
    }
    json_serialize_value(hex_strings)
}
/// Encodes a list of BigInt values into json.
///
/// Converts each integer to a 0x prefixed hex string and then serializes
/// the list of strings as a json.
///
/// ## Panics
/// In case the serialisation to json fails.
pub fn json_serialize_bigint_list(values: &[BigInt]) -> Vec<u8> {
    let hex_strings: Vec<String> = values
        .iter()
        .map(|value| {
            let be_bytes = value.to_signed_bytes_be();
            format!("0x{}", hex::encode(be_bytes))
        })
        .collect();
    json_serialize_value(hex_strings)
}

View File

@@ -10,67 +10,14 @@
/// more [here](https://streamingfastio.medium.com/new-block-model-to-accelerate-chain-integration-9f65126e5425)
use std::collections::HashMap;
use substreams_ethereum::pb::eth::{
self,
v2::{block::DetailLevel, CallType, StorageChange},
use crate::{
models::{InterimContractChange, TransactionChanges},
prelude::TransactionChangesBuilder,
};
use substreams_ethereum::pb::{
eth,
eth::v2::{block::DetailLevel, CallType, TransactionTrace},
};
use crate::pb::tycho::evm::v1::{self as tycho};
struct SlotValue {
new_value: Vec<u8>,
start_value: Vec<u8>,
}
impl From<&StorageChange> for SlotValue {
fn from(change: &StorageChange) -> Self {
Self { new_value: change.new_value.clone(), start_value: change.old_value.clone() }
}
}
impl SlotValue {
fn has_changed(&self) -> bool {
self.start_value != self.new_value
}
}
// Uses a map for slots, protobuf does not allow bytes in hashmap keys
struct InterimContractChange {
address: Vec<u8>,
balance: Vec<u8>,
code: Vec<u8>,
slots: HashMap<Vec<u8>, SlotValue>,
change: tycho::ChangeType,
}
impl InterimContractChange {
fn new(address: &[u8], creation: bool) -> Self {
Self {
address: address.to_vec(),
balance: vec![],
code: vec![],
slots: Default::default(),
change: if creation { tycho::ChangeType::Creation } else { tycho::ChangeType::Update },
}
}
}
impl From<InterimContractChange> for tycho::ContractChange {
fn from(value: InterimContractChange) -> Self {
tycho::ContractChange {
address: value.address,
balance: value.balance,
code: value.code,
slots: value
.slots
.into_iter()
.filter(|(_, value)| value.has_changed())
.map(|(slot, value)| tycho::ContractSlot { slot, value: value.new_value })
.collect(),
change: value.change.into(),
}
}
}
/// Extracts and aggregates contract changes from a block.
///
@@ -101,7 +48,45 @@ impl From<InterimContractChange> for tycho::ContractChange {
pub fn extract_contract_changes<F: Fn(&[u8]) -> bool>(
block: &eth::v2::Block,
inclusion_predicate: F,
transaction_changes: &mut HashMap<u64, tycho::TransactionChanges>,
transaction_changes: &mut HashMap<u64, TransactionChanges>,
) {
extract_contract_changes_generic(block, inclusion_predicate, |tx, changed_contracts| {
transaction_changes
.entry(tx.index.into())
.or_insert_with(|| TransactionChanges::new(&(tx.into())))
.contract_changes
.extend(
changed_contracts
.clone()
.into_values()
.map(|change| change.into()),
);
})
}
pub fn extract_contract_changes_builder<F: Fn(&[u8]) -> bool>(
block: &eth::v2::Block,
inclusion_predicate: F,
transaction_changes: &mut HashMap<u64, TransactionChangesBuilder>,
) {
extract_contract_changes_generic(block, inclusion_predicate, |tx, changed_contracts| {
let builder = transaction_changes
.entry(tx.index.into())
.or_insert_with(|| TransactionChangesBuilder::new(&(tx.into())));
changed_contracts
.clone()
.into_iter()
.for_each(|(_, change)| builder.add_contract_changes(&change));
})
}
fn extract_contract_changes_generic<
F: Fn(&[u8]) -> bool,
G: FnMut(&TransactionTrace, &HashMap<Vec<u8>, InterimContractChange>),
>(
block: &eth::v2::Block,
inclusion_predicate: F,
mut store_changes: G,
) {
if block.detail_level != Into::<i32>::into(DetailLevel::DetaillevelExtended) {
panic!("Only extended blocks are supported");
@@ -160,14 +145,7 @@ pub fn extract_contract_changes<F: Fn(&[u8]) -> bool>(
)
});
let slot_value = contract_change
.slots
.entry(storage_change.key.clone())
.or_insert_with(|| storage_change.into());
slot_value
.new_value
.copy_from_slice(&storage_change.new_value);
contract_change.upsert_slot(storage_change);
});
balance_changes
@@ -184,10 +162,7 @@ pub fn extract_contract_changes<F: Fn(&[u8]) -> bool>(
});
if let Some(new_balance) = &balance_change.new_value {
contract_change.balance.clear();
contract_change
.balance
.extend_from_slice(&new_balance.bytes);
contract_change.set_balance(&new_balance.bytes);
}
});
@@ -204,25 +179,15 @@ pub fn extract_contract_changes<F: Fn(&[u8]) -> bool>(
)
});
contract_change.code.clear();
contract_change
.code
.extend_from_slice(&code_change.new_code);
contract_change.set_code(&code_change.new_code);
});
if !storage_changes.is_empty() ||
!balance_changes.is_empty() ||
!code_changes.is_empty()
{
transaction_changes
.entry(block_tx.index.into())
.or_insert_with(|| tycho::TransactionChanges::new(&(block_tx.into())))
.contract_changes
.extend(
changed_contracts
.drain()
.map(|(_, change)| change.into()),
);
store_changes(block_tx, &changed_contracts)
}
changed_contracts.clear()
});
}

View File

@@ -1,4 +1,5 @@
mod abi;
pub mod attributes;
pub mod balances;
pub mod contract;
mod mock_store;

View File

@@ -1,4 +1,5 @@
use substreams_ethereum::pb::eth::v2::{self as sf};
use std::collections::HashMap;
use substreams_ethereum::pb::eth::v2::{self as sf, StorageChange};
// re-export the protobuf types here.
pub use crate::pb::tycho::evm::v1::*;
@@ -17,6 +18,161 @@ impl TransactionChanges {
}
}
/// Builds `TransactionChanges` struct
///
/// Ensures uniqueness for contract addresses and component ids.
#[derive(Default)]
pub struct TransactionChangesBuilder {
// The transaction all collected changes belong to.
tx: Option<Transaction>,
// Keyed by contract address so repeated changes to one contract merge.
contract_changes: HashMap<Vec<u8>, InterimContractChange>,
// Keyed by component id; attribute updates for a component merge by name.
entity_changes: HashMap<String, InterimEntityChanges>,
// Keyed by component id; components are treated as immutable once added.
component_changes: HashMap<String, ProtocolComponent>,
// Keyed by (component id, token); later balance changes overwrite earlier.
balance_changes: HashMap<(Vec<u8>, Vec<u8>), BalanceChange>,
}
impl TransactionChangesBuilder {
/// Initialize a new builder for a transaction.
pub fn new(tx: &Transaction) -> Self {
Self { tx: Some(tx.clone()), ..Default::default() }
}
/// Register a new contract change.
///
/// Will prioritize the new change over any already present one.
pub fn add_contract_changes(&mut self, change: &InterimContractChange) {
self.contract_changes
.entry(change.address.clone())
// Merge into an already registered change: only the non-empty
// fields of the incoming change overwrite what is stored.
.and_modify(|c| {
if !change.balance.is_empty() {
c.set_balance(&change.balance)
}
if !change.slots.is_empty() {
c.upsert_slots(&change.slots)
}
if !change.code.is_empty() {
c.set_code(&change.code)
}
})
// First change seen for this address: start a fresh record and
// copy everything over.
.or_insert_with(|| {
let mut c = InterimContractChange::new(
&change.address,
change.change == ChangeType::Creation,
);
c.upsert_slots(&change.slots);
c.set_code(&change.code);
c.set_balance(&change.balance);
c
});
}
/// Unique contract/account addresses that have been changed so far.
pub fn changed_contracts(&self) -> impl Iterator<Item = &[u8]> {
self.contract_changes
.keys()
.map(|k| k.as_slice())
}
/// Marks a component as updated.
///
/// If the protocol does not follow a 1:1 logic between components and contracts.
/// Components can be manually marked as updated using this method.
pub fn mark_component_as_updated(&mut self, component_id: &str) {
// The marker is a plain "update_marker" attribute set to 1.
let attr = Attribute {
name: "update_marker".to_string(),
value: vec![1u8],
change: ChangeType::Update.into(),
};
if let Some(entry) = self
.entity_changes
.get_mut(component_id)
{
entry.set_attribute(&attr);
} else {
let mut change = InterimEntityChanges::new(component_id);
change.set_attribute(&attr);
self.entity_changes
.insert(component_id.to_string(), change);
}
}
/// Registers a new entity change.
///
/// Will prioritize the new change over any already present one.
pub fn add_entity_change(&mut self, change: &EntityChanges) {
self.entity_changes
.entry(change.component_id.clone())
// Known component: overwrite attributes name-by-name.
.and_modify(|ec| {
for attr in change.attributes.iter() {
ec.set_attribute(attr);
}
})
// New component: index the incoming attributes by name.
.or_insert_with(|| InterimEntityChanges {
component_id: change.component_id.clone(),
attributes: change
.attributes
.clone()
.into_iter()
.map(|a| (a.name.clone(), a))
.collect(),
});
}
/// Adds a new protocol component.
///
/// ## Note
/// This method is a noop, in case the component is already present. Since
/// components are assumed to be immutable.
pub fn add_protocol_component(&mut self, component: &ProtocolComponent) {
if !self
.component_changes
.contains_key(&component.id)
{
self.component_changes
.insert(component.id.clone(), component.clone());
}
}
/// Updates a components balances
///
/// Overwrites any previous balance changes of the component if present.
pub fn add_balance_change(&mut self, change: &BalanceChange) {
self.balance_changes
.insert((change.component_id.clone(), change.token.clone()), change.clone());
}
/// Assembles the final `TransactionChanges`.
///
/// Returns `None` when no contract, component, balance or entity changes
/// were collected, so callers can filter out empty transactions.
pub fn build(self) -> Option<TransactionChanges> {
if self.contract_changes.is_empty() &&
self.component_changes.is_empty() &&
self.balance_changes.is_empty() &&
self.entity_changes.is_empty()
{
None
} else {
Some(TransactionChanges {
tx: self.tx,
contract_changes: self
.contract_changes
.into_values()
.map(|interim| interim.into())
.collect::<Vec<_>>(),
entity_changes: self
.entity_changes
.into_values()
.map(|interim| interim.into())
.collect::<Vec<_>>(),
component_changes: self
.component_changes
.into_values()
.collect::<Vec<_>>(),
balance_changes: self
.balance_changes
.into_values()
.collect::<Vec<_>>(),
})
}
}
}
impl From<&sf::TransactionTrace> for Transaction {
fn from(tx: &sf::TransactionTrace) -> Self {
Self {
@@ -145,3 +301,128 @@ impl ProtocolComponent {
self
}
}
/// Same as `EntityChanges` but ensures attributes are unique by name.
#[derive(Default)]
pub struct InterimEntityChanges {
// Id of the component these attribute changes apply to.
component_id: String,
// Attributes keyed by name so later writes overwrite earlier ones.
attributes: HashMap<String, Attribute>,
}
impl InterimEntityChanges {
    /// Creates an empty change set for the given component id.
    pub fn new(id: &str) -> Self {
        Self { component_id: id.to_string(), ..Default::default() }
    }

    /// Sets an attribute, replacing any previous attribute with the same
    /// name.
    pub fn set_attribute(&mut self, attr: &Attribute) {
        // `HashMap::insert` already overwrites an existing entry; the
        // previous `and_modify(..).or_insert(attr.clone())` cloned twice on
        // the update path and evaluated the `or_insert` clone eagerly.
        self.attributes
            .insert(attr.name.clone(), attr.clone());
    }
}
impl From<InterimEntityChanges> for EntityChanges {
    /// Flattens the name-indexed attribute map back into the protobuf
    /// representation.
    fn from(value: InterimEntityChanges) -> Self {
        EntityChanges {
            // `value` is consumed here, so the id can be moved instead of
            // cloned (the original had a redundant `.clone()`).
            component_id: value.component_id,
            attributes: value
                .attributes
                .into_values()
                .collect::<Vec<_>>(),
        }
    }
}
// Tracks a storage slot's latest value together with the value it had when
// first observed, so no-op writes can be detected later.
#[derive(Clone)]
struct SlotValue {
// Most recent value written to the slot.
new_value: Vec<u8>,
// Value the slot held before the first observed change.
start_value: Vec<u8>,
}
impl SlotValue {
// A slot only counts as changed if its latest value differs from the
// value it started with; unchanged slots are filtered out when converting
// to `ContractChange`.
fn has_changed(&self) -> bool {
self.start_value != self.new_value
}
}
impl From<&StorageChange> for SlotValue {
// Seeds the slot from a single storage change: the change's old value
// becomes the start value.
fn from(change: &StorageChange) -> Self {
Self { new_value: change.new_value.clone(), start_value: change.old_value.clone() }
}
}
// Uses a map for slots, protobuf does not allow bytes in hashmap keys
#[derive(Clone)]
pub struct InterimContractChange {
// Address of the changed contract/account.
address: Vec<u8>,
// Latest native balance bytes; empty until `set_balance` is called.
balance: Vec<u8>,
// Latest contract code; empty until `set_code` is called.
code: Vec<u8>,
// Storage slot values keyed by slot key.
slots: HashMap<Vec<u8>, SlotValue>,
// Whether this records an account creation or an update.
change: ChangeType,
}
impl InterimContractChange {
    /// Creates an empty change record for `address`.
    ///
    /// `creation` selects `ChangeType::Creation` (newly created account)
    /// over `ChangeType::Update`.
    pub fn new(address: &[u8], creation: bool) -> Self {
        Self {
            address: address.to_vec(),
            balance: vec![],
            code: vec![],
            slots: Default::default(),
            change: if creation { ChangeType::Creation } else { ChangeType::Update },
        }
    }

    /// Records a storage change, keeping the earliest start value and the
    /// latest new value for the slot.
    ///
    /// ## Panics
    /// If the change belongs to a different contract address.
    pub fn upsert_slot(&mut self, change: &StorageChange) {
        if change.address != self.address {
            panic!("Bad storage change");
        }
        self.slots
            .entry(change.key.clone())
            // Assign instead of `copy_from_slice`: the latter panics when
            // the stored and incoming value lengths differ.
            .and_modify(|sv| sv.new_value = change.new_value.clone())
            .or_insert_with(|| change.into());
    }

    /// Merges a map of slot values, overwriting the new values of already
    /// known slots while preserving their original start values.
    fn upsert_slots(&mut self, changes: &HashMap<Vec<u8>, SlotValue>) {
        for (slot, value) in changes.iter() {
            self.slots
                .entry(slot.clone())
                // Same length-safety fix as in `upsert_slot`.
                .and_modify(|sv| sv.new_value = value.new_value.clone())
                // `or_insert_with` defers the clone until actually needed.
                .or_insert_with(|| value.clone());
        }
    }

    /// Replaces the tracked native balance.
    pub fn set_balance(&mut self, new_balance: &[u8]) {
        self.balance.clear();
        self.balance
            .extend_from_slice(new_balance);
    }

    /// Replaces the tracked contract code.
    pub fn set_code(&mut self, code: &[u8]) {
        self.code.clear();
        self.code.extend_from_slice(code);
    }
}
impl From<InterimContractChange> for ContractChange {
fn from(value: InterimContractChange) -> Self {
ContractChange {
address: value.address,
balance: value.balance,
code: value.code,
// Only emit slots whose value actually differs from the value they
// started with; no-op writes are dropped.
slots: value
.slots
.into_iter()
.filter(|(_, value)| value.has_changed())
.map(|(slot, value)| ContractSlot { slot, value: value.new_value })
.collect(),
change: value.change.into(),
}
}
}

View File

@@ -0,0 +1,25 @@
# Balancer Substream
## Open tasks
### Missing rate provider state
Any pool that uses rate providers is currently not supported by tycho, since we do
not witness the contract creation of the rate providers and thus can't provide the
required contract state.
This is planned to be resolved with the dynamic contract indexing module.
## Static Attributes
| name | type | description |
|--------------------|-------|---------------------------------------------------------------------------------------------------------|
| pool_type | str | A unique identifier per pool type. Set depending on the factory |
| normalized_weights | json  | The normalized weights of a weighted pool.                                                               |
| pool_id | bytes | The balancer pool id. |
| rate_providers | json | A list of rate provider addresses. |
| bpt                | bytes | The balancer LP token; set if the pool supports entering and exiting LP positions via the swap interface. |
| main_token | bytes | The main token address for a linear pool |
| wrapped_token | bytes | The wrapped token address for a linear pool |
| fee | int | The fee charged by the pool set at deployment time |
| upper_target | int | The upper target for a linear pool |

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -9,7 +9,7 @@ use substreams::{
};
use substreams_ethereum::{pb::eth, Event};
use tycho_substreams::{
balances::aggregate_balances_changes, contract::extract_contract_changes, prelude::*,
balances::aggregate_balances_changes, contract::extract_contract_changes_builder, prelude::*,
};
pub const VAULT_ADDRESS: &[u8] = &hex!("BA12222222228d8Ba445958a75a0704d566BF2C8");
@@ -146,39 +146,43 @@ pub fn map_protocol_changes(
) -> Result<BlockChanges> {
// We merge contract changes by transaction (identified by transaction index) making it easy to
// sort them at the very end.
let mut transaction_changes: HashMap<_, TransactionChanges> = HashMap::new();
let mut transaction_changes: HashMap<_, TransactionChangesBuilder> = HashMap::new();
// `ProtocolComponents` are gathered from `map_pools_created` which just need a bit of work to
// convert into `TransactionChanges`
let default_attributes = vec![
Attribute {
name: "balance_owner".to_string(),
value: VAULT_ADDRESS.to_vec(),
change: ChangeType::Creation.into(),
},
Attribute {
name: "update_marker".to_string(),
value: vec![1u8],
change: ChangeType::Creation.into(),
},
];
grouped_components
.tx_components
.iter()
.for_each(|tx_component| {
// initialise builder if not yet present for this tx
let tx = tx_component.tx.as_ref().unwrap();
transaction_changes
let builder = transaction_changes
.entry(tx.index)
.or_insert_with(|| TransactionChanges::new(tx))
.component_changes
.extend_from_slice(&tx_component.components);
.or_insert_with(|| TransactionChangesBuilder::new(tx));
// iterate over individual components created within this tx
tx_component
.components
.iter()
.for_each(|component| {
transaction_changes
.entry(tx.index)
.or_insert_with(|| TransactionChanges::new(tx))
.entity_changes
.push(EntityChanges {
component_id: component.id.clone(),
attributes: vec![Attribute {
name: "balance_owner".to_string(),
value: "0xBA12222222228d8Ba445958a75a0704d566BF2C8"
.to_string()
.as_bytes()
.to_vec(),
change: ChangeType::Creation.into(),
}],
});
builder.add_protocol_component(component);
let entity_change = EntityChanges {
component_id: component.id.clone(),
attributes: default_attributes.clone(),
};
builder.add_entity_change(&entity_change)
});
});
@@ -189,15 +193,16 @@ pub fn map_protocol_changes(
aggregate_balances_changes(balance_store, deltas)
.into_iter()
.for_each(|(_, (tx, balances))| {
transaction_changes
let builder = transaction_changes
.entry(tx.index)
.or_insert_with(|| TransactionChanges::new(&tx))
.balance_changes
.extend(balances.into_values());
.or_insert_with(|| TransactionChangesBuilder::new(&tx));
balances
.values()
.for_each(|bc| builder.add_balance_change(bc));
});
// Extract and insert any storage changes that happened for any of the components.
extract_contract_changes(
extract_contract_changes_builder(
&block,
|addr| {
components_store
@@ -208,6 +213,24 @@ pub fn map_protocol_changes(
&mut transaction_changes,
);
transaction_changes
.iter_mut()
.for_each(|(_, change)| {
// this indirection is necessary due to borrowing rules.
let addresses = change
.changed_contracts()
.map(|e| e.to_vec())
.collect::<Vec<_>>();
addresses
.into_iter()
.for_each(|address| {
if address != VAULT_ADDRESS {
// We reconstruct the component_id from the address here
change.mark_component_as_updated(&format!("0x{}", hex::encode(address)))
}
})
});
// Process all `transaction_changes` for final output in the `BlockChanges`,
// sorted by transaction index (the key).
Ok(BlockChanges {
@@ -215,17 +238,7 @@ pub fn map_protocol_changes(
changes: transaction_changes
.drain()
.sorted_unstable_by_key(|(index, _)| *index)
.filter_map(|(_, change)| {
if change.contract_changes.is_empty() &&
change.component_changes.is_empty() &&
change.balance_changes.is_empty() &&
change.entity_changes.is_empty()
{
None
} else {
Some(change)
}
})
.filter_map(|(_, builder)| builder.build())
.collect::<Vec<_>>(),
})
}

View File

@@ -1,33 +1,13 @@
use crate::{abi, modules::VAULT_ADDRESS};
use substreams::{hex, scalar::BigInt};
use substreams::hex;
use substreams_ethereum::{
pb::eth::v2::{Call, Log, TransactionTrace},
Event, Function,
};
use tycho_substreams::prelude::*;
/// This trait defines some helpers for serializing and deserializing `Vec<BigInt` which is needed
/// to be able to encode the `normalized_weights` and `weights` `Attribute`s. This should also be
/// handled by any downstream application.
trait SerializableVecBigInt {
fn serialize_bytes(&self) -> Vec<u8>;
#[allow(dead_code)]
fn deserialize_bytes(bytes: &[u8]) -> Vec<BigInt>;
}
impl SerializableVecBigInt for Vec<BigInt> {
fn serialize_bytes(&self) -> Vec<u8> {
self.iter()
.flat_map(|big_int| big_int.to_signed_bytes_be())
.collect()
}
fn deserialize_bytes(bytes: &[u8]) -> Vec<BigInt> {
bytes
.chunks_exact(32)
.map(BigInt::from_signed_bytes_be)
.collect::<Vec<BigInt>>()
}
}
use tycho_substreams::{
attributes::{json_serialize_address_list, json_serialize_bigint_list},
prelude::*,
};
/// Helper function to get pool_registered event
fn get_pool_registered(
@@ -42,6 +22,18 @@ fn get_pool_registered(
.clone()
}
fn get_token_registered(
tx: &TransactionTrace,
pool_id: &[u8],
) -> abi::vault::events::TokensRegistered {
tx.logs_with_calls()
.filter(|(log, _)| log.address == VAULT_ADDRESS)
.filter_map(|(log, _)| abi::vault::events::TokensRegistered::match_and_decode(log))
.find(|ev| ev.pool_id == pool_id)
.unwrap()
.clone()
}
/// This is the main function that handles the creation of `ProtocolComponent`s with `Attribute`s
/// based on the specific factory address. There's 3 factory groups that are represented here:
/// - Weighted Pool Factories
@@ -76,14 +68,20 @@ pub fn address_map(
("pool_type", "WeightedPoolFactory".as_bytes()),
(
"normalized_weights",
&create_call
.normalized_weights
.serialize_bytes(),
&json_serialize_bigint_list(&create_call.normalized_weights),
),
("pool_id", &pool_registered.pool_id),
(
"rate_providers",
&json_serialize_address_list(&create_call.rate_providers),
),
(
"pool_id",
format!("0x{}", hex::encode(pool_registered.pool_id)).as_bytes(),
"fee",
&create_call
.swap_fee_percentage
.to_signed_bytes_be(),
),
("manual_updates", &[1u8]),
])
.as_swap_type("balancer_pool", ImplementationType::Vm),
)
@@ -94,17 +92,27 @@ pub fn address_map(
let pool_created =
abi::composable_stable_pool_factory::events::PoolCreated::match_and_decode(log)?;
let pool_registered = get_pool_registered(tx, &pool_created.pool);
let tokens_registered = get_token_registered(tx, &pool_registered.pool_id);
Some(
ProtocolComponent::at_contract(&pool_created.pool, &(tx.into()))
.with_contracts(&[pool_created.pool, VAULT_ADDRESS.to_vec()])
.with_tokens(&create_call.tokens)
.with_contracts(&[pool_created.pool.clone(), VAULT_ADDRESS.to_vec()])
.with_tokens(&tokens_registered.tokens)
.with_attributes(&[
("pool_type", "ComposableStablePoolFactory".as_bytes()),
("pool_id", &pool_registered.pool_id),
("bpt", &pool_created.pool),
(
"pool_id",
format!("0x{}", hex::encode(pool_registered.pool_id)).as_bytes(),
"fee",
&create_call
.swap_fee_percentage
.to_signed_bytes_be(),
),
(
"rate_providers",
&json_serialize_address_list(&create_call.rate_providers),
),
("manual_updates", &[1u8]),
])
.as_swap_type("balancer_pool", ImplementationType::Vm),
)
@@ -115,11 +123,12 @@ pub fn address_map(
let pool_created =
abi::erc_linear_pool_factory::events::PoolCreated::match_and_decode(log)?;
let pool_registered = get_pool_registered(tx, &pool_created.pool);
let tokens_registered = get_token_registered(tx, &pool_registered.pool_id);
Some(
ProtocolComponent::at_contract(&pool_created.pool, &(tx.into()))
.with_contracts(&[pool_created.pool, VAULT_ADDRESS.to_vec()])
.with_tokens(&[create_call.main_token, create_call.wrapped_token])
.with_contracts(&[pool_created.pool.clone(), VAULT_ADDRESS.to_vec()])
.with_tokens(&tokens_registered.tokens)
.with_attributes(&[
("pool_type", "ERC4626LinearPoolFactory".as_bytes()),
(
@@ -128,9 +137,16 @@ pub fn address_map(
.upper_target
.to_signed_bytes_be(),
),
("pool_id", &pool_registered.pool_id),
("manual_updates", &[1u8]),
("bpt", &pool_created.pool),
("main_token", &create_call.main_token),
("wrapped_token", &create_call.wrapped_token),
(
"pool_id",
format!("0x{}", hex::encode(pool_registered.pool_id)).as_bytes(),
"fee",
&create_call
.swap_fee_percentage
.to_signed_bytes_be(),
),
])
.as_swap_type("balancer_pool", ImplementationType::Vm),
@@ -142,11 +158,12 @@ pub fn address_map(
let pool_created =
abi::euler_linear_pool_factory::events::PoolCreated::match_and_decode(log)?;
let pool_registered = get_pool_registered(tx, &pool_created.pool);
let tokens_registered = get_token_registered(tx, &pool_registered.pool_id);
Some(
ProtocolComponent::at_contract(&pool_created.pool, &(tx.into()))
.with_contracts(&[pool_created.pool, VAULT_ADDRESS.to_vec()])
.with_tokens(&[create_call.main_token, create_call.wrapped_token])
.with_contracts(&[pool_created.pool.clone(), VAULT_ADDRESS.to_vec()])
.with_tokens(&tokens_registered.tokens)
.with_attributes(&[
("pool_type", "EulerLinearPoolFactory".as_bytes()),
(
@@ -155,9 +172,16 @@ pub fn address_map(
.upper_target
.to_signed_bytes_be(),
),
("pool_id", &pool_registered.pool_id),
("manual_updates", &[1u8]),
("bpt", &pool_created.pool),
("main_token", &create_call.main_token),
("wrapped_token", &create_call.wrapped_token),
(
"pool_id",
format!("0x{}", hex::encode(pool_registered.pool_id)).as_bytes(),
"fee",
&create_call
.swap_fee_percentage
.to_signed_bytes_be(),
),
])
.as_swap_type("balancer_pool", ImplementationType::Vm),
@@ -217,11 +241,12 @@ pub fn address_map(
let pool_created =
abi::silo_linear_pool_factory::events::PoolCreated::match_and_decode(log)?;
let pool_registered = get_pool_registered(tx, &pool_created.pool);
let tokens_registered = get_token_registered(tx, &pool_registered.pool_id);
Some(
ProtocolComponent::at_contract(&pool_created.pool, &(tx.into()))
.with_contracts(&[pool_created.pool, VAULT_ADDRESS.to_vec()])
.with_tokens(&[create_call.main_token, create_call.wrapped_token])
.with_contracts(&[pool_created.pool.clone(), VAULT_ADDRESS.to_vec()])
.with_tokens(&tokens_registered.tokens)
.with_attributes(&[
("pool_type", "SiloLinearPoolFactory".as_bytes()),
(
@@ -230,9 +255,16 @@ pub fn address_map(
.upper_target
.to_signed_bytes_be(),
),
("pool_id", &pool_registered.pool_id),
("manual_updates", &[1u8]),
("bpt", &pool_created.pool),
("main_token", &create_call.main_token),
("wrapped_token", &create_call.wrapped_token),
(
"pool_id",
format!("0x{}", hex::encode(pool_registered.pool_id)).as_bytes(),
"fee",
&create_call
.swap_fee_percentage
.to_signed_bytes_be(),
),
])
.as_swap_type("balancer_pool", ImplementationType::Vm),
@@ -244,11 +276,12 @@ pub fn address_map(
let pool_created =
abi::yearn_linear_pool_factory::events::PoolCreated::match_and_decode(log)?;
let pool_registered = get_pool_registered(tx, &pool_created.pool);
let tokens_registered = get_token_registered(tx, &pool_registered.pool_id);
Some(
ProtocolComponent::at_contract(&pool_created.pool, &(tx.into()))
.with_contracts(&[pool_created.pool, VAULT_ADDRESS.to_vec()])
.with_tokens(&[create_call.main_token, create_call.wrapped_token])
.with_contracts(&[pool_created.pool.clone(), VAULT_ADDRESS.to_vec()])
.with_tokens(&tokens_registered.tokens)
.with_attributes(&[
("pool_type", "YearnLinearPoolFactory".as_bytes()),
(
@@ -257,9 +290,16 @@ pub fn address_map(
.upper_target
.to_signed_bytes_be(),
),
("pool_id", &pool_registered.pool_id),
("manual_updates", &[1u8]),
("bpt", &pool_created.pool),
("main_token", &create_call.main_token),
("wrapped_token", &create_call.wrapped_token),
(
"pool_id",
format!("0x{}", hex::encode(pool_registered.pool_id)).as_bytes(),
"fee",
&create_call
.swap_fee_percentage
.to_signed_bytes_be(),
),
])
.as_swap_type("balancer_pool", ImplementationType::Vm),
@@ -280,11 +320,15 @@ pub fn address_map(
.with_tokens(&create_call.tokens)
.with_attributes(&[
("pool_type", "WeightedPool2TokensFactory".as_bytes()),
("weights", &create_call.weights.serialize_bytes()),
("weights", &json_serialize_bigint_list(&create_call.weights)),
("pool_id", &pool_registered.pool_id),
(
"pool_id",
format!("0x{}", hex::encode(pool_registered.pool_id)).as_bytes(),
"fee",
&create_call
.swap_fee_percentage
.to_signed_bytes_be(),
),
("manual_updates", &[1u8]),
])
.as_swap_type("balancer_pool", ImplementationType::Vm),
)

View File

@@ -1,121 +1,125 @@
substreams_yaml_path: ./substreams.yaml
protocol_type_names:
- "balancer_pool"
adapter_contract: "BalancerV2SwapAdapter.evm.runtime"
adapter_contract: "BalancerV2SwapAdapter"
skip_balance_check: true
initialized_accounts:
- "0xba12222222228d8ba445958a75a0704d566bf2c8"
# Uncomment entries below to include composable stable pool dependencies
# wstETH dependencies
# - "0x72D07D7DcA67b8A406aD1Ec34ce969c90bFEE768"
# - "0xb8ffc3cd6e7cf5a098a1c92f48009765b24088dc"
# - "0xae7ab96520de3a18e5e111b5eaab095312d7fe84"
# - "0x7f39c581f595b53c5cb19bd0b3f8da6c935e2ca0"
# - "0x2b33cf282f867a7ff693a66e11b0fcc5552e4425"
# - "0x17144556fd3424edc8fc8a4c940b2d04936d17eb"
# sfrxETH dependencies
# - "0x302013E7936a39c358d07A3Df55dc94EC417E3a1"
# - "0xac3e018457b222d93114458476f3e3416abbe38f"
# rETH dependencies
# - "0x1a8F81c256aee9C640e14bB0453ce247ea0DFE6F"
# - "0x07fcabcbe4ff0d80c2b1eb42855c0131b6cba2f4"
# - "0x1d8f8f00cfa6758d7be78336684788fb0ee0fa46"
# - "0xae78736cd615f374d3085123a210448e74fc6393"
tests:
# WeightedPoolFactory - 0x897888115Ada5773E02aA29F775430BFB5F34c51
# WeightedPoolFactory - 0x897888115Ada5773E02aA29F775430BFB5F34c51
- name: test_weighted_pool_creation
start_block: 16878320
stop_block: 16971020
start_block: 20128706
stop_block: 20128806
expected_state:
protocol_components:
- id: "0x8055b8C947De30130BC1Ec750C8F345a50006B23"
- id: "0xe96a45f66bdDA121B24F0a861372A72E8889523d"
tokens:
- "0xba100000625a3754423978a60c9317c58a424e3D"
- "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
static_attributes:
creation_tx: "0x3ae08d6ff86737a64827855af810f7ee9ee208ff8e6d8c916495d09a83282c8a"
- id: "0xDac7eF49161bdBf0e8f0B4c8e2D38DF19D972874"
tokens:
- "0x9A62fB1CAFEa99f8f0441f80af7F7ccf0d46847D"
- "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
static_attributes:
creation_tx: "0xbed0c745c3761cd54a7489181a5f1165c628c35ef69ecb0bfeec08f09f0ec407"
# ComposableStablePoolFactory - 0xDB8d758BCb971e482B2C45f7F8a7740283A1bd3A
- name: test_composable_stable_pool_creation
start_block: 17672478
stop_block: 17677310
- "0x38C2a4a7330b22788374B8Ff70BBa513C8D848cA"
- "0x514910771AF9Ca656af840dff83E8264EcF986CA"
static_attributes: null
creation_tx: "0xa63c671046ad2075ec8ea83ac21199cf3e3a5f433e72ec4c117cbabfb9b18de2"
# WeightedPool2TokensFactory - 0xA5bf2ddF098bb0Ef6d120C98217dD6B141c74EE0
- name: weighted_legacy_creation
start_block: 13148365
stop_block: 13148465
expected_state:
protocol_components:
- id: "0x20356663C17D31549d1210379749E2aE36722D8f"
tokens:
- "0xba100000625a3754423978a60c9317c58a424e3D"
protocol_components:
- id: "0xBF96189Eee9357a95C7719f4F5047F76bdE804E5"
tokens:
- "0x5A98FcBEA516Cf06857215779Fd812CA3beF1B32"
- "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
static_attributes:
creation_tx: "0x822653ae905ab40f51f46c7b8185ba9a4aa06e674789f87dd26d0d11b26dc7c9"
- id: "0x42ED016F826165C2e5976fe5bC3df540C5aD0Af7"
tokens:
static_attributes: null
creation_tx: "0xdced662e41b1608c386551bbc89894a10321fd8bd58782e22077d1044cf99cb5"
# ComposableStablePoolFactory - 0xDB8d758BCb971e482B2C45f7F8a7740283A1bd3A
- name: test_composable_stable_pool_creation
start_block: 17677300
stop_block: 17678400
expected_state:
protocol_components:
- id: "0x42ED016F826165C2e5976fe5bC3df540C5aD0Af7"
tokens:
- "0x42ed016f826165c2e5976fe5bc3df540c5ad0af7"
- "0x7f39C581F595B53c5cb19bD0b3f8dA6c935E2Ca0"
- "0xac3E018457B222d93114458476f3E3416Abbe38F"
- "0xae78736Cd615f374D3085123A210448E74Fc6393"
static_attributes:
creation_tx: "0x53ff6bab0d8a76a998e29e59da8068ad906ae85507a1c2fbf2505e2cb52fd754"
# ERC4626LinearPoolFactory - 0x813EE7a840CE909E7Fea2117A44a90b8063bd4fd
static_attributes: null
skip_simulation: true
creation_tx: "0x53ff6bab0d8a76a998e29e59da8068ad906ae85507a1c2fbf2505e2cb52fd754"
# ERC4626LinearPoolFactory - 0x813EE7a840CE909E7Fea2117A44a90b8063bd4fd
- name: test_erc4626_linear_pool_creation
start_block: 17045881
stop_block: 17480143
start_block: 17480142
stop_block: 17480242
expected_state:
protocol_components:
- id: "0x9516a2d25958EdB8da246a320f2c7d94A0DBe25d"
tokens:
- "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
- "0xB99eDc4b289B0F2284fCF3f66884191BdCe29624"
static_attributes:
creation_tx: "0x756e81cea4cf725c738bcef3852ad57687156b561574cad3e2956e6cb48da5e6"
- id: "0x3fCb7085B8F2F473F80bF6D879cAe99eA4DE9344"
tokens:
- "0x39Dd7790e75C6F663731f7E1FdC0f35007D3879b"
- "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
static_attributes:
creation_tx: "0x5ff97870685370bab3876a4335d28c42e24659064fe78b486d6fb1b37b992877"
# EulerLinearPoolFactory - 0x5F43FBa61f63Fa6bFF101a0A0458cEA917f6B347
protocol_components:
- id: "0x3fCb7085B8F2F473F80bF6D879cAe99eA4DE9344"
tokens:
- "0x39Dd7790e75C6F663731f7E1FdC0f35007D3879b"
- "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
- "0x3fcb7085b8f2f473f80bf6d879cae99ea4de9344"
static_attributes: null
skip_simulation: true
creation_tx: "0x5ff97870685370bab3876a4335d28c42e24659064fe78b486d6fb1b37b992877"
# EulerLinearPoolFactory - 0x5F43FBa61f63Fa6bFF101a0A0458cEA917f6B347
- name: test_euler_linear_pool_creation
start_block: 16588078
stop_block: 16588118
start_block: 16588117
stop_block: 16588217
expected_state:
protocol_components:
- id: "0xDEC02e6642e2c999aF429F5cE944653CAd15e093"
tokens:
- "0xC101dcA301a4011C1F925e9622e749e550a1B667"
- "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
static_attributes:
creation_tx: "0xd639c8a6c3a553d47fd7f3d384ec4bc50a2cd6dfb2c3135b7f5db49d73c15df2"
- id: "0xD4e7C1F3DA1144c9E2CfD1b015eDA7652b4a4399"
tokens:
- "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48"
- "0xEb91861f8A4e1C12333F42DCE8fB0Ecdc28dA716"
static_attributes:
creation_tx: "0x4a9ea683052afefdae3d189862868c3a7dc8f431d1d9828b6bfd9451a8816426"
# SiloLinearPoolFactory - 0x4E11AEec21baF1660b1a46472963cB3DA7811C89
protocol_components:
- id: "0xD4e7C1F3DA1144c9E2CfD1b015eDA7652b4a4399"
tokens:
- "0xD4e7C1F3DA1144c9E2CfD1b015eDA7652b4a4399"
- "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48"
- "0xEb91861f8A4e1C12333F42DCE8fB0Ecdc28dA716"
static_attributes: null
skip_simulation: true
creation_tx: "0x4a9ea683052afefdae3d189862868c3a7dc8f431d1d9828b6bfd9451a8816426"
# SiloLinearPoolFactory - 0x4E11AEec21baF1660b1a46472963cB3DA7811C89
- name: test_silo_linear_pool_creation
start_block: 17173185
stop_block: 17173187
expected_state:
protocol_components:
- id: "0x74CBfAF94A3577c539a9dCEE9870A6349a33b34f"
tokens:
- "0x192E67544694a7bAA2DeA94f9B1Df58BB3395A12"
- "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
static_attributes:
creation_tx: "0xd639c8a6c3a553d47fd7f3d384ec4bc50a2cd6dfb2c3135b7f5db49d73c15df2"
# YearnLinearPoolFactory - 0x5F5222Ffa40F2AEd6380D022184D6ea67C776eE0a
protocol_components:
- id: "0x74CBfAF94A3577c539a9dCEE9870A6349a33b34f"
tokens:
- "0x192E67544694a7bAA2DeA94f9B1Df58BB3395A12"
- "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
- "0x74cbfaf94a3577c539a9dcee9870a6349a33b34f"
static_attributes: null
skip_simulation: true
creation_tx: "0x215c9f4256ab450368132f4063611ae8cdd98e80bea7e44ecf0600ed1d757018"
# YearnLinearPoolFactory - 0x5F5222Ffa40F2AEd6380D022184D6ea67C776eE0a
- name: test_yearn_linear_pool_creation
start_block: 17052601
stop_block: 17052605
expected_state:
protocol_components:
- id: "0x5F5222Ffa40F2AEd6380D022184D6ea67C776eE0"
tokens:
- "0x806E02Dea8d4a0882caD9fA3Fa75B212328692dE"
- "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
static_attributes:
creation_tx: "0x497aa03ce84d236c183204ddfc6762c8e4158da1ebc5e7e18e7f6cceaa497a2a"
# WeightedPool2TokensFactory - 0xA5bf2ddF098bb0Ef6d120C98217dD6B141c74EE0
- name: test_yearn_linear_pool_creation
start_block: 12349890
stop_block: 12363660
expected_state:
protocol_components:
- id: "0x021c343C6180f03cE9E48FaE3ff432309b9aF199"
tokens:
- "0xD291E7a03283640FDc51b121aC401383A46cC623"
- "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
static_attributes:
creation_tx: "0xc6b9f8ee6c6f17edacc6df1b6287c57157a16f2aa5b315a03cf2c42d7ebb74e3"
- id: "0x571046EaE58C783f29f95ADBa17Dd561Af8a8712"
tokens:
- "0x6B175474E89094C44Da98b954EedeAC495271d0F"
- "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
static_attributes:
creation_tx: "0xe60d97670f902d58e65fb2d56f77a0cee19ffad47deae1fb8a126d534ee1ece5"
protocol_components:
- id: "0xac5b4ef7ede2f2843a704e96dcaa637f4ba3dc3f"
tokens:
- "0x806E02Dea8d4a0882caD9fA3Fa75B212328692dE"
- "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
- "0xac5b4ef7ede2f2843a704e96dcaa637f4ba3dc3f"
static_attributes: null
skip_simulation: true
creation_tx: "0x497aa03ce84d236c183204ddfc6762c8e4158da1ebc5e7e18e7f6cceaa497a2a"

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -929,8 +929,8 @@ pub mod events {
.topics
.get(0)
.expect("bounds already checked")
.as_ref() ==
Self::TOPIC_ID;
.as_ref()
== Self::TOPIC_ID;
}
pub fn decode(log: &substreams_ethereum::pb::eth::v2::Log) -> Result<Self, String> {
let mut values =
@@ -1009,8 +1009,8 @@ pub mod events {
.topics
.get(0)
.expect("bounds already checked")
.as_ref() ==
Self::TOPIC_ID;
.as_ref()
== Self::TOPIC_ID;
}
pub fn decode(log: &substreams_ethereum::pb::eth::v2::Log) -> Result<Self, String> {
let mut values =

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,9 +1,9 @@
#![allow(clippy::all)]
pub mod crypto_pool_factory;
pub mod stableswap_factory;
pub mod crypto_swap_ng_factory;
pub mod meta_registry;
pub mod tricrypto_factory;
pub mod twocrypto_factory;
pub mod erc20;
pub mod meta_pool_factory;
pub mod meta_registry;
pub mod stableswap_factory;
pub mod tricrypto_factory;
pub mod twocrypto_factory;

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -8,6 +8,8 @@ tests:
- name: test_pool_creation
start_block: 123
stop_block: 456
initialized_accounts:
- "0x0c0e5f2fF0ff18a3be9b835635039256dC4B4963" # Needed for ....
expected_state:
protocol_components:
- id: "0xbebc44782c7db0a1a60cb6fe97d0b483032ff1c7"
@@ -17,6 +19,7 @@ tests:
- "0x6b175474e89094c44da98b954eedeac495271d0f"
static_attributes:
creation_tx: "0x20793bbf260912aae189d5d261ff003c9b9166da8191d8f9d63ff1c7722f3ac6"
skip_simulation: false
- name: test_something_else
start_block: 123
stop_block: 456
@@ -28,3 +31,4 @@ tests:
- "0xae7ab96520DE3A18E5e111B5EaAb095312D7fE84"
static_attributes:
creation_tx: "0xfac67ecbd423a5b915deff06045ec9343568edaec34ae95c43d35f2c018afdaa"
skip_simulation: true # If true, always add a reason

View File

@@ -2,6 +2,7 @@ version: '3.1'
services:
db:
build:
context: .
dockerfile: postgres.Dockerfile
restart: "always"
environment:
@@ -11,8 +12,7 @@ services:
POSTGRESQL_SHARED_PRELOAD_LIBRARIES: pg_cron
ports:
- "5431:5432"
volumes:
- postgres_data:/var/lib/postgresql/data
shm_size: '1gb'
app:
build:
context: .
@@ -22,7 +22,7 @@ services:
- ../substreams:/app/substreams
- ../proto:/app/proto
- ./tycho-indexer:/app/testing/tycho-indexer
- ./runner.py:/app/testing/runner.py
- ./src/runner/runner.py:/app/testing/src.py
ports:
- "80:80"
depends_on:

View File

@@ -2,4 +2,4 @@ psycopg2==2.9.9
PyYAML==6.0.1
Requests==2.32.2
web3==5.31.3
./tycho-client
-e ./tycho-client

View File

@@ -7,20 +7,28 @@ def main() -> None:
description="Run indexer within a specified range of blocks"
)
parser.add_argument(
"--test_yaml_path", type=str, help="Path to the test configuration YAML file."
"--package", type=str, help="Name of the package to test."
)
parser.add_argument(
"--with_binary_logs",
"--tycho-logs",
action="store_true",
help="Flag to activate logs from Tycho.",
)
parser.add_argument(
"--db_url", type=str, help="Postgres database URL for the Tycho indexer."
"--db-url", type=str, help="Postgres database URL for the Tycho indexer."
)
parser.add_argument(
"--vm-traces",
action="store_true",
help="Enable tracing during vm simulations.",
)
args = parser.parse_args()
test_runner = TestRunner(
args.test_yaml_path, args.with_binary_logs, db_url=args.db_url
args.package,
args.tycho_logs,
db_url=args.db_url,
vm_traces=args.vm_traces,
)
test_runner.run_tests()

View File

@@ -46,21 +46,26 @@ class SimulationFailure(BaseModel):
class TestRunner:
def __init__(self, config_path: str, with_binary_logs: bool, db_url: str):
def __init__(self, package: str, with_binary_logs: bool, db_url: str, vm_traces: bool):
self.repo_root = os.getcwd()
config_path = os.path.join(self.repo_root, "substreams", package, "test_assets.yaml")
self.config = load_config(config_path)
self.base_dir = os.path.dirname(config_path)
self.tycho_runner = TychoRunner(with_binary_logs)
self.spkg_src = os.path.join(self.repo_root, "substreams", package)
self.adapters_src = os.path.join(self.repo_root, "evm")
self.tycho_runner = TychoRunner(db_url, with_binary_logs, self.config["initialized_accounts"])
self.tycho_rpc_client = TychoRPCClient()
self.db_url = db_url
self._vm_traces = vm_traces
self._chain = Blockchain.ethereum
def run_tests(self) -> None:
"""Run all tests specified in the configuration."""
print(f"Running tests ...")
for test in self.config["tests"]:
self.tycho_runner.empty_database(self.db_url)
spkg_path = self.build_spkg(
os.path.join(self.base_dir, self.config["substreams_yaml_path"]),
os.path.join(self.spkg_src, self.config["substreams_yaml_path"]),
lambda data: self.update_initial_block(data, test["start_block"]),
)
self.tycho_runner.run_tycho(
@@ -68,6 +73,7 @@ class TestRunner:
test["start_block"],
test["stop_block"],
self.config["protocol_type_names"],
test.get("initialized_accounts", []),
)
result = self.tycho_runner.run_with_rpc_server(
@@ -80,8 +86,6 @@ class TestRunner:
else:
print(f"❗️ {test['name']} failed: {result.message}")
self.tycho_runner.empty_database(self.db_url)
def validate_state(self, expected_state: dict, stop_block: int) -> TestResult:
"""Validate the current protocol state against the expected state."""
protocol_components = self.tycho_rpc_client.get_protocol_components()
@@ -101,13 +105,15 @@ class TestRunner:
component = components[comp_id]
for key, value in expected_component.items():
if key not in ["tokens", "static_attributes", "creation_tx"]:
continue
if key not in component:
return TestResult.Failed(
f"Missing '{key}' in component '{comp_id}'."
)
if isinstance(value, list):
if set(map(str.lower, value)) != set(
map(str.lower, component[key])
map(str.lower, component[key])
):
return TestResult.Failed(
f"List mismatch for key '{key}': {value} != {component[key]}"
@@ -145,11 +151,14 @@ class TestRunner:
f"from rpc call and {tycho_balance} from Substreams"
)
contract_states = self.tycho_rpc_client.get_contract_state()
filtered_components = {'protocol_components': [pc for pc in protocol_components["protocol_components"] if
pc["id"] in [c["id"].lower() for c in
expected_state["protocol_components"] if
c.get("skip_simulation", False) is False]]}
simulation_failures = self.simulate_get_amount_out(
token_balances,
stop_block,
protocol_states,
protocol_components,
filtered_components,
contract_states,
)
if len(simulation_failures):
@@ -169,12 +178,11 @@ class TestRunner:
return TestResult.Failed(error_message)
def simulate_get_amount_out(
self,
token_balances: dict[str, dict[str, int]],
block_number: int,
protocol_states: dict,
protocol_components: dict,
contract_state: dict,
self,
block_number: int,
protocol_states: dict,
protocol_components: dict,
contract_state: dict,
) -> dict[str, list[SimulationFailure]]:
protocol_type_names = self.config["protocol_type_names"]
@@ -188,9 +196,10 @@ class TestRunner:
failed_simulations: dict[str, list[SimulationFailure]] = dict()
for protocol in protocol_type_names:
adapter_contract = os.path.join(
self.base_dir, "evm", self.config["adapter_contract"]
self.adapters_src, "out", f"{self.config['adapter_contract']}.sol",
f"{self.config['adapter_contract']}.evm.runtime"
)
decoder = ThirdPartyPoolTychoDecoder(adapter_contract, 0, False)
decoder = ThirdPartyPoolTychoDecoder(adapter_contract, 0, trace=self._vm_traces)
stream_adapter = TychoPoolStateStreamAdapter(
tycho_url="0.0.0.0:4242",
protocol=protocol,
@@ -204,21 +213,17 @@ class TestRunner:
for pool_state in decoded.pool_states.values():
pool_id = pool_state.id_
protocol_balances = token_balances.get(pool_id)
if not protocol_balances:
if not pool_state.balances:
raise ValueError(f"Missing balances for pool {pool_id}")
for sell_token, buy_token in itertools.permutations(
pool_state.tokens, 2
pool_state.tokens, 2
):
# Try to sell 0.1% of the protocol balance
sell_amount = Decimal("0.001") * pool_state.balances[sell_token.address]
try:
# Try to sell 0.1% of the protocol balance
sell_amount = Decimal("0.001") * sell_token.from_onchain_amount(
protocol_balances[sell_token.address]
)
amount_out, gas_used, _ = pool_state.get_amount_out(
sell_token, sell_amount, buy_token
)
# TODO: Should we validate this with an archive node or RPC reader?
print(
f"Amount out for {pool_id}: {sell_amount} {sell_token} -> {amount_out} {buy_token} - "
f"Gas used: {gas_used}"
@@ -233,8 +238,8 @@ class TestRunner:
failed_simulations[pool_id].append(
SimulationFailure(
pool_id=pool_id,
sell_token=sell_token,
buy_token=buy_token,
sell_token=str(sell_token),
buy_token=str(buy_token),
error=str(e),
)
)

View File

@@ -1,29 +1,42 @@
import os
import platform
import signal
import subprocess
import sys
import threading
import time
from pathlib import Path
import psycopg2
import requests
from psycopg2 import sql
def get_binary_path():
path = Path(__file__).parent
if sys.platform.startswith("darwin") and platform.machine() == "arm64":
return Path(__file__).parent / "tycho-indexer-mac-arm64"
elif sys.platform.startswith("linux") and platform.machine() == "x86_64":
return Path(__file__).parent / "tycho-indexer-linux-x64"
else:
raise RuntimeError("Unsupported platform or architecture")
import os
binary_path = get_binary_path()
def find_binary_file(file_name):
# Define usual locations for binary files in Unix-based systems
locations = [
"/bin",
"/sbin",
"/usr/bin",
"/usr/sbin",
"/usr/local/bin",
"/usr/local/sbin",
]
# Add user's local bin directory if it exists
home = os.path.expanduser("~")
if os.path.exists(home + "/.local/bin"):
locations.append(home + "/.local/bin")
# Check each location
for location in locations:
potential_path = location + "/" + file_name
if os.path.exists(potential_path):
return potential_path
# If binary is not found in the usual locations, return None
raise RuntimeError("Unable to locate tycho-indexer binary")
binary_path = find_binary_file("tycho-indexer")
class TychoRPCClient:
@@ -50,7 +63,7 @@ class TychoRPCClient:
def get_contract_state(self) -> dict:
"""Retrieve contract state from the RPC server."""
url = self.rpc_url + "/v1/ethereum/contract_state"
url = self.rpc_url + "/v1/ethereum/contract_state?include_balances=false"
headers = {"accept": "application/json", "Content-Type": "application/json"}
data = {}
@@ -59,25 +72,32 @@ class TychoRPCClient:
class TychoRunner:
def __init__(self, with_binary_logs: bool = False):
def __init__(self, db_url: str, with_binary_logs: bool = False, initialized_accounts: list[str] = None):
self.with_binary_logs = with_binary_logs
self._db_url = db_url
self._initialized_accounts = initialized_accounts or []
def run_tycho(
self,
spkg_path: str,
start_block: int,
end_block: int,
protocol_type_names: list,
self,
spkg_path: str,
start_block: int,
end_block: int,
protocol_type_names: list,
initialized_accounts: list,
) -> None:
"""Run the Tycho indexer with the specified SPKG and block range."""
env = os.environ.copy()
env["RUST_LOG"] = "info"
env["RUST_LOG"] = "tycho_indexer=info"
all_accounts = self._initialized_accounts + initialized_accounts
try:
process = subprocess.Popen(
[
binary_path,
"--database-url",
self._db_url,
"run",
"--spkg",
spkg_path,
@@ -88,14 +108,15 @@ class TychoRunner:
"--start-block",
str(start_block),
"--stop-block",
str(end_block + 2),
], # +2 is to make up for the cache in the index side.
# +2 is to make up for the cache in the index side.
str(end_block + 2)
] + (["--initialized-accounts", ",".join(all_accounts)] if all_accounts else []),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
bufsize=1,
env=env,
)
)
with process.stdout:
for line in iter(process.stdout.readline, ""):
@@ -128,7 +149,12 @@ class TychoRunner:
env["RUST_LOG"] = "info"
process = subprocess.Popen(
[binary_path, "rpc"],
[
binary_path,
"--database-url",
self._db_url,
"rpc"
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
@@ -178,7 +204,7 @@ class TychoRunner:
def empty_database(db_url: str) -> None:
"""Drop and recreate the Tycho indexer database."""
try:
conn = psycopg2.connect(db_url)
conn = psycopg2.connect(db_url[:db_url.rfind('/')])
conn.autocommit = True
cursor = conn.cursor()

View File

@@ -1,7 +1,4 @@
from setuptools import setup, find_packages
import sys
import platform
from pathlib import Path
def read_requirements():
@@ -11,25 +8,6 @@ def read_requirements():
return [req for req in requirements if req and not req.startswith("#")]
# Determine the correct wheel file based on the platform and Python version
def get_wheel_file():
path = Path(__file__).parent
if sys.platform.startswith("darwin") and platform.machine() == "arm64":
return str(
path / "wheels" / f"protosim_py-0.4.9-cp39-cp39-macosx_11_0_arm64.whl"
)
elif sys.platform.startswith("linux") and platform.machine() == "x86_64":
return str(
path
/ "wheels"
/ f"protosim_py-0.4.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
)
else:
raise RuntimeError("Unsupported platform or architecture")
wheel_file = get_wheel_file()
setup(
name="tycho-client",
version="0.1.0",
@@ -51,7 +29,7 @@ setup(
"eth-utils==1.9.5",
"hexbytes==0.3.1",
"pydantic==2.8.2",
f"protosim_py @ file://{wheel_file}",
"protosim_py==0.4.11",
],
package_data={"tycho-client": ["../wheels/*", "./assets/*", "./bins/*"]},
include_package_data=True,

View File

@@ -1,11 +1,18 @@
import time
from decimal import Decimal
from logging import getLogger
from typing import Any
import eth_abi
from eth_utils import keccak
from protosim_py import SimulationEngine, SimulationParameters, AccountInfo
from .constants import EXTERNAL_ACCOUNT
from .exceptions import TychoDecodeError
from .models import EVMBlock, EthereumToken
from .pool_state import ThirdPartyPool
from .utils import decode_tycho_exchange
from .tycho_db import TychoDBSingleton
from .utils import decode_tycho_exchange, get_code_for_address
log = getLogger(__name__)
@@ -13,16 +20,16 @@ log = getLogger(__name__)
class ThirdPartyPoolTychoDecoder:
"""ThirdPartyPool decoder for protocol messages from the Tycho feed"""
def __init__(self, adapter_contract: str, minimum_gas: int, hard_limit: bool):
def __init__(self, adapter_contract: str, minimum_gas: int, trace: bool):
self.adapter_contract = adapter_contract
self.minimum_gas = minimum_gas
self.hard_limit = hard_limit
self.trace = trace
def decode_snapshot(
self,
snapshot: dict[str, Any],
block: EVMBlock,
tokens: dict[str, EthereumToken],
self,
snapshot: dict[str, Any],
block: EVMBlock,
tokens: dict[str, EthereumToken],
) -> tuple[dict[str, ThirdPartyPool], list[str]]:
pools = {}
failed_pools = []
@@ -38,7 +45,7 @@ class ThirdPartyPoolTychoDecoder:
return pools, failed_pools
def decode_pool_state(
self, snap: dict, block: EVMBlock, tokens: dict[str, EthereumToken]
self, snap: dict, block: EVMBlock, tokens: dict[str, EthereumToken]
) -> ThirdPartyPool:
component = snap["component"]
exchange, _ = decode_tycho_exchange(component["protocol_system"])
@@ -49,7 +56,7 @@ class ThirdPartyPoolTychoDecoder:
raise TychoDecodeError("Unsupported token", pool_id=component["id"])
balances = self.decode_balances(snap, tokens)
optional_attributes = self.decode_optional_attributes(component, snap)
optional_attributes = self.decode_optional_attributes(component, snap, block.id)
return ThirdPartyPool(
id_=optional_attributes.pop("pool_id", component["id"]),
@@ -61,41 +68,68 @@ class ThirdPartyPoolTychoDecoder:
exchange=exchange,
adapter_contract_name=self.adapter_contract,
minimum_gas=self.minimum_gas,
hard_sell_limit=self.hard_limit,
trace=False,
trace=self.trace,
**optional_attributes,
)
@staticmethod
def decode_optional_attributes(component, snap):
def decode_optional_attributes(component, snap, block_number):
# Handle optional state attributes
attributes = snap["state"]["attributes"]
pool_id = attributes.get("pool_id") or component["id"]
balance_owner = attributes.get("balance_owner")
stateless_contracts = {}
static_attributes = snap["component"]["static_attributes"]
pool_id = static_attributes.get("pool_id") or component["id"]
index = 0
while f"stateless_contract_addr_{index}" in static_attributes:
encoded_address = static_attributes[f"stateless_contract_addr_{index}"]
address = bytes.fromhex(encoded_address[2:] if encoded_address.startswith('0x') else encoded_address).decode('utf-8')
decoded = bytes.fromhex(
encoded_address[2:] if encoded_address.startswith('0x') else encoded_address).decode('utf-8')
if decoded.startswith("call"):
address = ThirdPartyPoolTychoDecoder.get_address_from_call(block_number, decoded)
else:
address = decoded
code = static_attributes.get(f"stateless_contract_code_{index}") or get_code_for_address(address)
stateless_contracts[address] = code
index += 1
index = 0
while f"stateless_contract_addr_{index}" in attributes:
address = attributes[f"stateless_contract_addr_{index}"]
code = attributes.get(f"stateless_contract_code_{index}") or get_code_for_address(address)
stateless_contracts[address] = code
index += 1
index += 1
return {
"balance_owner": balance_owner,
"pool_id": pool_id,
"stateless_contracts": stateless_contracts,
}
@staticmethod
def get_address_from_call(block_number, decoded):
db = TychoDBSingleton.get_instance()
engine = SimulationEngine.new_with_tycho_db(db=db)
engine.init_account(
address="0x0000000000000000000000000000000000000000",
account=AccountInfo(balance=0, nonce=0),
mocked=False,
permanent_storage=None,
)
selector = keccak(text=decoded.split(":")[-1])[:4]
sim_result = engine.run_sim(SimulationParameters(
data=bytearray(selector),
to=decoded.split(':')[1],
block_number=block_number,
timestamp=int(time.time()),
overrides={},
caller=EXTERNAL_ACCOUNT,
value=0,
))
address = eth_abi.decode(["address"], bytearray(sim_result.result))
return address[0]
@staticmethod
def decode_balances(snap, tokens):
balances = {}
@@ -109,10 +143,10 @@ class ThirdPartyPoolTychoDecoder:
@staticmethod
def apply_update(
pool: ThirdPartyPool,
pool_update: dict[str, Any],
balance_updates: dict[str, Any],
block: EVMBlock,
pool: ThirdPartyPool,
pool_update: dict[str, Any],
balance_updates: dict[str, Any],
block: EVMBlock,
) -> ThirdPartyPool:
# check for and apply optional state attributes
attributes = pool_update.get("updated_attributes")

View File

@@ -51,7 +51,7 @@ class EthereumToken(BaseModel):
return int(amount)
def from_onchain_amount(
self, onchain_amount: Union[int, Fraction], quantize: bool = True
self, onchain_amount: Union[int, Fraction], quantize: bool = True
) -> Decimal:
"""Converts an Integer to a quantized decimal, by shifting left by the token's
maximum amount of decimals (e.g.: 1000000 becomes 1.000000 for a 6-decimal token
@@ -66,14 +66,14 @@ class EthereumToken(BaseModel):
with localcontext(Context(rounding=ROUND_FLOOR, prec=256)):
if isinstance(onchain_amount, Fraction):
return (
Decimal(onchain_amount.numerator)
/ Decimal(onchain_amount.denominator)
/ Decimal(10 ** self.decimals)
Decimal(onchain_amount.numerator)
/ Decimal(onchain_amount.denominator)
/ Decimal(10 ** self.decimals)
).quantize(Decimal(f"{1 / 10 ** self.decimals}"))
if quantize is True:
try:
amount = (
Decimal(str(onchain_amount)) / 10 ** self.decimals
Decimal(str(onchain_amount)) / 10 ** self.decimals
).quantize(Decimal(f"{1 / 10 ** self.decimals}"))
except InvalidOperation:
amount = Decimal(str(onchain_amount)) / Decimal(10 ** self.decimals)
@@ -114,6 +114,8 @@ class Capability(IntEnum):
ConstantPrice = auto()
TokenBalanceIndependent = auto()
ScaledPrice = auto()
HardLimits = auto()
MarginalPrice = auto()
class SynchronizerState(Enum):

View File

@@ -64,13 +64,6 @@ class ThirdPartyPool(BaseModel):
trace: bool = False
hard_sell_limit: bool = False
"""
Whether the pool will revert if you attempt to sell more than the limit. Defaults to
False where it is assumed that exceeding the limit will provide a bad price but will
still succeed.
"""
def __init__(self, **data):
super().__init__(**data)
self._set_engine(data.get("engine", None))
@@ -112,7 +105,7 @@ class ThirdPartyPool(BaseModel):
engine.init_account(
address=ADAPTER_ADDRESS,
account=AccountInfo(
balance=0,
balance=MAX_BALANCE,
nonce=0,
code=get_contract_bytecode(self.adapter_contract_name),
),
@@ -172,14 +165,14 @@ class ThirdPartyPool(BaseModel):
)
def get_amount_out(
self: TPoolState,
sell_token: EthereumToken,
sell_amount: Decimal,
buy_token: EthereumToken,
self: TPoolState,
sell_token: EthereumToken,
sell_amount: Decimal,
buy_token: EthereumToken,
) -> tuple[Decimal, int, TPoolState]:
# if the pool has a hard limit and the sell amount exceeds that, simulate and
# raise a partial trade
if self.hard_sell_limit:
if Capability.HardLimits in self.capabilities:
sell_limit = self.get_sell_amount_limit(sell_token, buy_token)
if sell_amount > sell_limit:
partial_trade = self._get_amount_out(sell_token, sell_limit, buy_token)
@@ -192,10 +185,10 @@ class ThirdPartyPool(BaseModel):
return self._get_amount_out(sell_token, sell_amount, buy_token)
def _get_amount_out(
self: TPoolState,
sell_token: EthereumToken,
sell_amount: Decimal,
buy_token: EthereumToken,
self: TPoolState,
sell_token: EthereumToken,
sell_amount: Decimal,
buy_token: EthereumToken,
) -> tuple[Decimal, int, TPoolState]:
trade, state_changes = self._adapter_contract.swap(
cast(HexStr, self.id_),
@@ -223,7 +216,7 @@ class ThirdPartyPool(BaseModel):
return buy_amount, trade.gas_used, new_state
def _get_overwrites(
self, sell_token: EthereumToken, buy_token: EthereumToken, **kwargs
self, sell_token: EthereumToken, buy_token: EthereumToken, **kwargs
) -> dict[Address, dict[int, int]]:
"""Get an overwrites dictionary to use in a simulation.
@@ -234,7 +227,7 @@ class ThirdPartyPool(BaseModel):
return _merge(self.block_lasting_overwrites, token_overwrites)
def _get_token_overwrites(
self, sell_token: EthereumToken, buy_token: EthereumToken, max_amount=None
self, sell_token: EthereumToken, buy_token: EthereumToken, max_amount=None
) -> dict[Address, dict[int, int]]:
"""Creates overwrites for a token.
@@ -297,13 +290,12 @@ class ThirdPartyPool(BaseModel):
engine=self._engine,
balances=self.balances,
minimum_gas=self.minimum_gas,
hard_sell_limit=self.hard_sell_limit,
balance_owner=self.balance_owner,
stateless_contracts=self.stateless_contracts,
)
def get_sell_amount_limit(
self, sell_token: EthereumToken, buy_token: EthereumToken
self, sell_token: EthereumToken, buy_token: EthereumToken
) -> Decimal:
"""
Retrieves the sell amount of the given token.

View File

@@ -26,10 +26,10 @@ log = getLogger(__name__)
class TokenLoader:
def __init__(
self,
tycho_url: str,
blockchain: Blockchain,
min_token_quality: Optional[int] = 0,
self,
tycho_url: str,
blockchain: Blockchain,
min_token_quality: Optional[int] = 0,
):
self.tycho_url = tycho_url
self.blockchain = blockchain
@@ -45,10 +45,10 @@ class TokenLoader:
start = time.monotonic()
all_tokens = []
while data := self._get_all_with_pagination(
url=url,
page=page,
limit=self._token_limit,
params={"min_quality": self.min_token_quality},
url=url,
page=page,
limit=self._token_limit,
params={"min_quality": self.min_token_quality},
):
all_tokens.extend(data)
page += 1
@@ -73,10 +73,10 @@ class TokenLoader:
start = time.monotonic()
all_tokens = []
while data := self._get_all_with_pagination(
url=url,
page=page,
limit=self._token_limit,
params={"min_quality": self.min_token_quality, "addresses": addresses},
url=url,
page=page,
limit=self._token_limit,
params={"min_quality": self.min_token_quality, "addresses": addresses},
):
all_tokens.extend(data)
page += 1
@@ -95,7 +95,7 @@ class TokenLoader:
@staticmethod
def _get_all_with_pagination(
url: str, params: Optional[Dict] = None, page: int = 0, limit: int = 50
url: str, params: Optional[Dict] = None, page: int = 0, limit: int = 50
) -> Dict:
if params is None:
params = {}
@@ -122,14 +122,14 @@ class BlockProtocolChanges:
class TychoPoolStateStreamAdapter:
def __init__(
self,
tycho_url: str,
protocol: str,
decoder: ThirdPartyPoolTychoDecoder,
blockchain: Blockchain,
min_tvl: Optional[Decimal] = 10,
min_token_quality: Optional[int] = 0,
include_state=True,
self,
tycho_url: str,
protocol: str,
decoder: ThirdPartyPoolTychoDecoder,
blockchain: Blockchain,
min_tvl: Optional[Decimal] = 10,
min_token_quality: Optional[int] = 0,
include_state=True,
):
"""
:param tycho_url: URL to connect to Tycho DB
@@ -238,7 +238,7 @@ class TychoPoolStateStreamAdapter:
@staticmethod
def build_snapshot_message(
protocol_components: dict, protocol_states: dict, contract_states: dict
protocol_components: dict, protocol_states: dict, contract_states: dict
) -> dict[str, ThirdPartyPool]:
vm_states = {state["address"]: state for state in contract_states["accounts"]}
states = {}
@@ -248,7 +248,7 @@ class TychoPoolStateStreamAdapter:
for state in protocol_states["states"]:
pool_id = state["component_id"]
if pool_id not in states:
log.warning(f"State for pool {pool_id} not found in components")
log.debug(f"{pool_id} was present in snapshot but not in components")
continue
states[pool_id]["state"] = state
snapshot = {"vm_storage": vm_states, "states": states}
@@ -269,7 +269,7 @@ class TychoPoolStateStreamAdapter:
return self.process_snapshot(block, state_msg["snapshot"])
def process_snapshot(
self, block: EVMBlock, state_msg: dict
self, block: EVMBlock, state_msg: dict
) -> BlockProtocolChanges:
start = time.monotonic()
removed_pools = set()

View File

@@ -74,6 +74,7 @@ class ERC20OverwriteFactory:
self._overwrites = dict()
self._balance_slot: Final[int] = 0
self._allowance_slot: Final[int] = 1
self._total_supply_slot: Final[int] = 2
def set_balance(self, balance: int, owner: Address):
"""
@@ -111,6 +112,19 @@ class ERC20OverwriteFactory:
f"spender={spender} value={allowance} slot={storage_index}",
)
def set_total_supply(self, supply: int):
    """Override the token's total supply in contract storage.

    Writes ``supply`` into the factory's dedicated total-supply slot
    (slot index 2, see ``self._total_supply_slot``) so that simulations
    observe the overridden value instead of the on-chain one.

    Parameters:
        supply: The total supply value.
    """
    slot = self._total_supply_slot
    self._overwrites[slot] = supply
    # Trace-level (5) logging, matching the other overwrite setters.
    message = f"Override total supply: token={self._token.address} supply={supply}"
    log.log(5, message)
def get_protosim_overwrites(self) -> dict[Address, dict[int, int]]:
"""
Get the overwrites dictionary of previously collected values.