fix(ci): clippy, fmt and remove substreams logs
@@ -34,6 +34,7 @@ impl SerializableVecBigInt for Vec<BigInt> {
 /// - Weighted Pool Factories
 /// - Linear Pool Factories
 /// - Stable Pool Factories
+///
 /// (Balancer does have a bit more (esp. in the deprecated section) that could be implemented as
 /// desired.)
 /// We use the specific ABIs to decode both the log event and corresponding call to gather
@@ -8,6 +8,8 @@ fn main() -> Result<()> {

 let files = fs::read_dir(abi_folder)?;
 let mut mod_rs_content = String::new();
+mod_rs_content.push_str("#![allow(clippy::all)]\n");
+mod_rs_content.push_str("#[allow(non_snake_case)]\n");

 for file in files {
 let file = file?;
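Note on the codegen main above (presumably the crate's build script): the two new push_str calls prepend lint-allow attributes to the generated abi/mod.rs so that clippy and the non_snake_case warning stay quiet for generated ABI bindings. A minimal sketch of the same pattern, assuming a hypothetical output path and module name (not taken from this repo):

use std::{fs, io::Write};

fn main() -> std::io::Result<()> {
    // Illustrative output location; the real build script derives paths from the ABI folder.
    fs::create_dir_all("src/abi")?;
    let mut mod_rs_content = String::new();
    // Silence clippy and naming lints on the generated bindings.
    mod_rs_content.push_str("#![allow(clippy::all)]\n");
    mod_rs_content.push_str("#[allow(non_snake_case)]\n");
    mod_rs_content.push_str("pub mod erc20;\n"); // hypothetical generated module
    fs::File::create("src/abi/mod.rs")?.write_all(mod_rs_content.as_bytes())
}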
File diff suppressed because it is too large
@@ -1,3 +1,5 @@
+#![allow(clippy::all)]
+#[allow(non_snake_case)]
 pub mod crypto_pool_factory;
 pub mod stableswap_factory;
 pub mod susd;
@@ -1,3 +1,4 @@
+#![allow(clippy::not_unsafe_ptr_arg_deref)]
 mod abi;
 mod consts;
 pub mod modules;
@@ -45,7 +45,7 @@ pub fn map_components(
 .logs_with_calls()
 .filter(|(_, call)| !call.call.state_reverted)
 .filter_map(|(log, call)| {
-Some(pool_factories::address_map(
+pool_factories::address_map(
 call.call
 .address
 .as_slice()
@@ -54,11 +54,11 @@ pub fn map_components(
 log,
 call.call,
 tx,
-)?)
+)
 })
 .collect::<Vec<_>>();

-if let Some(component) = emit_specific_pools(&params, &tx).expect(
+if let Some(component) = emit_specific_pools(&params, tx).expect(
 "An unexpected error occured when parsing params for emitting specific pools",
 ) {
 components.push(component)
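The Some(...?) wrapper dropped above is a no-op when the wrapped expression is already an Option (clippy's needless_question_mark), and &tx was an extra borrow of something that is already a reference. A std-only sketch of the filter_map part, with made-up names:

fn lookup(id: u32) -> Option<String> {
    (id % 2 == 0).then(|| format!("component-{id}"))
}

fn main() {
    let ids = [1u32, 2, 3, 4];
    // Before: .filter_map(|id| Some(lookup(*id)?))  -- clippy::needless_question_mark.
    // After: the Option is returned directly and filter_map drops the Nones.
    let found: Vec<String> = ids.iter().filter_map(|id| lookup(*id)).collect();
    assert_eq!(found, vec!["component-2", "component-4"]);
}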
@@ -95,8 +95,8 @@ pub fn store_component_tokens(map: BlockTransactionProtocolComponents, store: St
 &component
 .tokens
 .iter()
-.map(|token| hex::encode(token))
-.join(":".into()),
+.map(hex::encode)
+.join(":"),
 );
 });
 }
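A closure like |token| hex::encode(token) only forwards its argument, so clippy's redundant_closure lint suggests passing the function path itself; likewise join already takes &str, making the ":".into() conversion unnecessary. A dependency-free sketch, with encode standing in for hex::encode:

fn main() {
    let tokens: Vec<Vec<u8>> = vec![vec![0xde, 0xad], vec![0xbe, 0xef]];
    let joined = tokens
        .iter()
        .map(encode) // clippy::redundant_closure: was .map(|token| encode(token))
        .collect::<Vec<_>>()
        .join(":"); // join takes &str, so ":".into() added nothing
    assert_eq!(joined, "dead:beef");
}

// Stand-in for hex::encode so the sketch needs no external crate.
fn encode<T: AsRef<[u8]>>(bytes: T) -> String {
    bytes.as_ref().iter().map(|b| format!("{b:02x}")).collect()
}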
@@ -112,11 +112,10 @@ pub fn map_relative_balances(
 balance_deltas: {
 let mut deltas: Vec<_> = block
 .transactions()
-.into_iter()
 .flat_map(|tx| {
-emit_eth_deltas(&tx, &tokens_store)
+emit_eth_deltas(tx, &tokens_store)
 .into_iter()
-.chain(emit_deltas(&tx, &tokens_store))
+.chain(emit_deltas(tx, &tokens_store))
 })
 .collect();

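.transactions() presumably already returns an iterator, so the dropped .into_iter() was an identity call (clippy's useless_conversion). A generic std-only sketch of the same shape:

fn main() {
    let block = vec![10u64, 20, 30];
    let total: u64 = block
        .iter() // stand-in for block.transactions()
        // .into_iter()  <- removed: the receiver is already an Iterator, so this was a no-op
        .copied()
        .sum();
    assert_eq!(total, 60);
}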
@@ -268,7 +267,7 @@ pub fn map_protocol_changes(
 }),
 changes: transaction_contract_changes
 .drain()
-.sorted_unstable_by_key(|(index, _)| index.clone())
+.sorted_unstable_by_key(|(index, _)| *index)
 .filter_map(|(_, change)| {
 if change.contract_changes.is_empty() &&
 change.component_changes.is_empty() &&
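The sort key here is presumably a Copy integer, so index.clone() is just a noisier *index (clippy's clone_on_copy). A sketch using the std slice sort rather than the itertools sorted_unstable_by_key adaptor used above:

fn main() {
    let mut changes: Vec<(u64, &str)> = vec![(2, "b"), (1, "a"), (3, "c")];
    // clippy::clone_on_copy: u64 is Copy, so dereference instead of cloning.
    changes.sort_unstable_by_key(|(index, _)| *index);
    assert_eq!(changes, vec![(1, "a"), (2, "b"), (3, "c")]);
}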
@@ -11,7 +11,7 @@ use crate::{
 };

 fn get_pool_tokens(pool_address: &Vec<u8>, tokens_store: &StoreGetString) -> Option<Vec<String>> {
-let pool_key = format!("pool:{}", hex::encode(&pool_address));
+let pool_key = format!("pool:{}", hex::encode(pool_address));
 Some(
 tokens_store
 .get_last(pool_key)?
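pool_address is already a reference, so borrowing it again before passing it to hex::encode added nothing (clippy's needless_borrow). A dependency-free sketch with a stand-in encoder:

fn main() {
    let data: Vec<u8> = vec![0xab, 0xcd];
    let pool_address: &Vec<u8> = &data;
    // clippy::needless_borrow: `&pool_address` would be a &&Vec<u8>; the extra
    // borrow is redundant because pool_address is already a reference.
    let pool_key = format!("pool:{}", encode(pool_address));
    assert_eq!(pool_key, "pool:abcd");
}

// Stand-in for hex::encode (illustrative only).
fn encode(bytes: impl AsRef<[u8]>) -> String {
    bytes.as_ref().iter().map(|b| format!("{b:02x}")).collect()
}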
@@ -24,10 +24,8 @@ fn get_pool_tokens(pool_address: &Vec<u8>, tokens_store: &StoreGetString) -> Opt
 /// Tracks `Transfers` in and out of tracked pools if it matches the specific tokens.
 pub fn emit_deltas(tx: &TransactionTrace, tokens_store: &StoreGetString) -> Vec<BalanceDelta> {
 tx.logs_with_calls()
-.into_iter()
 .filter_map(|(log, _)| {
 let transfer = abi::erc20::events::Transfer::match_and_decode(log)?;

 let (component_id, pool_tokens, is_incoming) =
 if let Some(pool_tokens) = get_pool_tokens(&transfer.to, tokens_store) {
 (hex::encode(&transfer.to), pool_tokens, true)
@@ -53,7 +51,6 @@ pub fn emit_deltas(tx: &TransactionTrace, tokens_store: &StoreGetString) -> Vec<
 component_id: component_id.into(),
 })
 } else {
-substreams::log::info!("Token {:?} not in pool: {:?}", token_id, &component_id);
 None
 }
 })
@@ -69,7 +66,6 @@ pub fn emit_deltas(tx: &TransactionTrace, tokens_store: &StoreGetString) -> Vec<
 /// - If neither, it's likely an erroneous ETH transactions that many older pools don't reject.
 pub fn emit_eth_deltas(tx: &TransactionTrace, tokens_store: &StoreGetString) -> Vec<BalanceDelta> {
 tx.calls()
-.into_iter()
 .flat_map(|call| {
 call.call
 .balance_changes
@@ -28,7 +28,7 @@ impl SerializableVecBigInt for Vec<BigInt> {
 fn deserialize_bytes(bytes: &[u8]) -> Vec<BigInt> {
 bytes
 .chunks_exact(32)
-.map(|chunk| BigInt::from_signed_bytes_be(chunk))
+.map(BigInt::from_signed_bytes_be)
 .collect::<Vec<BigInt>>()
 }
 }
@@ -381,7 +381,7 @@ pub fn address_map(
 hash: tx.hash.clone(),
 index: tx.index.into(),
 }),
-tokens: pool_added.coins.into(),
+tokens: pool_added.coins,
 contracts: vec![component_id.into()],
 static_att: vec![
 Attribute {
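pool_added.coins presumably already has the type the tokens field expects, so .into() converted a value into its own type (clippy's useless_conversion). A trivial sketch:

fn main() {
    let coins: Vec<Vec<u8>> = vec![vec![0x01], vec![0x02]];
    // clippy::useless_conversion: `.into()` from a type into itself is a no-op.
    let tokens: Vec<Vec<u8>> = coins; // previously `coins.into()`
    assert_eq!(tokens.len(), 2);
}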
@@ -487,7 +487,7 @@ pub fn address_map(
 index: tx.index.into(),
 }),
 tokens,
-contracts: vec![pool_added.pool.into()],
+contracts: vec![pool_added.pool],
 static_att: vec![
 Attribute {
 name: "pool_type".into(),
@@ -701,8 +701,7 @@ fn get_token_from_pool(pool: &Vec<u8>) -> Vec<u8> {
 .call(META_REGISTRY.to_vec())
 })
 .or_else(|| {
-substreams::log::info!(format!("Using pool tree with pool {}", hex::encode(&pool)));
-match hex::encode(&pool).as_str() {
+match hex::encode(pool).as_str() {
 // Curve.fi DAI/USDC/USDT (3Crv)
 "bebc44782c7db0a1a60cb6fe97d0b483032ff1c7" => {
 hex::decode("6c3F90f043a72FA612cbac8115EE7e52BDe6E490").ok()
@@ -29,7 +29,7 @@ struct PoolQueryParams {
 /// This function can error based on some basic parsing errors and deeper down hex decoding errors
 /// if various addresses are not formatted properly.
 pub fn emit_specific_pools(
-params: &String,
+params: &str,
 tx: &TransactionTrace,
 ) -> Result<Option<ProtocolComponent>> {
 let pools = parse_params(params)?;
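A &String parameter forces callers to hold an owned String, so clippy's ptr_arg lint prefers &str; existing &String call sites keep compiling via deref coercion. A sketch with a made-up parse_pool_ids standing in for parse_params:

// clippy::ptr_arg: accept &str instead of &String so both owned strings and
// string literals can be passed without extra allocation.
fn parse_pool_ids(params: &str) -> Vec<&str> {
    params.split(',').filter(|s| !s.is_empty()).collect()
}

fn main() {
    let owned: String = "pool_a,pool_b".to_string();
    // A &String coerces to &str, so existing call sites are unaffected.
    assert_eq!(parse_pool_ids(&owned), vec!["pool_a", "pool_b"]);
    assert_eq!(parse_pool_ids("pool_c"), vec!["pool_c"]);
}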
@@ -60,12 +60,10 @@ fn create_component(
 static_att: zip(
 pool.attribute_keys
 .clone()
-.unwrap_or(vec![])
-.into_iter(),
+.unwrap_or(vec![]),
 pool.attribute_vals
 .clone()
-.unwrap_or(vec![])
-.into_iter(),
+.unwrap_or(vec![]),
 )
 .clone()
 .map(|(key, value)| Attribute {
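std::iter::zip accepts anything implementing IntoIterator, so the explicit .into_iter() calls on the two attribute vectors were redundant; dropping them is why the trailing commas moved onto the unwrap_or lines. A small sketch under that assumption, with made-up field values:

use std::iter::zip;

fn main() {
    let attribute_keys: Option<Vec<String>> = Some(vec!["fee".to_string()]);
    let attribute_vals: Option<Vec<Vec<u8>>> = Some(vec![vec![0x1e]]);
    // zip() takes IntoIterator, so the Vecs can be passed directly; the
    // .into_iter() calls clippy flagged were redundant.
    let attrs: Vec<(String, Vec<u8>)> = zip(
        attribute_keys.clone().unwrap_or(vec![]),
        attribute_vals.clone().unwrap_or(vec![]),
    )
    .collect();
    assert_eq!(attrs, vec![("fee".to_string(), vec![0x1e])]);
}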