chore: update format! macro use to satisfy latest clippy version (#194)
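Every hunk below applies the same mechanical rewrite: clippy (presumably the `uninlined_format_args` lint in the latest release) asks for format arguments to be inlined into the format string, either as captured identifiers or as named arguments, instead of positional `{}` / `{0}` placeholders. A minimal sketch of the pattern, assuming the `hex` crate used throughout this repository; `addr` is a made-up value:

fn example() {
    let addr = vec![0xde, 0xad, 0xbe, 0xef];
    // Before: positional argument outside the format string.
    let old = format!("0x{}", hex::encode(&addr));
    // After: a named argument inlined into the format string.
    let new = format!("0x{encoded}", encoded = hex::encode(&addr));
    assert_eq!(old, new);
}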
@@ -4,6 +4,6 @@ pub trait Hexable {
 
 impl<T: AsRef<[u8]>> Hexable for T {
 fn to_hex(&self) -> String {
-format!("0x{}", hex::encode(self))
+format!("0x{encoded}", encoded = hex::encode(self))
 }
 }
@@ -19,7 +19,7 @@ pub mod functions {
 &[ethabi::ParamType::Address, ethabi::ParamType::Address],
 maybe_data.unwrap(),
 )
-.map_err(|e| format!("unable to decode call.input: {:?}", e))?;
+.map_err(|e| format!("unable to decode call.input: {e:?}"))?;
 values.reverse();
 Ok(Self {
 owner: values
@@ -55,7 +55,7 @@ pub mod functions {
 }
 pub fn output(data: &[u8]) -> Result<substreams::scalar::BigInt, String> {
 let mut values = ethabi::decode(&[ethabi::ParamType::Uint(256usize)], data.as_ref())
-.map_err(|e| format!("unable to decode output data: {:?}", e))?;
+.map_err(|e| format!("unable to decode output data: {e:?}"))?;
 Ok({
 let mut v = [0 as u8; 32];
 values
@@ -132,7 +132,7 @@ pub mod functions {
 &[ethabi::ParamType::Address, ethabi::ParamType::Uint(256usize)],
 maybe_data.unwrap(),
 )
-.map_err(|e| format!("unable to decode call.input: {:?}", e))?;
+.map_err(|e| format!("unable to decode call.input: {e:?}"))?;
 values.reverse();
 Ok(Self {
 spender: values
@@ -178,7 +178,7 @@ pub mod functions {
 }
 pub fn output(data: &[u8]) -> Result<bool, String> {
 let mut values = ethabi::decode(&[ethabi::ParamType::Bool], data.as_ref())
-.map_err(|e| format!("unable to decode output data: {:?}", e))?;
+.map_err(|e| format!("unable to decode output data: {e:?}"))?;
 Ok(values
 .pop()
 .expect("one output data should have existed")
@@ -246,7 +246,7 @@ pub mod functions {
 return Err("no data to decode".to_string());
 }
 let mut values = ethabi::decode(&[ethabi::ParamType::Address], maybe_data.unwrap())
-.map_err(|e| format!("unable to decode call.input: {:?}", e))?;
+.map_err(|e| format!("unable to decode call.input: {e:?}"))?;
 values.reverse();
 Ok(Self {
 owner: values
@@ -273,7 +273,7 @@ pub mod functions {
 }
 pub fn output(data: &[u8]) -> Result<substreams::scalar::BigInt, String> {
 let mut values = ethabi::decode(&[ethabi::ParamType::Uint(256usize)], data.as_ref())
-.map_err(|e| format!("unable to decode output data: {:?}", e))?;
+.map_err(|e| format!("unable to decode output data: {e:?}"))?;
 Ok({
 let mut v = [0 as u8; 32];
 values
@@ -355,7 +355,7 @@ pub mod functions {
 }
 pub fn output(data: &[u8]) -> Result<substreams::scalar::BigInt, String> {
 let mut values = ethabi::decode(&[ethabi::ParamType::Uint(8usize)], data.as_ref())
-.map_err(|e| format!("unable to decode output data: {:?}", e))?;
+.map_err(|e| format!("unable to decode output data: {e:?}"))?;
 Ok({
 let mut v = [0 as u8; 32];
 values
@@ -437,7 +437,7 @@ pub mod functions {
 }
 pub fn output(data: &[u8]) -> Result<String, String> {
 let mut values = ethabi::decode(&[ethabi::ParamType::String], data.as_ref())
-.map_err(|e| format!("unable to decode output data: {:?}", e))?;
+.map_err(|e| format!("unable to decode output data: {e:?}"))?;
 Ok(values
 .pop()
 .expect("one output data should have existed")
@@ -514,7 +514,7 @@ pub mod functions {
 }
 pub fn output(data: &[u8]) -> Result<String, String> {
 let mut values = ethabi::decode(&[ethabi::ParamType::String], data.as_ref())
-.map_err(|e| format!("unable to decode output data: {:?}", e))?;
+.map_err(|e| format!("unable to decode output data: {e:?}"))?;
 Ok(values
 .pop()
 .expect("one output data should have existed")
@@ -591,7 +591,7 @@ pub mod functions {
 }
 pub fn output(data: &[u8]) -> Result<substreams::scalar::BigInt, String> {
 let mut values = ethabi::decode(&[ethabi::ParamType::Uint(256usize)], data.as_ref())
-.map_err(|e| format!("unable to decode output data: {:?}", e))?;
+.map_err(|e| format!("unable to decode output data: {e:?}"))?;
 Ok({
 let mut v = [0 as u8; 32];
 values
@@ -668,7 +668,7 @@ pub mod functions {
 &[ethabi::ParamType::Address, ethabi::ParamType::Uint(256usize)],
 maybe_data.unwrap(),
 )
-.map_err(|e| format!("unable to decode call.input: {:?}", e))?;
+.map_err(|e| format!("unable to decode call.input: {e:?}"))?;
 values.reverse();
 Ok(Self {
 to: values
@@ -714,7 +714,7 @@ pub mod functions {
 }
 pub fn output(data: &[u8]) -> Result<bool, String> {
 let mut values = ethabi::decode(&[ethabi::ParamType::Bool], data.as_ref())
-.map_err(|e| format!("unable to decode output data: {:?}", e))?;
+.map_err(|e| format!("unable to decode output data: {e:?}"))?;
 Ok(values
 .pop()
 .expect("one output data should have existed")
@@ -791,7 +791,7 @@ pub mod functions {
 ],
 maybe_data.unwrap(),
 )
-.map_err(|e| format!("unable to decode call.input: {:?}", e))?;
+.map_err(|e| format!("unable to decode call.input: {e:?}"))?;
 values.reverse();
 Ok(Self {
 from: values
@@ -845,7 +845,7 @@ pub mod functions {
 }
 pub fn output(data: &[u8]) -> Result<bool, String> {
 let mut values = ethabi::decode(&[ethabi::ParamType::Bool], data.as_ref())
-.map_err(|e| format!("unable to decode output data: {:?}", e))?;
+.map_err(|e| format!("unable to decode output data: {e:?}"))?;
 Ok(values
 .pop()
 .expect("one output data should have existed")
@@ -929,20 +929,19 @@ pub mod events {
 .topics
 .get(0)
 .expect("bounds already checked")
-.as_ref() ==
-Self::TOPIC_ID;
+.as_ref()
+== Self::TOPIC_ID;
 }
 pub fn decode(log: &substreams_ethereum::pb::eth::v2::Log) -> Result<Self, String> {
 let mut values =
 ethabi::decode(&[ethabi::ParamType::Uint(256usize)], log.data.as_ref())
-.map_err(|e| format!("unable to decode log.data: {:?}", e))?;
+.map_err(|e| format!("unable to decode log.data: {e:?}"))?;
 values.reverse();
 Ok(Self {
 owner: ethabi::decode(&[ethabi::ParamType::Address], log.topics[1usize].as_ref())
 .map_err(|e| {
 format!(
-"unable to decode param 'owner' from topic of type 'address': {:?}",
-e
+"unable to decode param 'owner' from topic of type 'address': {e:?}"
 )
 })?
 .pop()
@@ -954,8 +953,7 @@ pub mod events {
 spender: ethabi::decode(&[ethabi::ParamType::Address], log.topics[2usize].as_ref())
 .map_err(|e| {
 format!(
-"unable to decode param 'spender' from topic of type 'address': {:?}",
-e
+"unable to decode param 'spender' from topic of type 'address': {e:?}"
 )
 })?
 .pop()
@@ -1009,21 +1007,18 @@ pub mod events {
 .topics
 .get(0)
 .expect("bounds already checked")
-.as_ref() ==
-Self::TOPIC_ID;
+.as_ref()
+== Self::TOPIC_ID;
 }
 pub fn decode(log: &substreams_ethereum::pb::eth::v2::Log) -> Result<Self, String> {
 let mut values =
 ethabi::decode(&[ethabi::ParamType::Uint(256usize)], log.data.as_ref())
-.map_err(|e| format!("unable to decode log.data: {:?}", e))?;
+.map_err(|e| format!("unable to decode log.data: {e:?}"))?;
 values.reverse();
 Ok(Self {
 from: ethabi::decode(&[ethabi::ParamType::Address], log.topics[1usize].as_ref())
 .map_err(|e| {
-format!(
-"unable to decode param 'from' from topic of type 'address': {:?}",
-e
-)
+format!("unable to decode param 'from' from topic of type 'address': {e:?}")
 })?
 .pop()
 .expect(INTERNAL_ERR)
@@ -1033,7 +1028,7 @@ pub mod events {
 .to_vec(),
 to: ethabi::decode(&[ethabi::ParamType::Address], log.topics[2usize].as_ref())
 .map_err(|e| {
-format!("unable to decode param 'to' from topic of type 'address': {:?}", e)
+format!("unable to decode param 'to' from topic of type 'address': {e:?}")
 })?
 .pop()
 .expect(INTERNAL_ERR)

File diff suppressed because it is too large
@@ -24,7 +24,7 @@ pub fn json_serialize_address_list(addresses: &[Vec<u8>]) -> Vec<u8> {
 json_serialize_value(
 addresses
 .iter()
-.map(|a| format!("0x{}", hex::encode(a)))
+.map(|a| format!("0x{encoded}", encoded = hex::encode(a)))
 .collect::<Vec<_>>(),
 )
 }
@@ -40,7 +40,7 @@ pub fn json_serialize_bigint_list(values: &[BigInt]) -> Vec<u8> {
 json_serialize_value(
 values
 .iter()
-.map(|v| format!("0x{}", hex::encode(v.to_signed_bytes_be())))
+.map(|v| format!("0x{encoded}", encoded = hex::encode(v.to_signed_bytes_be())))
 .collect::<Vec<_>>(),
 )
 }
@@ -64,12 +64,9 @@ pub fn store_balance_changes(deltas: BlockBalanceDeltas, store: impl StoreAdd<Bi
 .balance_deltas
 .iter()
 .for_each(|delta| {
-let balance_key = format!(
-"{0}:{1}",
-String::from_utf8(delta.component_id.clone())
-.expect("delta.component_id is not valid utf-8!"),
-hex::encode(&delta.token)
-);
+let component = String::from_utf8(delta.component_id.clone())
+.expect("delta.component_id is not valid utf-8!");
+let balance_key = format!("{component}:{token}", token = hex::encode(&delta.token));
 let current_ord = delta.ord;
 previous_ordinal
 .entry(balance_key.clone())
@@ -77,8 +74,7 @@ pub fn store_balance_changes(deltas: BlockBalanceDeltas, store: impl StoreAdd<Bi
 // ordinals must arrive in increasing order
 if *ord >= current_ord {
 panic!(
-"Invalid ordinal sequence for {}: {} >= {}",
-balance_key, *ord, current_ord
+"Invalid ordinal sequence for {balance_key}: {ord} >= {current_ord}",
 );
 }
 *ord = current_ord;
@@ -323,17 +319,12 @@ mod tests {
 }
 }
 fn store_deltas() -> StoreDeltas {
-let comp_id = "0x42c0ffee"
-.to_string()
-.as_bytes()
-.to_vec();
+let comp_id = "0x42c0ffee".to_string();
 let token_0 = hex::decode("bad999").unwrap();
 let token_1 = hex::decode("babe00").unwrap();
 
-let t0_key =
-format!("{}:{}", String::from_utf8(comp_id.clone()).unwrap(), hex::encode(token_0));
-let t1_key =
-format!("{}:{}", String::from_utf8(comp_id.clone()).unwrap(), hex::encode(token_1));
+let t0_key = format!("{comp_id}:{token}", token = hex::encode(token_0));
+let t1_key = format!("{comp_id}:{token}", token = hex::encode(token_1));
 StoreDeltas {
 deltas: vec![
 StoreDelta {
@@ -394,26 +385,15 @@ mod tests {
 
 #[test]
 fn test_store_balances() {
-let comp_id = "0x42c0ffee"
-.to_string()
-.as_bytes()
-.to_vec();
+let comp_id = "0x42c0ffee".to_string();
 let token_0 = hex::decode("bad999").unwrap();
 let token_1 = hex::decode("babe00").unwrap();
 let deltas = block_balance_deltas();
 let store = <MockStore as StoreNew>::new();
 
 store_balance_changes(deltas, store.clone());
-let res_0 = store.get_last(format!(
-"{}:{}",
-String::from_utf8(comp_id.clone()).unwrap(),
-hex::encode(token_0)
-));
-let res_1 = store.get_last(format!(
-"{}:{}",
-String::from_utf8(comp_id.clone()).unwrap(),
-hex::encode(token_1)
-));
+let res_0 = store.get_last(format!("{comp_id}:{token}", token = hex::encode(token_0)));
+let res_1 = store.get_last(format!("{comp_id}:{token}", token = hex::encode(token_1)));
 
 assert_eq!(res_0, Some(BigInt::from_str("+999").unwrap()));
 assert_eq!(res_1, Some(BigInt::from_str("+150").unwrap()));
@@ -238,7 +238,7 @@ impl ProtocolComponent {
 /// - `tx`: Reference to the associated transaction.
 pub fn at_contract(id: &[u8]) -> Self {
 Self {
-id: format!("0x{}", hex::encode(id)),
+id: format!("0x{encoded}", encoded = hex::encode(id)),
 tokens: Vec::new(),
 contracts: vec![id.to_vec()],
 static_att: Vec::new(),
@@ -11,7 +11,7 @@ pub fn store_pool_balances(changes: BlockPoolChanges, balance_store: StoreAddBig
 let pool_hash_hex = hex::encode(&balance_delta.pool_hash);
 balance_store.add(
 balance_delta.ordinal,
-format!("{}:{}", pool_hash_hex, balance_delta.token_type),
+format!("{pool_hash_hex}:{type}", type = balance_delta.token_type),
 BigInt::from_signed_bytes_be(&balance_delta.token_delta),
 );
 }
@@ -315,7 +315,7 @@ fn map_changes(
 let pool_hash_hex = hex::encode(balance_delta.pool_hash);
 let pool = match pool_store.get_last(pool_hash_hex.clone()) {
 Some(pool) => pool,
-None => panic!("Pool not found in store for given hash: {}", pool_hash_hex),
+None => panic!("Pool not found in store for given hash: {pool_hash_hex}"),
 };
 let token_type = substreams::key::segment_at(&store_delta.key, 1);
 let token_index = if token_type == "quote" { 1 } else { 0 };
@@ -26,10 +26,10 @@ fn main() -> Result<()> {
 
 let contract_name = file_name.split('.').next().unwrap();
 
-let input_path = format!("{}/{}", abi_folder, file_name);
-let output_path = format!("{}/{}.rs", output_folder, contract_name);
+let input_path = format!("{abi_folder}/{file_name}");
+let output_path = format!("{output_folder}/{contract_name}.rs");
 
-mod_rs_content.push_str(&format!("pub mod {};\n", contract_name));
+mod_rs_content.push_str(&format!("pub mod {contract_name};\n"));
 
 if std::path::Path::new(&output_path).exists() {
 continue;
@@ -40,7 +40,7 @@ fn main() -> Result<()> {
 .write_to_file(&output_path)?;
 }
 
-let mod_rs_path = format!("{}/mod.rs", output_folder);
+let mod_rs_path = format!("{output_folder}/mod.rs");
 let mut mod_rs_file = fs::File::create(mod_rs_path)?;
 
 mod_rs_file.write_all(mod_rs_content.as_bytes())?;
@@ -53,7 +53,7 @@ pub fn store_components(map: BlockTransactionProtocolComponents, store: StoreSet
 tx_pc
 .components
 .into_iter()
-.for_each(|pc| store.set(0, format!("pool:{0}", &pc.id[..42]), &pc.id))
+.for_each(|pc| store.set(0, format!("pool:{id}", id = &pc.id[..42]), &pc.id))
 });
 }
 
@@ -73,10 +73,10 @@ pub fn map_relative_balances(
 if let Some(ev) =
 abi::vault::events::PoolBalanceChanged::match_and_decode(vault_log.log)
 {
-let component_id = format!("0x{}", hex::encode(ev.pool_id));
+let component_id = format!("0x{id}", id = hex::encode(ev.pool_id));
 
 if store
-.get_last(format!("pool:{}", &component_id[..42]))
+.get_last(format!("pool:{id}", id = &component_id[..42]))
 .is_some()
 {
 for (token, delta) in ev
@@ -95,10 +95,10 @@ pub fn map_relative_balances(
 }
 }
 } else if let Some(ev) = abi::vault::events::Swap::match_and_decode(vault_log.log) {
-let component_id = format!("0x{}", hex::encode(ev.pool_id));
+let component_id = format!("0x{id}", id = hex::encode(ev.pool_id));
 
 if store
-.get_last(format!("pool:{}", &component_id[..42]))
+.get_last(format!("pool:{id}", id = &component_id[..42]))
 .is_some()
 {
 deltas.extend_from_slice(&[
@@ -121,9 +121,9 @@ pub fn map_relative_balances(
 } else if let Some(ev) =
 abi::vault::events::PoolBalanceManaged::match_and_decode(vault_log.log)
 {
-let component_id = format!("0x{}", hex::encode(ev.pool_id));
+let component_id = format!("0x{id}", id = hex::encode(ev.pool_id));
 if store
-.get_last(format!("pool:{}", &component_id[..42]))
+.get_last(format!("pool:{id}", id = &component_id[..42]))
 .is_some()
 {
 deltas.extend_from_slice(&[BalanceDelta {
@@ -230,7 +230,7 @@ pub fn map_protocol_changes(
 &block,
 |addr| {
 components_store
-.get_last(format!("pool:0x{0}", hex::encode(addr)))
+.get_last(format!("pool:0x{id}", id = hex::encode(addr)))
 .is_some() ||
 addr.eq(VAULT_ADDRESS)
 },
@@ -251,7 +251,7 @@ pub fn map_protocol_changes(
 if address != VAULT_ADDRESS {
 // We reconstruct the component_id from the address here
 let id = components_store
-.get_last(format!("pool:0x{}", hex::encode(address)))
+.get_last(format!("pool:0x{id}", id = hex::encode(address)))
 .unwrap(); // Shouldn't happen because we filter by known components in
 // `extract_contract_changes_builder`
 change.mark_component_as_updated(&id);
@@ -26,10 +26,10 @@ fn main() -> Result<()> {
 
 let contract_name = file_name.split('.').next().unwrap();
 
-let input_path = format!("{}/{}", abi_folder, file_name);
-let output_path = format!("{}/{}.rs", output_folder, contract_name);
+let input_path = format!("{abi_folder}/{file_name}");
+let output_path = format!("{output_folder}/{contract_name}.rs");
 
-mod_rs_content.push_str(&format!("pub mod {};\n", contract_name));
+mod_rs_content.push_str(&format!("pub mod {contract_name};\n"));
 
 if std::path::Path::new(&output_path).exists() {
 continue;
@@ -40,7 +40,7 @@ fn main() -> Result<()> {
 .write_to_file(&output_path)?;
 }
 
-let mod_rs_path = format!("{}/mod.rs", output_folder);
+let mod_rs_path = format!("{output_folder}/mod.rs");
 let mut mod_rs_file = fs::File::create(mod_rs_path)?;
 
 mod_rs_file.write_all(mod_rs_content.as_bytes())?;
@@ -39,7 +39,7 @@ fn address_to_bytes_with_0x(address: &[u8; 20]) -> Vec<u8> {
 
 /// Converts address bytes into a string containing a leading `0x`.
 fn address_to_string_with_0x(address: &[u8]) -> String {
-format!("0x{}", hex::encode(address))
+format!("0x{encoded}", encoded = hex::encode(address))
 }
 
 /// Function that swaps `WETH` addresses for `ETH` address for specific factory types that decide
@@ -499,8 +499,8 @@ pub fn address_map(
 name: "stateless_contract_addr_0".into(),
 // Call views_implementation() on CRYPTO_SWAP_NG_FACTORY
 value: format!(
-"call:0x{}:views_implementation()",
-hex::encode(CRYPTO_SWAP_NG_FACTORY)
+"call:0x{factory}:views_implementation()",
+factory = hex::encode(CRYPTO_SWAP_NG_FACTORY)
 )
 .into(),
 change: ChangeType::Creation.into(),
@@ -579,8 +579,8 @@ pub fn address_map(
 name: "stateless_contract_addr_0".into(),
 // Call views_implementation() on CRYPTO_SWAP_NG_FACTORY
 value: format!(
-"call:0x{}:views_implementation()",
-hex::encode(CRYPTO_SWAP_NG_FACTORY)
+"call:0x{factory}:views_implementation()",
+factory = hex::encode(CRYPTO_SWAP_NG_FACTORY)
 )
 .into(),
 change: ChangeType::Creation.into(),
@@ -589,8 +589,8 @@ pub fn address_map(
 name: "stateless_contract_addr_1".into(),
 // Call math_implementation() on CRYPTO_SWAP_NG_FACTORY
 value: format!(
-"call:0x{}:math_implementation()",
-hex::encode(CRYPTO_SWAP_NG_FACTORY)
+"call:0x{factory}:math_implementation()",
+factory = hex::encode(CRYPTO_SWAP_NG_FACTORY)
 )
 .into(),
 change: ChangeType::Creation.into(),
@@ -651,14 +651,14 @@ pub fn address_map(
 }),
 },
 vec![EntityChanges {
-component_id: format!("0x{}", id),
+component_id: format!("0x{id}"),
 attributes: vec![
 Attribute {
 name: "stateless_contract_addr_0".into(),
 // Call views_implementation() on TRICRYPTO_FACTORY
 value: format!(
-"call:0x{}:views_implementation()",
-hex::encode(TRICRYPTO_FACTORY)
+"call:0x{factory}:views_implementation()",
+factory = hex::encode(TRICRYPTO_FACTORY)
 )
 .into(),
 change: ChangeType::Creation.into(),
@@ -667,8 +667,8 @@ pub fn address_map(
 name: "stateless_contract_addr_1".into(),
 // Call math_implementation() on TRICRYPTO_FACTORY
 value: format!(
-"call:0x{}:math_implementation()",
-hex::encode(TRICRYPTO_FACTORY)
+"call:0x{factory}:math_implementation()",
+factory = hex::encode(TRICRYPTO_FACTORY)
 )
 .into(),
 change: ChangeType::Creation.into(),
@@ -907,14 +907,14 @@ pub fn address_map(
 }),
 },
 vec![EntityChanges {
-component_id: format!("0x{}", id),
+component_id: format!("0x{id}"),
 attributes: vec![
 Attribute {
 name: "stateless_contract_addr_0".into(),
 // Call views_implementation() on TWOCRYPTO_FACTORY
 value: format!(
-"call:0x{}:views_implementation()",
-hex::encode(TWOCRYPTO_FACTORY)
+"call:0x{factory}:views_implementation()",
+factory = hex::encode(TWOCRYPTO_FACTORY)
 )
 .into(),
 change: ChangeType::Creation.into(),
@@ -99,7 +99,7 @@ fn create_component(
 }),
 },
 vec![EntityChanges {
-component_id: format!("0x{}", pool.address.clone()),
+component_id: format!("0x{pool_address}", pool_address = pool.address.clone()),
 attributes: zip(
 pool.attribute_keys
 .clone()
@@ -127,7 +127,7 @@ fn parse_params(params: &str) -> Result<HashMap<String, PoolQueryParams>, anyhow
 .split(PARAMS_SEPERATOR)
 .map(|param| {
 let pool: PoolQueryParams = serde_qs::from_str(param)
-.with_context(|| format!("Failed to parse pool query params: {0}", param))?;
+.with_context(|| format!("Failed to parse pool query params: {param}"))?;
 Ok((pool.tx_hash.clone(), pool))
 })
 .collect::<Result<HashMap<_, _>>>()
@@ -26,10 +26,10 @@ fn main() -> Result<()> {
 
 let contract_name = file_name.split('.').next().unwrap();
 
-let input_path = format!("{}/{}", abi_folder, file_name);
-let output_path = format!("{}/{}.rs", output_folder, contract_name);
+let input_path = format!("{abi_folder}/{file_name}");
+let output_path = format!("{output_folder}/{contract_name}.rs");
 
-mod_rs_content.push_str(&format!("pub mod {};\n", contract_name));
+mod_rs_content.push_str(&format!("pub mod {contract_name};\n"));
 
 if std::path::Path::new(&output_path).exists() {
 continue;
@@ -40,7 +40,7 @@ fn main() -> Result<()> {
 .write_to_file(&output_path)?;
 }
 
-let mod_rs_path = format!("{}/mod.rs", output_folder);
+let mod_rs_path = format!("{output_folder}/mod.rs");
 let mut mod_rs_file = fs::File::create(mod_rs_path)?;
 
 mod_rs_file.write_all(mod_rs_content.as_bytes())?;
@@ -30,7 +30,7 @@ pub fn store_pools(pools_created: BlockChanges, store: StoreSetIfNotExistsProto<
 )
 .to_u64(),
 };
-store.set_if_not_exists(0, format!("{}:{}", "Pool", pool_address), &pool);
+store.set_if_not_exists(0, format!("Pool:{pool_address}"), &pool);
 }
 }
 }
@@ -41,7 +41,7 @@ pub fn map_events(
 .logs
 .iter()
 .filter_map(|log| {
-let key = format!("{}:{}", "Pool", log.address.to_hex());
+let key = format!("Pool:{address}", address = log.address.to_hex());
 // Skip if the log is not from a known pool.
 if let Some(pool) = pools_store.get_last(key) {
 log_to_event(log, pool, &tx)
@@ -29,7 +29,7 @@ pub fn store_pools_balances(balances_deltas: BlockBalanceDeltas, store: StoreAdd
 }
 
 fn event_to_balance_deltas(event: PoolEvent) -> Vec<BalanceDelta> {
-let address = format!("0x{}", event.pool_address)
+let address = format!("0x{addr}", addr = event.pool_address)
 .as_bytes()
 .to_vec();
 match event.r#type.unwrap() {
@@ -20,7 +20,7 @@ pub fn store_pool_current_tick(events: Events, store: StoreSetInt64) {
 .into_iter()
 .filter_map(event_to_current_tick)
 .for_each(|(pool, ordinal, new_tick_index)| {
-store.set(ordinal, format!("pool:{0}", pool), &new_tick_index.into())
+store.set(ordinal, format!("pool:{pool}"), &new_tick_index.into())
 });
 }
 
@@ -36,7 +36,7 @@ pub fn map_liquidity_changes(
 .map(|e| {
 (
 pools_current_tick_store
-.get_at(e.log_ordinal, format!("pool:{0}", &e.pool_address))
+.get_at(e.log_ordinal, format!("pool:{pool_addr}", pool_addr = &e.pool_address))
 .unwrap_or(0),
 e,
 )
@@ -57,14 +57,14 @@ pub fn store_liquidity(ticks_deltas: LiquidityChanges, store: StoreSetSumBigInt)
 LiquidityChangeType::Delta => {
 store.sum(
 changes.ordinal,
-format!("pool:{0}", hex::encode(&changes.pool_address)),
+format!("pool:{addr}", addr = hex::encode(&changes.pool_address)),
 BigInt::from_signed_bytes_be(&changes.value),
 );
 }
 LiquidityChangeType::Absolute => {
 store.set(
 changes.ordinal,
-format!("pool:{0}", hex::encode(&changes.pool_address)),
+format!("pool:{addr}", addr = hex::encode(&changes.pool_address)),
 BigInt::from_signed_bytes_be(&changes.value),
 );
 }
@@ -34,7 +34,11 @@ pub fn store_ticks_liquidity(ticks_deltas: TickDeltas, store: StoreAddBigInt) {
 deltas.iter().for_each(|delta| {
 store.add(
 delta.ordinal,
-format!("pool:{0}:tick:{1}", hex::encode(&delta.pool_address), delta.tick_index,),
+format!(
+"pool:{addr}:tick:{index}",
+addr = hex::encode(&delta.pool_address),
+index = delta.tick_index,
+),
 BigInt::from_signed_bytes_be(&delta.liquidity_net_delta),
 );
 });
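Note that only bare identifiers can be captured inline in a format string; expressions such as `hex::encode(..)` still have to be bound to a named argument (or a local), which is why the hunk above switches to named arguments rather than inlining the calls. A small illustrative sketch (the values are made up):

fn example() {
    let pool_address = vec![0xab, 0xcd];
    let tick_index = 42;
    // `format!("pool:{hex::encode(&pool_address)}")` would not compile:
    // only plain identifiers can be captured inline.
    let key = format!(
        "pool:{addr}:tick:{index}",
        addr = hex::encode(&pool_address),
        index = tick_index,
    );
    assert_eq!(key, "pool:abcd:tick:42");
}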
@@ -86,7 +86,8 @@ pub fn map_protocol_changes(
 BigInt::from_str(&String::from_utf8(store_delta.old_value).unwrap())
 .unwrap()
 .is_zero();
-let attribute_name = format!("ticks/{}/net-liquidity", tick_delta.tick_index);
+let attribute_name =
+format!("ticks/{index}/net-liquidity", index = tick_delta.tick_index);
 let attribute = Attribute {
 name: attribute_name,
 value: new_value_bigint.to_signed_bytes_be(),
@@ -85,9 +85,9 @@ pub fn map_relative_balances(
 abi::stakedfrax_contract::events::Withdraw::match_and_decode(vault_log.log)
 {
 let address_bytes_be = vault_log.address();
-let address_hex = format!("0x{}", hex::encode(address_bytes_be));
+let address_hex = format!("0x{encoded}", encoded = hex::encode(address_bytes_be));
 if store
-.get_last(format!("pool:{}", address_hex))
+.get_last(format!("pool:{address_hex}"))
 .is_some()
 {
 deltas.extend_from_slice(&[
@@ -119,10 +119,10 @@ pub fn map_relative_balances(
 abi::stakedfrax_contract::events::Deposit::match_and_decode(vault_log.log)
 {
 let address_bytes_be = vault_log.address();
-let address_hex = format!("0x{}", hex::encode(address_bytes_be));
+let address_hex = format!("0x{encoded}", encoded = hex::encode(address_bytes_be));
 
 if store
-.get_last(format!("pool:{}", address_hex))
+.get_last(format!("pool:{address_hex}"))
 .is_some()
 {
 deltas.extend_from_slice(&[
@@ -154,10 +154,10 @@ pub fn map_relative_balances(
 abi::stakedfrax_contract::events::DistributeRewards::match_and_decode(vault_log.log)
 {
 let address_bytes_be = vault_log.address();
-let address_hex = format!("0x{}", hex::encode(address_bytes_be));
+let address_hex = format!("0x{encoded}", encoded = hex::encode(address_bytes_be));
 
 if store
-.get_last(format!("pool:{}", address_hex))
+.get_last(format!("pool:{address_hex}"))
 .is_some()
 {
 deltas.extend_from_slice(&[BalanceDelta {
@@ -231,7 +231,7 @@ pub fn map_protocol_changes(
 &block,
 |addr| {
 components_store
-.get_last(format!("pool:0x{0}", hex::encode(addr)))
+.get_last(format!("pool:0x{encoded}", encoded = hex::encode(addr)))
 .is_some()
 },
 &mut transaction_contract,
@@ -117,10 +117,11 @@ pub fn store_reward_cycles(block_reward_cycles: BlockRewardCycles, store: StoreS
 .reward_cycles
 .into_iter()
 .for_each(|reward_cycle| {
-let address_hex = format!("0x{}", hex::encode(&reward_cycle.vault_address));
+let address_hex =
+format!("0x{encoded}", encoded = hex::encode(&reward_cycle.vault_address));
 store.set(
 reward_cycle.ord,
-format!("reward_cycle:{}", address_hex),
+format!("reward_cycle:{address_hex}"),
 &reward_cycle.next_reward_amount,
 );
 });
@@ -141,10 +142,10 @@ pub fn map_relative_balances(
 abi::sfraxeth_contract::events::Withdraw::match_and_decode(vault_log.log)
 {
 let address_bytes_be = vault_log.address();
-let address_hex = format!("0x{}", hex::encode(address_bytes_be));
+let address_hex = format!("0x{encoded}", encoded = hex::encode(address_bytes_be));
 
 if store
-.get_last(format!("pool:{}", address_hex))
+.get_last(format!("pool:{address_hex}"))
 .is_some()
 {
 substreams::log::info!(
@@ -175,9 +176,9 @@ pub fn map_relative_balances(
 abi::sfraxeth_contract::events::Deposit::match_and_decode(vault_log.log)
 {
 let address_bytes_be = vault_log.address();
-let address_hex = format!("0x{}", hex::encode(address_bytes_be));
+let address_hex = format!("0x{encoded}", encoded = hex::encode(address_bytes_be));
 if store
-.get_last(format!("pool:{}", address_hex))
+.get_last(format!("pool:{address_hex}"))
 .is_some()
 {
 deltas.extend_from_slice(&[
@@ -204,9 +205,9 @@ pub fn map_relative_balances(
 .is_some()
 {
 let address_bytes_be = vault_log.address();
-let address_hex = format!("0x{}", hex::encode(address_bytes_be));
+let address_hex = format!("0x{encoded}", encoded = hex::encode(address_bytes_be));
 if store
-.get_last(format!("pool:{}", address_hex))
+.get_last(format!("pool:{address_hex}"))
 .is_some()
 {
 // When the NextRewardsCycle event is emitted:
@@ -221,13 +222,12 @@ pub fn map_relative_balances(
 if let Some(last_reward_amount) = reward_store
 .deltas
 .iter()
-.find(|el| el.key == format!("reward_cycle:{}", address_hex))
+.find(|el| el.key == format!("reward_cycle:{address_hex}"))
 .map(|el| el.old_value.clone())
 {
 substreams::log::info!(
-"Reward cycle balance change: address {}, sfraxEth amount {}",
-address_hex,
-BigInt::from_signed_bytes_be(&last_reward_amount)
+"Reward cycle balance change: address {address_hex}, sfraxEth amount {amount}",
+amount = BigInt::from_signed_bytes_be(&last_reward_amount)
 );
 deltas.push(BalanceDelta {
 ord: vault_log.ordinal(),
@@ -319,7 +319,7 @@ pub fn map_protocol_changes(
 &block,
 |addr| {
 components_store
-.get_last(format!("pool:0x{0}", hex::encode(addr)))
+.get_last(format!("pool:0x{encoded}", encoded = hex::encode(addr)))
 .is_some()
 },
 &mut transaction_changes,
@@ -26,10 +26,10 @@ fn main() -> Result<()> {
 
 let contract_name = file_name.split('.').next().unwrap();
 
-let input_path = format!("{}/{}", abi_folder, file_name);
-let output_path = format!("{}/{}.rs", output_folder, contract_name);
+let input_path = format!("{abi_folder}/{file_name}");
+let output_path = format!("{output_folder}/{contract_name}.rs");
 
-mod_rs_content.push_str(&format!("pub mod {};\n", contract_name));
+mod_rs_content.push_str(&format!("pub mod {contract_name};\n"));
 
 if std::path::Path::new(&output_path).exists() {
 continue;
@@ -40,7 +40,7 @@ fn main() -> Result<()> {
 .write_to_file(&output_path)?;
 }
 
-let mod_rs_path = format!("{}/mod.rs", output_folder);
+let mod_rs_path = format!("{output_folder}/mod.rs");
 let mut mod_rs_file = fs::File::create(mod_rs_path)?;
 
 mod_rs_file.write_all(mod_rs_content.as_bytes())?;
@@ -26,10 +26,10 @@ fn main() -> Result<()> {
 
 let contract_name = file_name.split('.').next().unwrap();
 
-let input_path = format!("{}/{}", abi_folder, file_name);
-let output_path = format!("{}/{}.rs", output_folder, contract_name);
+let input_path = format!("{abi_folder}/{file_name}");
+let output_path = format!("{output_folder}/{contract_name}.rs");
 
-mod_rs_content.push_str(&format!("pub mod {};\n", contract_name));
+mod_rs_content.push_str(&format!("pub mod {contract_name};\n"));
 
 if std::path::Path::new(&output_path).exists() {
 continue;
@@ -40,7 +40,7 @@ fn main() -> Result<()> {
 .write_to_file(&output_path)?;
 }
 
-let mod_rs_path = format!("{}/mod.rs", output_folder);
+let mod_rs_path = format!("{output_folder}/mod.rs");
 let mut mod_rs_file = fs::File::create(mod_rs_path)?;
 
 mod_rs_file.write_all(mod_rs_content.as_bytes())?;
@@ -95,7 +95,7 @@ fn handle_sync(
 });
 
 for (i, reserve_bytes) in reserves_bytes.iter().enumerate() {
-let attribute_name = format!("reserve{}", i);
+let attribute_name = format!("reserve{i}");
 // By using a HashMap, we can overwrite the previous value of the reserve attribute if
 // it is for the same pool and the same attribute name (reserves).
 tx_change.entity_changes.insert(
@@ -5,7 +5,7 @@ pub enum StoreKey {
 
 impl StoreKey {
 pub fn get_unique_pool_key(&self, key: &str) -> String {
-format!("{}:{}", self.unique_id(), key)
+format!("{prefix}:{key}", prefix = self.unique_id())
 }
 
 pub fn unique_id(&self) -> String {
@@ -18,7 +18,7 @@ pub fn store_pools(pools_created: BlockEntityChanges, store: StoreSetIfNotExists
 token1: component_change.tokens[1].clone(),
 created_tx_hash: change.tx.as_ref().unwrap().hash.clone(),
 };
-store.set_if_not_exists(0, format!("{}:{}", "Pool", pool_address), &pool);
+store.set_if_not_exists(0, format!("Pool:{pool_address}"), &pool);
 }
 }
 }
@@ -41,7 +41,7 @@ pub fn map_events(
 .logs
 .iter()
 .filter_map(|log| {
-let key = format!("{}:{}", "Pool", log.address.to_hex());
+let key = format!("Pool:{address}", address = log.address.to_hex());
 // Skip if the log is not from a known uniswapV3 pool.
 if let Some(pool) = pools_store.get_last(key) {
 log_to_event(log, pool, &tx)
@@ -20,7 +20,7 @@ pub fn store_pool_current_tick(events: Events, store: StoreSetInt64) {
 .into_iter()
 .filter_map(event_to_current_tick)
 .for_each(|(pool, ordinal, new_tick_index)| {
-store.set(ordinal, format!("pool:{0}", pool), &new_tick_index.into())
+store.set(ordinal, format!("pool:{pool}"), &new_tick_index.into())
 });
 }
 
@@ -19,7 +19,7 @@ pub fn store_pools(pools_created: BlockChanges, store: StoreSetIfNotExistsProto<
 token1: component_change.tokens[1].clone(),
 created_tx_hash: change.tx.as_ref().unwrap().hash.clone(),
 };
-store.set_if_not_exists(0, format!("{}:{}", "Pool", pool_address), &pool);
+store.set_if_not_exists(0, format!("Pool:{pool_address}"), &pool);
 }
 }
 }
@@ -24,7 +24,7 @@ pub fn map_balance_changes(
 {
 // Skip if the log is not from a known uniswapV3 pool.
 if let Some(pool) =
-pools_store.get_last(format!("{}:{}", "Pool", &log.address.to_hex()))
+pools_store.get_last(format!("Pool:{address}", address = &log.address.to_hex()))
 {
 tx_deltas.extend(get_log_changed_balances(log, &pool))
 } else {
@@ -38,7 +38,7 @@ pub fn map_pool_events(
 for (log, call_view) in trx.logs_with_calls() {
 // Skip if the log is not from a known uniswapV3 pool.
 if let Some(pool) =
-pools_store.get_last(format!("{}:{}", "Pool", &log.address.to_hex()))
+pools_store.get_last(format!("Pool:{address}", address = &log.address.to_hex()))
 {
 let changed_attributes = get_log_changed_attributes(
 log,
@@ -108,7 +108,7 @@ impl<'a> UniswapPoolStorage<'a> {
 // We need this to keep the references to the names alive until we call
 // `get_changed_attributes()`
 for tick_idx in ticks_idx.iter() {
-tick_names.push(format!("ticks/{}/net-liquidity", tick_idx));
+tick_names.push(format!("ticks/{tick_idx}/net-liquidity"));
 }
 
 // Then, iterate over ticks_idx and tick_names simultaneously
@@ -32,30 +32,18 @@ pub fn left_pad(input: &[u8], padding_value: u8) -> [u8; 32] {
 pub fn read_bytes(buf: &[u8], offset: usize, number_of_bytes: usize) -> &[u8] {
 let buf_length = buf.len();
 if buf_length < number_of_bytes {
-panic!(
-"attempting to read {number_of_bytes} bytes in buffer size {buf_size}",
-number_of_bytes = number_of_bytes,
-buf_size = buf.len()
-)
+panic!("attempting to read {number_of_bytes} bytes in buffer size {buf_length}",)
 }
 
 if offset > (buf_length - 1) {
-panic!(
-"offset {offset} exceeds buffer size {buf_size}",
-offset = offset,
-buf_size = buf.len()
-)
+panic!("offset {offset} exceeds buffer size {buf_length}")
 }
 
 let end = buf_length - 1 - offset;
 let start_opt = (end + 1).checked_sub(number_of_bytes);
 if start_opt.is_none() {
 panic!(
-"number of bytes {number_of_bytes} with offset {offset} exceeds buffer size
-{buf_size}",
-number_of_bytes = number_of_bytes,
-offset = offset,
-buf_size = buf.len()
+"number of bytes {number_of_bytes} with offset {offset} exceeds buffer size {buf_length}"
 )
 }
 let start = start_opt.unwrap();
@@ -158,7 +146,7 @@ mod tests {
 fn encode_hex(bytes: &[u8]) -> String {
 let mut s = String::with_capacity(bytes.len() * 2);
 for &b in bytes {
-write!(&mut s, "{:02x}", b).unwrap();
+write!(&mut s, "{b:02x}").unwrap();
 }
 s
 }
@@ -18,7 +18,7 @@ pub fn store_pools(pools_created: BlockEntityChanges, store: StoreSetIfNotExists
 currency1: component_change.tokens[1].clone(),
 created_tx_hash: change.tx.as_ref().unwrap().hash.clone(),
 };
-store.set_if_not_exists(0, format!("{}:{}", "pool", pool_address), &pool);
+store.set_if_not_exists(0, format!("pool:{pool_address}"), &pool);
 }
 }
 }
@@ -50,7 +50,7 @@ fn log_to_event(
 // We need to track initialization again to keep track of pool current tick, which is set on
 // initialization and changed on swaps.
 let pool_id = init.id.to_vec().to_hex();
-let pool = pools_store.get_last(format!("{}:{}", "pool", &pool_id))?;
+let pool = pools_store.get_last(format!("pool:{pool_id}"))?;
 Some(PoolEvent {
 log_ordinal: event.ordinal,
 pool_id,
@@ -67,7 +67,7 @@ fn log_to_event(
 })
 } else if let Some(swap) = Swap::match_and_decode(event) {
 let pool_id = swap.id.to_vec().to_hex();
-let pool = pools_store.get_last(format!("{}:{}", "pool", &pool_id))?;
+let pool = pools_store.get_last(format!("pool:{pool_id}"))?;
 Some(PoolEvent {
 log_ordinal: event.ordinal,
 pool_id,
@@ -102,7 +102,7 @@ fn log_to_event(
 // })
 } else if let Some(modify_liquidity) = ModifyLiquidity::match_and_decode(event) {
 let pool_id = modify_liquidity.id.to_vec().to_hex();
-let pool = pools_store.get_last(format!("{}:{}", "pool", &pool_id))?;
+let pool = pools_store.get_last(format!("pool:{pool_id}"))?;
 Some(PoolEvent {
 log_ordinal: event.ordinal,
 pool_id,
@@ -124,7 +124,7 @@ fn log_to_event(
 .id
 .to_vec()
 .to_hex();
-let pool = pools_store.get_last(format!("{}:{}", "pool", &pool_id))?;
+let pool = pools_store.get_last(format!("pool:{pool_id}"))?;
 Some(PoolEvent {
 log_ordinal: event.ordinal,
 pool_id: pool_id.clone(),
@@ -17,7 +17,7 @@ pub fn store_pool_current_sqrt_price(events: Events, store: StoreSetBigInt) {
 .into_iter()
 .filter_map(event_to_current_sqrt_price)
 .for_each(|(pool, ordinal, new_tick_index)| {
-store.set(ordinal, format!("pool:{0}", pool), &new_tick_index)
+store.set(ordinal, format!("pool:{pool}"), &new_tick_index)
 });
 }
 
@@ -14,7 +14,7 @@ pub fn store_pool_current_tick(events: Events, store: StoreSetInt64) {
 .into_iter()
 .filter_map(event_to_current_tick)
 .for_each(|(pool, ordinal, new_tick_index)| {
-store.set(ordinal, format!("pool:{0}", pool), &new_tick_index.into())
+store.set(ordinal, format!("pool:{pool}"), &new_tick_index.into())
 });
 }
 fn event_to_current_tick(event: PoolEvent) -> Option<(String, u64, i32)> {