Compare commits

..

13 Commits

Author SHA1 Message Date
tim
f1e602d414 Liquidity Party adapter 2025-12-10 15:53:45 -04:00
tim
f0f2f20c3c first pass at substreams 2025-10-22 16:56:40 -04:00
tim
e3ffa339bc first pass at substreams 2025-10-22 16:51:40 -04:00
TAMARA LIPOWSKI
1287d8c5c5 fix: Ekubo test 2025-10-09 17:52:31 +02:00
zach
07315294ce fix: add traced entrypoint 2025-10-09 16:42:51 +02:00
Zizou
568f26116e feat: add native balance in StorageChanges (#292)
* feat: add native balance in StorageChanges

This will be used by the next update of DCI to properly track native balances for all the contracts it indexes. Also added a performance optimization: we now ignore slot updates if the value after the transaction is the same as before.

* docs: add docs on how to generate test assets

---------

Co-authored-by: zizou <111426680+flopell@users.noreply.github.com>
2025-10-06 10:12:37 +00:00
dianacarvalho1
128de3037e fix: Misc improvements (#291)
* fix: Misc improvements

Make module_name optional and default to map_protocol_changes
Add build instructions for the DB image inside of the docker-compose
Improve error message
Remove unnecessary module_name from yaml files

#time 39m

* fix: Bring back module_name in balancer v3 tests

#time 8m

* fix: Fix balance and allowance overwrites

For tokens with proxies we need to use the address that is returned by the detector

#time 13m
2025-10-03 14:37:49 +00:00
dianacarvalho1
243cf14c3e feat: Upgrade tycho dependencies (#289)
Don't use tycho_execution from tycho_simulation

#time 8m
2025-09-30 14:33:01 +01:00
TAMARA LIPOWSKI
8de285a2ee feat: PancakeV3 test
- There are only two post-Cancun PancakeV3 pools.
- One was ORDER-WETH, which I guess had a liquidity issue; it failed with `StateDecodingFailure pool="0xb2dc4d7627501338b578985c214208eb32283086" error=Missing attributes tick_liquidities`
- The second one is this USDT-USDf pool used for this test, though we fail to detect the balance slot of USDT: `WrongSlotError("Slot override didn't change balance.")`
- For this reason, we skip execution.
- Also includes stop_block fix (better explained in Uniswap V3 test PR)
2025-09-29 12:04:07 -04:00
TAMARA LIPOWSKI
f9b4b5c223 fix: UniswapV2 test - use post-Cancun created pool
- Also fix balance check: this should use the stop block, not the start block, since the start block is before the component was even created.
2025-09-29 09:29:44 -04:00
Zizou
3c6f9c2083 fix: update balancer_v2 dependency after a fix in tycho-substreams (#281)
Co-authored-by: zizou <111426680+flopell@users.noreply.github.com>
2025-09-26 21:28:39 +00:00
dianacarvalho1
ea10bfa99a feat: Add state_registry.rs (#285)
This is to make it easier for users to add new protocol states

#time 34m


#time 0m
2025-09-26 17:10:44 +01:00
dianacarvalho1
9a7e6a1cf7 feat: Remove python suite (deprecated) (#284)
Update readme

#time 10m
2025-09-26 16:47:21 +01:00
75 changed files with 5055 additions and 3501 deletions

View File

@@ -40,7 +40,6 @@ interface ISwapAdapterTypes {
}
/// @dev Representation used for rational numbers such as prices.
// TODO: Use only uint128 for numerator and denominator.
struct Fraction {
uint256 numerator;
uint256 denominator;
@@ -61,6 +60,20 @@ interface ISwapAdapterTypes {
/// available for unexpected reason. E.g. it was paused due to a bug.
error Unavailable(string reason);
/// @dev The InvalidOrder error is thrown when the input to a swap is
/// not valid: e.g. if the limit price is negative, or below the
/// current price; the request amount is 0; the requested swap tokens
/// are not part of the pool; etc.
error InvalidOrder(string reason);
/// @dev The TooSmall error is thrown when the requested trade amount
/// is too small, causing either zero output or a numerical imprecision
/// problem. If lowerLimit is not zero, then it specifies the minimum
/// trade size required. If lowerLimit is zero, then the lower bound
/// cannot be easily computed, in which case solvers can binary search
/// for a precise lower bound.
error TooSmall(uint256 lowerLimit);
/// @dev The LimitExceeded error is thrown when a limit has been exceeded.
/// E.g. the specified amount can't be traded safely.
error LimitExceeded(uint256 limit);
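When `TooSmall` is raised with `lowerLimit == 0`, a solver has to probe for the smallest tradeable amount itself. A minimal sketch of such a probe, assuming an `ISwapAdapter` instance; the helper name `probeLowerLimit` and the search bounds are illustrative, not part of the interface:

```solidity
// Sketch only: binary-search the smallest amount for which price() stops
// reverting. A real solver would decode the revert data and only treat
// TooSmall this way; here any revert counts as "still too small".
function probeLowerLimit(
    ISwapAdapter adapter,
    bytes32 poolId,
    address sellToken,
    address buyToken,
    uint256 hi // any amount known to succeed
) internal view returns (uint256 lo) {
    lo = 1;
    uint256[] memory amounts = new uint256[](1);
    while (lo < hi) {
        uint256 mid = (lo + hi) / 2;
        amounts[0] = mid;
        try adapter.price(poolId, sellToken, buyToken, amounts) {
            hi = mid; // mid is tradeable, tighten the upper bound
        } catch {
            lo = mid + 1; // still too small
        }
    }
}
```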

View File

@@ -13,14 +13,267 @@ library FractionMath {
ISwapAdapterTypes.Fraction memory frac1,
ISwapAdapterTypes.Fraction memory frac2
) internal pure returns (int8) {
uint256 crossProduct1 = frac1.numerator * frac2.denominator;
uint256 crossProduct2 = frac2.numerator * frac1.denominator;
uint256 fixed1 = toQ128x128(frac1.numerator, frac1.denominator);
uint256 fixed2 = toQ128x128(frac2.numerator, frac2.denominator);
// fractions are equal
if (crossProduct1 == crossProduct2) return 0;
if (fixed1 == fixed2) return 0;
// frac1 is greater than frac2
else if (crossProduct1 > crossProduct2) return 1;
else if (fixed1 > fixed2) return 1;
// frac1 is less than frac2
else return -1;
}
/// @notice Converts a Fraction into unsigned Q128.128 fixed point
function toQ128x128(ISwapAdapterTypes.Fraction memory rational)
internal
pure
returns (uint256 result)
{
return toQ128x128(rational.numerator, rational.denominator);
}
/// @notice Converts an unsigned rational `numerator / denominator`
/// into Q128.128 (unsigned 128.128 fixed point),
/// rounding toward zero (floor for positive inputs).
///
/// see https://github.com/Liquidity-Party/toQ128x128
///
/// @dev Reverts if:
/// - `denominator == 0`, or
/// - the exact result >= 2^256 (i.e. overflow of uint256).
///
/// This computes floor(numerator * 2^128 / denominator)
/// using a full 512-bit intermediate to avoid precision loss.
///
function toQ128x128(uint256 numerator, uint256 denominator)
internal
pure
returns (uint256 result)
{
require(denominator != 0, "toQ128x128: div by zero");
// We want (numerator * 2^128) / denominator using full precision,
// so we implement a 512-bit muldiv.
//
// Let:
// prod = numerator * 2^128
//
// Since 2^128 is a power of two, the 512-bit product is easy:
// prod0 = (numerator << 128) mod 2^256 (low 256 bits)
// prod1 = (numerator >> 128) (high 256 bits)
//
// So prod = (prod1 * 2^256 + prod0).
uint256 prod0;
uint256 prod1;
unchecked {
prod0 = numerator << 128;
prod1 = numerator >> 128;
}
// If the high 256 bits are zero, the product fits in 256 bits.
// This is the cheap path: just do a normal 256-bit division.
if (prod1 == 0) {
unchecked {
// denominator was already checked for 0.
return prod0 / denominator;
}
}
// At this point prod1 > 0, so the 512-bit product does not fit in a
// uint256. We need a full-precision 512/256 division:
//
// result = floor((prod1 * 2^256 + prod0) / denominator)
//
// and we must ensure the final result fits in uint256.
// Ensure result < 2^256. This is equivalent to requiring:
// denominator > prod1
// because if denominator <= prod1, then:
// (prod1 * 2^256) / denominator >= 2^256.
require(denominator > prod1, "Q128x128: overflow");
// Make division exact by subtracting the remainder from [prod1 prod0].
uint256 remainder;
assembly {
// remainder = (prod1 * 2^256 + prod0) % denominator
// Since we can only directly mod 256-bit values, we first mod
// `prod0`, then adjust using the high word.
remainder := mulmod(numerator, shl(128, 1), denominator)
}
// Now subtract `remainder` from the 512-bit product [prod1 prod0].
assembly {
// Subtract remainder from the low part; if it underflows, borrow
// 1 from the high part.
let borrow := lt(prod0, remainder)
prod0 := sub(prod0, remainder)
prod1 := sub(prod1, borrow)
}
// Factor powers of two out of denominator to simplify the division.
//
// Let denominator = d * 2^shift, with d odd.
// We can divide prod0 by 2^shift cheaply (bit shift),
// then do an exact division by the odd d using modular inverse.
uint256 twos;
unchecked {
// largest power of two divisor of denominator
twos = denominator & (~denominator + 1);
}
assembly {
// Divide denominator by twos.
denominator := div(denominator, twos)
// Divide the low word by twos.
prod0 := div(prod0, twos)
// Adjust the high word so that the full 512-bit number is shifted
// by `twos`.
// twos = 2^k, so:
// combined = prod1 * 2^256 + prod0
// combined / twos =
// prod1 * 2^256 / twos + prod0 / twos
// and 2^256 / twos = 2^(256-k).
//
// Here we compute:
// twos = 2^256 / twos
twos := add(div(sub(0, twos), twos), 1)
// Now add the shifted high bits into prod0:
prod0 := or(prod0, mul(prod1, twos))
}
// At this point, denominator is odd and the 512-bit value
// has been squeezed into prod0 (prod1 is effectively 0).
// Compute the modular inverse of denominator modulo 2^256.
// This uses Newton-Raphson iteration:
//
// inv ≡ denominator^{-1} (mod 2^256)
//
// Starting from a seed for odd denominator:
// All operations must be unchecked as they rely on modular arithmetic.
unchecked {
uint256 inv = (3 * denominator) ^ 2;
// Perform Newton-Raphson iterations to refine the inverse.
// Starting from inv which is correct modulo 2^4, then each
// Newton-Raphson step doubles the number of correct bits:
// 2⁴ → 2⁸ → 2¹⁶ → 2³² → 2⁶⁴ →
// 2¹²⁸ → 2²⁵⁶
// Requiring six iterations for 256-bit precision:
inv *= 2 - denominator * inv;
inv *= 2 - denominator * inv;
inv *= 2 - denominator * inv;
inv *= 2 - denominator * inv;
inv *= 2 - denominator * inv;
inv *= 2 - denominator * inv;
// Now inv is the modular inverse of denominator mod 2^256.
// The exact division result is then:
//
// result = (prod0 * inv) mod 2^256
//
// which is just ordinary 256-bit multiplication.
result = prod0 * inv;
}
}
/// @notice Multiply a Fraction and a uint256 using full precision
function mul(ISwapAdapterTypes.Fraction memory rational, uint256 y)
internal
pure
returns (uint256 result)
{
return mulDiv(rational.numerator, y, rational.denominator);
}
/// @notice Full-precision mulDiv: computes floor(x * y / denominator)
/// with 512-bit intermediate precision to avoid overflow.
///
/// @dev Reverts if `denominator == 0` or the exact result >= 2^256.
/// The implementation mirrors the 512/256 division flow used by
/// `toQ128x128(uint256,uint256)`, but with a general multiplicand `y`
/// instead of the fixed 2^128 shift.
function mulDiv(uint256 x, uint256 y, uint256 denominator)
internal
pure
returns (uint256 result)
{
require(denominator != 0, "mulDiv: div by zero");
// Compute the 512-bit product [prod1 prod0] = x * y.
// mm = (x * y) mod (2^256 - 1)
// prod0 = (x * y) mod 2^256
// prod1 = (x * y - prod0 - (mm < prod0 ? 1 : 0)) / 2^256
uint256 prod0;
uint256 prod1;
assembly {
let mm := mulmod(x, y, not(0))
prod0 := mul(x, y)
prod1 := sub(sub(mm, prod0), lt(mm, prod0))
}
// If the high 256 bits are zero, we can do a simple 256-bit division.
if (prod1 == 0) {
unchecked {
return prod0 / denominator;
}
}
// Ensure result < 2^256. This is equivalent to requiring
// denominator > prod1.
require(denominator > prod1, "mulDiv: overflow");
// Make division exact by subtracting the remainder from [prod1 prod0].
uint256 remainder;
assembly {
remainder := mulmod(x, y, denominator)
// Subtract remainder from the low part; if it underflows, borrow 1
// from the high part.
let borrow := lt(prod0, remainder)
prod0 := sub(prod0, remainder)
prod1 := sub(prod1, borrow)
}
// Factor powers of two out of denominator to simplify the division.
uint256 twos;
unchecked {
// largest power of two divisor of denominator
twos = denominator & (~denominator + 1);
}
assembly {
// Divide denominator by twos.
denominator := div(denominator, twos)
// Divide the low word by twos.
prod0 := div(prod0, twos)
// Compute twos = 2^256 / twos.
twos := add(div(sub(0, twos), twos), 1)
// Shift bits from the high word into the low word.
prod0 := or(prod0, mul(prod1, twos))
}
// Compute modular inverse of the (now odd) denominator modulo 2^256
// via Newton-Raphson iterations.
// `inv` is correct to four bits, so we require six iterations
// to achieve 256-bit precision.
unchecked {
uint256 inv = (3 * denominator) ^ 2;
inv *= 2 - denominator * inv; // 2^8
inv *= 2 - denominator * inv; // 2^16
inv *= 2 - denominator * inv; // 2^32
inv *= 2 - denominator * inv; // 2^64
inv *= 2 - denominator * inv; // 2^128
inv *= 2 - denominator * inv; // 2^256
// Exact division: result = prod0 * inv mod 2^256
result = prod0 * inv;
}
}
}
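A quick sanity check of these helpers (a sketch, not part of the library; the function name is illustrative): `toQ128x128(3, 2)` should be `floor(3 * 2^128 / 2) = 3 * 2^127`, i.e. 1.5 in Q128.128, and `mulDiv(7, 9, 4)` should be `floor(63 / 4) = 15`.

```solidity
// Illustrative check of the conversion and mulDiv helpers.
function checkFractionMath() internal pure {
    // 3/2 = 1.5  =>  1.5 * 2^128 = 3 * 2^127
    require(FractionMath.toQ128x128(3, 2) == (3 << 127), "Q128.128 mismatch");
    // Small-number path (prod1 == 0): floor(7 * 9 / 4) = 15
    require(FractionMath.mulDiv(7, 9, 4) == 15, "mulDiv mismatch");
}
```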

View File

@@ -0,0 +1,18 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity ^0.8.27;
library Funding {
/// @notice a constant passed to swap as the fundingSelector to indicate
/// that the payer has used regular ERC20 approvals to allow the pool to
/// move the necessary input tokens.
// Slither analysis of this line is literally wrong and broken. The extra zero digits are REQUIRED by Solidity since it is a bytes4 literal.
// slither-disable-next-line too-many-digits
bytes4 internal constant APPROVALS = 0x00000000;
/// @notice a constant passed to swap as the fundingSelector to indicate
/// that the payer has already sent sufficient input tokens to the pool
/// before calling swap, so no movement of input tokens is required.
// Slither analysis of this line is literally wrong and broken. The extra zero digits are REQUIRED by Solidity since it is a bytes4 literal.
// slither-disable-next-line too-many-digits
bytes4 internal constant PREFUNDING = 0x00000001;
}

View File

@@ -0,0 +1,24 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity ^0.8.27;
import {IPartyPool} from "./IPartyPool.sol";
interface IPartyInfo {
/// @notice returns true iff the pool is not killed and has been initialized
/// with liquidity.
function working(IPartyPool pool) external view returns (bool);
/// @notice Infinitesimal out-per-in marginal price for swap base->quote as
/// Q128.128, not adjusted for token decimals.
/// @dev Returns p_base / p_quote in Q128.128 format, scaled to external
/// units by (denom_quote / denom_base). This aligns with the swap kernel so
/// that, fee-free, avg(out/in) ≤ price(base, quote) for exact-in trades.
/// @param baseTokenIndex index of the input (base) asset
/// @param quoteTokenIndex index of the output (quote) asset
/// @return price Q128.128 value equal to out-per-in (j per i)
function price(
IPartyPool pool,
uint256 baseTokenIndex,
uint256 quoteTokenIndex
) external view returns (uint256);
}
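Since the returned price is out-per-in in Q128.128, a rough output estimate for a small trade is `amountIn * price / 2^128`. A sketch, assuming `FractionMath.mulDiv` is in scope; the helper name is illustrative:

```solidity
// Sketch: turn the Q128.128 out-per-in price into an output estimate.
// Ignores fees and slippage, so it only holds for very small trades.
function estimateOut(
    IPartyInfo info,
    IPartyPool pool,
    uint256 baseTokenIndex,
    uint256 quoteTokenIndex,
    uint256 amountIn
) internal view returns (uint256) {
    uint256 p = info.price(pool, baseTokenIndex, quoteTokenIndex);
    return FractionMath.mulDiv(amountIn, p, 1 << 128);
}
```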

View File

@@ -0,0 +1,18 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity ^0.8.27;
import {IPartyPool} from "./IPartyPool.sol";
/// @title IPartyPlanner
/// @notice Interface for factory contract for creating and tracking PartyPool
/// instances
interface IPartyPlanner {
/// @notice Retrieves a page of pool addresses
/// @param offset Starting index for pagination
/// @param limit Maximum number of items to return
/// @return pools Array of pool addresses for the requested page
function getAllPools(uint256 offset, uint256 limit)
external
view
returns (IPartyPool[] memory pools);
}
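A caller that wants every pool just pages through until a short page comes back; a minimal sketch (the page size is arbitrary and the helper name is illustrative):

```solidity
// Sketch: count all pools by walking getAllPools page by page.
function countAllPools(IPartyPlanner planner)
    internal
    view
    returns (uint256 count)
{
    uint256 offset = 0;
    uint256 limit = 50;
    while (true) {
        IPartyPool[] memory page = planner.getAllPools(offset, limit);
        count += page.length;
        if (page.length < limit) break; // short page means we are done
        offset += limit;
    }
}
```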

View File

@@ -0,0 +1,86 @@
pragma solidity ^0.8.27;
/// @title PartyPool - LMSR-backed multi-asset pool with LP ERC20 token
/// @notice A multi-asset liquidity pool backed by the LMSRStabilized pricing
/// model. The pool issues an ERC20 LP token representing proportional
/// ownership.
/// It supports:
/// - Proportional minting and burning of LP _tokens,
/// - Single-token mint (swapMint) and single-asset withdrawal (burnSwap),
/// - Exact-input swaps and swaps-to-price-limits,
/// - Flash loans via a callback interface.
interface IPartyPool {
/// @notice If a security problem is found, the vault owner may call this
/// function to permanently disable swap and mint functionality, leaving
/// only burns (withdrawals) working.
function killed() external view returns (bool);
/// @notice Returns the number of tokens (n) in the pool.
function numTokens() external view returns (uint256);
/// @notice Returns the list of all token addresses in the pool (copy).
function allTokens() external view returns (address[] memory);
/// @notice External view to quote exact-in swap amounts (gross input incl.
/// fee and output), matching swap() computations @param inputTokenIndex
/// index of input token
/// @param outputTokenIndex index of output token
/// @param maxAmountIn maximum gross input allowed (inclusive of fee)
/// @param limitPrice maximum acceptable marginal price (pass 0 to ignore)
/// @return amountIn gross input amount to transfer (includes fee),
/// amountOut output amount user would receive, inFee fee taken from input
/// amount
function swapAmounts(
uint256 inputTokenIndex,
uint256 outputTokenIndex,
uint256 maxAmountIn,
int128 limitPrice
) external view returns (uint256 amountIn, uint256 amountOut, uint256 inFee);
/// @notice Swap input token inputTokenIndex -> token outputTokenIndex.
/// Payer must approve token inputTokenIndex. @param payer address of the
/// account that pays for the swap
/// @param fundingSelector If set to USE_APPROVALS, then the payer must use
/// regular ERC20 approvals to authorize the pool to move the required input
/// amount. If this fundingSelector is USE_PREFUNDING, then all of the input
/// amount is expected to have already been sent to the pool and no
/// additional transfers are needed. Refunds of excess input amount are NOT
/// provided and it is illegal to use this funding method with a limit
/// price. Otherwise, for any other fundingSelector value, a callback style
/// funding mechanism is used where the given selector is invoked on the
/// payer, passing the arguments of (address inputToken, uint256
/// inputAmount). The callback function must send the given amount of input
/// coin to the pool in order to continue the swap transaction, otherwise
/// "Insufficient funds" is thrown. @param receiver address that will
/// receive the output tokens
/// @param inputTokenIndex index of input asset
/// @param outputTokenIndex index of output asset
/// @param maxAmountIn maximum amount of token inputTokenIndex (uint256) to
/// transfer in (inclusive of fees) @param limitPrice maximum acceptable
/// marginal price (64.64 fixed point). Pass 0 to ignore.
/// @param deadline timestamp after which the transaction will revert. Pass
/// 0 to ignore. @param unwrap If true, then any output of wrapper token
/// will be unwrapped and native ETH sent to the receiver.
/// @param cbData callback data if fundingSelector is of the callback type.
/// @return amountIn actual input used (uint256), amountOut actual output
/// sent (uint256), inFee fee taken from the input (uint256)
function swap(
address payer,
bytes4 fundingSelector,
address receiver,
uint256 inputTokenIndex,
uint256 outputTokenIndex,
uint256 maxAmountIn,
int128 limitPrice,
uint256 deadline,
bool unwrap,
bytes memory cbData
)
external
payable
returns (uint256 amountIn, uint256 amountOut, uint256 inFee);
/// @notice Effective combined fee in ppm for the given asset pair (i as
/// input, j as output).
function fee(uint256 i, uint256 j) external view returns (uint256);
}
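The adapter below only uses the `Funding.PREFUNDING` path, but a payer that opts into the callback-style funding described above would look roughly like this. This is a sketch based on the doc comment only: the `fundSwap` name is an assumption, and the pool is expected to invoke whatever selector is passed as `fundingSelector` with `(inputToken, inputAmount)`.

```solidity
// Sketch of a callback-style payer (names and imports assumed).
contract CallbackPayer {
    using SafeERC20 for IERC20;

    IPartyPool public immutable POOL;

    constructor(IPartyPool pool) {
        POOL = pool;
    }

    // bytes4(keccak256("fundSwap(address,uint256)")) would be passed to
    // swap() as the fundingSelector. The pool calls back here and expects
    // the input amount to arrive, otherwise it reverts with
    // "Insufficient funds".
    function fundSwap(address inputToken, uint256 inputAmount) external {
        require(msg.sender == address(POOL), "only pool");
        IERC20(inputToken).safeTransfer(address(POOL), inputAmount);
    }
}
```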

View File

@@ -0,0 +1,258 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
pragma solidity ^0.8.27;
import {
IERC20
} from "../../lib/openzeppelin-contracts/contracts/token/ERC20/IERC20.sol";
import {
SafeERC20
} from "../../lib/openzeppelin-contracts/contracts/token/ERC20/utils/SafeERC20.sol";
import {ISwapAdapter} from "../interfaces/ISwapAdapter.sol";
import {Funding} from "./Funding.sol";
import {IPartyInfo} from "./IPartyInfo.sol";
import {IPartyPlanner} from "./IPartyPlanner.sol";
import {IPartyPool} from "./IPartyPool.sol";
import {console2} from "../../lib/forge-std/src/console2.sol";
contract LiquidityPartySwapAdapter is ISwapAdapter {
using SafeERC20 for IERC20;
// Forge lint wants immutables to be all caps. Slither wants them to be
// mixed case. Why do we care about pedantic linters? The Solidity style
// guide mentions "constants" but never "immutables." Faced with an
// irresolvable linter conflict, I chose to disable the slither linter,
// since its detection of immutables as constants seems to be broken.
// slither-disable-next-line naming-convention
IPartyPlanner public immutable PLANNER;
// slither-disable-next-line naming-convention
IPartyInfo public immutable INFO;
constructor(IPartyPlanner planner, IPartyInfo info) {
PLANNER = planner;
INFO = info;
}
function price(
bytes32 poolId,
address sellToken,
address buyToken,
uint256[] memory specifiedAmounts
) external view override returns (Fraction[] memory prices) {
IPartyPool pool = _poolFromId(poolId);
(uint256 indexIn, uint256 indexOut) =
_tokenIndexes(pool, sellToken, buyToken);
prices = new Fraction[](specifiedAmounts.length);
for (uint256 i = 0; i < specifiedAmounts.length; i++) {
uint256 amount = specifiedAmounts[i];
if (amount == 0) {
// Marginal price support
prices[i] = _marginalPrice(pool, indexIn, indexOut);
} else {
// Regular slippage calculation
// slither-disable-next-line unused-return calls-loop
(
uint256 amountIn,
uint256 amountOut, /*uint256 inFee*/
) = pool.swapAmounts(indexIn, indexOut, amount, 0);
prices[i].numerator = amountOut;
prices[i].denominator = amountIn;
}
}
}
function swap(
bytes32 poolId,
address sellToken,
address buyToken,
OrderSide,
/*side*/
uint256 specifiedAmount
) external returns (Trade memory trade) {
// Setup
address swapper = msg.sender;
IPartyPool pool = _poolFromId(poolId);
// This require should never trigger if the substreams module correctly
// removes components that were killed.
if (!INFO.working(pool)) {
revert Unavailable("LiqP pool not working");
}
(uint256 indexIn, uint256 indexOut) =
_tokenIndexes(pool, sellToken, buyToken);
// Transfer and Swap
uint256 startingGas = gasleft();
IERC20(sellToken)
.safeTransferFrom(swapper, address(pool), specifiedAmount);
// slither-disable-next-line unused-return
try pool.swap(
address(0),
Funding.PREFUNDING,
swapper,
indexIn,
indexOut,
specifiedAmount,
0,
0,
false,
""
) returns (
uint256 amountIn, uint256 amountOut, uint256 inFee
) {
uint256 endingGas = gasleft();
uint256 gasUsed = startingGas - endingGas;
Fraction memory poolPrice = _marginalPrice(pool, indexIn, indexOut);
console2.log("Successfully swapped", amountOut);
// forge-lint: disable-next-line(named-struct-fields)
return Trade(amountOut, gasUsed, poolPrice);
} catch (bytes memory reason) {
bytes32 hash = keccak256(reason);
if (hash == keccak256("swap: input too small after fee")) {
revert TooSmall(0);
} else if (
hash == keccak256("swap: transfer exceeds max")
|| hash
== keccak256("LMSR: a/b too large (would overflow exp)")
|| hash == keccak256("swap: transfer exceeds max")
) {
revert LimitExceeded(0); // todo size
} else if (hash == keccak256("killed")) {
revert Unavailable("pool has been permanently killed");
} else if (hash == keccak256("LMSR: size metric zero")) {
revert Unavailable("pool currently has no LP assets");
} else if (hash == keccak256("LMSR: limitPrice <= current price")) {
revert InvalidOrder("limit price is below current price");
} else if (
hash == keccak256("LMSR: ratio<=0") // invalid limit price
) {
revert InvalidOrder("limit price cannot be negative");
} else {
console2.log("Unhandled error", string(reason));
// re-raise
assembly {
revert(add(reason, 0x20), mload(reason))
}
}
}
}
function getLimits(bytes32 poolId, address sellToken, address buyToken)
external
view
returns (uint256[] memory limits)
{
// We arbitrarily limit the amounts like Uniswap V2 does, to make the
// test cases work. There is no theoretical limit on the input amount.
// forge-lint: disable-next-line(unsafe-typecast)
address pool = address(bytes20(poolId));
limits = new uint256[](2);
// input token limit: Theoretically unlimited, but artificially limited
// here to appease Tycho's test cases. Instead of estimating actual
// input limits based on a maximum target slippage, we merely return the
// current
// inventory of input token. Even for large stablecoin pools with a
// kappa near 1, this input amount should result in an "unreasonably"
// high slippage:
// Pool Size => Slippage for inputAmount=reserveBalance and kappa=1
// 2 => 33.7%
// 10 => 9.2%
// 25 => 3.9%
// 50 => 2.1%
// See the commented-out method below for an exact computation of the
// maximum input amount for a given pool configuration and target
// slippage.
limits[0] = IERC20(sellToken).balanceOf(pool);
// output token limit: the pool's current balance (an overestimate)
limits[1] = IERC20(buyToken).balanceOf(pool);
}
function getCapabilities(
bytes32,
/*poolId*/
address,
/*sellToken*/
address /*buyToken*/
)
external
pure
returns (Capability[] memory capabilities)
{
capabilities = new Capability[](3);
capabilities[0] = Capability.SellOrder;
capabilities[1] = Capability.PriceFunction;
capabilities[2] = Capability.MarginalPrice;
return capabilities;
}
function getTokens(bytes32 poolId)
external
view
returns (address[] memory tokens)
{
IPartyPool pool = _poolFromId(poolId);
return pool.allTokens();
}
function getPoolIds(uint256 offset, uint256 limit)
external
view
returns (bytes32[] memory ids)
{
IPartyPool[] memory pools = PLANNER.getAllPools(offset, limit);
ids = new bytes32[](pools.length);
for (uint256 i = 0; i < pools.length; i++) {
ids[i] = bytes32(uint256(uint160(address(pools[i]))));
}
}
//
// Internal Helpers
//
uint256 private constant NONE = type(uint256).max;
/// @dev Liquidity Party pools identify tokens by index rather than address,
/// saving 5200 gas per swap.
function _tokenIndexes(IPartyPool pool, address sellToken, address buyToken)
internal
view
returns (uint256 indexIn, uint256 indexOut)
{
indexIn = NONE;
indexOut = NONE;
address[] memory tokens = pool.allTokens();
uint256 numTokens = pool.numTokens();
for (uint256 i = 0; i < numTokens; i++) {
if (tokens[i] == sellToken) {
indexIn = i;
} else if (tokens[i] == buyToken) {
indexOut = i;
}
}
// This should never happen if the token metadata was correctly loaded
// by substreams
require(indexIn != NONE && indexOut != NONE, "tokens not in pool");
}
function _marginalPrice(IPartyPool pool, uint256 indexIn, uint256 indexOut)
internal
view
returns (Fraction memory poolPrice)
{
// Liquidity Party prices are Q128.128 fixed point format
// slither-disable-next-line calls-loop
uint256 price128x128 = INFO.price(pool, indexIn, indexOut);
uint256 feePpm = pool.fee(indexIn, indexOut);
// price128x128 *= 1_000_000 - feePpm;
// price128x128 /= 1_000_000;
// forge-lint: disable-next-line(unsafe-typecast,named-struct-fields)
return Fraction(price128x128, 1 << 128);
}
function _poolFromId(bytes32 poolId) internal pure returns (IPartyPool) {
// forge-lint: disable-next-line(unsafe-typecast)
return IPartyPool(address(bytes20(poolId)));
}
}

View File

@@ -0,0 +1,25 @@
author:
name: Tim Olson
email: tim@dexorder.com
constants:
# This is our median gas cost for a 20-asset pool. Gas varies by pool size from 120k to 200k.
protocol_gas: 147238
capabilities:
- SellSide
- PriceFunction
- MarginalPrice
contract: LiquidityPartySwapAdapter.sol
# Deployment instances used to generate chain specific bytecode.
instances:
- chain:
name: sepolia
id: 11155111
arguments:
- "0x77C29B1790D18A3AD269BcE09b7dB1074911Dcb6" # PartyPlanner
- "0x784BA6cD19B484bEE9Cee880B18b57fC6e8b2D5c" # PartyInfo
# We do implement getPoolIds() and getTokens(), so explicit swap tests are not needed.
tests: {}

View File

@@ -12,7 +12,6 @@ contract AdapterTest is Test, ISwapAdapterTypes {
using FractionMath for Fraction;
using EfficientERC20 for IERC20;
uint256 constant pricePrecision = 10e24;
string[] public stringPctgs = ["0%", "0.1%", "50%", "100%"];
// @notice Test the behavior of a swap adapter for a list of pools
@@ -44,9 +43,10 @@ contract AdapterTest is Test, ISwapAdapterTypes {
}
// Prices should:
// 1. Be monotonic decreasing
// 1. Be monotonic decreasing (within rounding tolerance)
// 2. Be positive
// 3. Always be >= the executed price and >= the price after the swap
// (within rounding tolerance)
function testPricesForPair(
ISwapAdapter adapter,
bytes32 poolId,
@@ -76,7 +76,10 @@ contract AdapterTest is Test, ISwapAdapterTypes {
Fraction[] memory prices =
adapter.price(poolId, tokenIn, tokenOut, amounts);
assertGt(
fractionToInt(prices[0]),
fractionToInt(prices[0])
// within rounding tolerance
* (amounts[amounts.length - 1] + 1)
/ amounts[amounts.length - 1],
fractionToInt(prices[prices.length - 1]),
"Price at limit should be smaller than price at 0"
);
@@ -92,7 +95,6 @@ contract AdapterTest is Test, ISwapAdapterTypes {
uint256 priceAtZero = fractionToInt(prices[0]);
console2.log("TEST: Price at 0: %d", priceAtZero);
Trade memory trade;
deal(tokenIn, address(this), 5 * amounts[amounts.length - 1]);
uint256 initialState = vm.snapshot();
@@ -104,51 +106,94 @@ contract AdapterTest is Test, ISwapAdapterTypes {
amounts[j]
);
uint256 priceAtAmount = fractionToInt(prices[j]);
// We allow the assertions to tolerate rounding errors
// not greater than `1/amounts[j]`
uint256 toleranceDenominator = amounts[j];
console2.log("TEST: Swapping %d of %s", amounts[j], tokenIn);
trade = adapter.swap(
try adapter.swap(
poolId, tokenIn, tokenOut, OrderSide.Sell, amounts[j]
);
uint256 executedPrice =
trade.calculatedAmount * pricePrecision / amounts[j];
) returns (
Trade memory trade
) {
uint256 executedPrice = Fraction(
trade.calculatedAmount, amounts[j]
).toQ128x128();
uint256 priceAfterSwap = fractionToInt(trade.price);
console2.log("TEST: - Executed price: %d", executedPrice);
console2.log("TEST: - Price at amount: %d", priceAtAmount);
console2.log("TEST: - Price after swap: %d", priceAfterSwap);
if (hasPriceImpact) {
assertGe(
assertGeTol(
executedPrice,
priceAtAmount,
"Price should be greated than executed price."
toleranceDenominator,
"Price should be greater than executed price."
);
assertGt(
assertGtTol(
executedPrice,
priceAfterSwap,
toleranceDenominator,
"Executed price should be greater than price after swap."
);
assertGt(
assertGtTol(
priceAtZero,
executedPrice,
"Price should be greated than price after swap."
toleranceDenominator,
"Price should be greater than price after swap."
);
} else {
assertGe(
assertGeTol(
priceAtZero,
priceAfterSwap,
toleranceDenominator,
"Executed price should be or equal to price after swap."
);
assertGe(
assertGeTol(
priceAtZero,
priceAtAmount,
toleranceDenominator,
"Executed price should be or equal to price after swap."
);
assertGe(
assertGeTol(
priceAtZero,
executedPrice,
toleranceDenominator,
"Price should be or equal to price after swap."
);
}
} catch (bytes memory reason) {
(bool isTooSmall, uint256 lowerLimit) =
decodeTooSmallError(reason);
(bool isLimitExceeded, uint256 limit) =
decodeLimitExceededError(reason);
if (isTooSmall) {
// We allow a TooSmall exception to occur for the smallest
// amount only.
if (j == 1) {
console2.log(
"TEST: TooSmall exception tolerated for smallest amount"
);
} else {
revert(
"TEST: TooSmall thrown for a significantly sized amount"
);
}
} else if (isLimitExceeded) {
// We never allow LimitExceeded to be thrown, since all
// amounts should be within the stated limits.
revert(
"TEST: LimitExceeded thrown for an amount within limits"
);
} else {
// any other revert reason bubbles up
assembly {
revert(add(reason, 32), mload(reason))
}
}
}
vm.revertTo(initialState);
}
@@ -185,24 +230,87 @@ contract AdapterTest is Test, ISwapAdapterTypes {
);
uint256[] memory aboveLimitArray = new uint256[](1);
aboveLimitArray[0] = amountAboveLimit;
bool supportsLimitExceeded = false;
try adapter.price(poolId, tokenIn, tokenOut, aboveLimitArray) {
revert(
"Pool shouldn't be able to fetch prices above the sell limit"
);
} catch Error(string memory s) {
} catch (bytes memory reason) {
(bool isTooSmall, uint256 lowerLimit) = decodeTooSmallError(reason);
(bool isLimitExceeded, uint256 limit) =
decodeLimitExceededError(reason);
if (isLimitExceeded) {
supportsLimitExceeded = true;
console2.log(
"TEST: Expected error when fetching price above limit: %s", s
"TEST: LimitExceeded supported! Thrown when fetching price above limit: %i",
limit
);
} else if (isTooSmall) {
console2.log(
"TEST: UNEXPECTED TooSmall error when fetching price below limit: %i",
lowerLimit
);
revert TooSmall(lowerLimit);
} else if (
reason.length >= 4
&& bytes4(reason) == bytes4(keccak256("Error(string)"))
) {
string memory s = abi.decode(
sliceBytes(reason, 4, reason.length - 4), (string)
);
console2.log(
"TEST: Expected error when fetching price above limit: %s",
s
);
} else {
// Unexpected error type: re-raise.
assembly {
revert(add(reason, 32), mload(reason))
}
}
}
try adapter.swap(
poolId, tokenIn, tokenOut, OrderSide.Sell, aboveLimitArray[0]
) {
revert("Pool shouldn't be able to swap above the sell limit");
} catch Error(string memory s) {
} catch (bytes memory reason) {
(bool isTooSmall, uint256 lowerLimit) = decodeTooSmallError(reason);
(bool isLimitExceeded, uint256 limit) =
decodeLimitExceededError(reason);
if (isLimitExceeded) {
supportsLimitExceeded = true;
console2.log(
"TEST: LimitExceeded supported! Thrown when swapping above limit: %i",
limit
);
} else if (isTooSmall) {
console2.log(
"TEST: UNEXPECTED TooSmall error when swapping above limit: %i",
lowerLimit
);
revert TooSmall(lowerLimit);
} else if (
reason.length >= 4
&& bytes4(reason) == bytes4(keccak256("Error(string)"))
) {
string memory s = abi.decode(
sliceBytes(reason, 4, reason.length - 4), (string)
);
console2.log(
"TEST: Expected error when swapping above limit: %s", s
);
} else {
// Unexpected error type: re-raise.
assembly {
revert(add(reason, 32), mload(reason))
}
}
}
if (supportsLimitExceeded) {
console.log(unicode"Adapter supports LimitExceeded ✓");
}
}
@@ -244,7 +352,7 @@ contract AdapterTest is Test, ISwapAdapterTypes {
pure
returns (uint256)
{
return price.numerator * pricePrecision / price.denominator;
return price.toQ128x128();
}
function hasCapability(
@@ -259,4 +367,85 @@ contract AdapterTest is Test, ISwapAdapterTypes {
return false;
}
//
// Custom Error Helper Functions
// TODO should we expose these in a better location / library for solvers to
// also leverage?
// Helper function to check if error is TooSmall and decode it
function decodeTooSmallError(bytes memory reason)
internal
pure
returns (bool, uint256)
{
if (reason.length >= 4 && bytes4(reason) == TooSmall.selector) {
if (reason.length == 36) {
uint256 lowerLimit =
abi.decode(sliceBytes(reason, 4, 32), (uint256));
return (true, lowerLimit);
}
}
return (false, 0);
}
// Helper function to check if error is LimitExceeded and decode it
function decodeLimitExceededError(bytes memory reason)
internal
pure
returns (bool, uint256)
{
if (reason.length >= 4 && bytes4(reason) == LimitExceeded.selector) {
if (reason.length == 36) {
uint256 limit = abi.decode(sliceBytes(reason, 4, 32), (uint256));
return (true, limit);
}
}
return (false, 0);
}
// Helper function to slice bytes
function sliceBytes(bytes memory data, uint256 start, uint256 length)
internal
pure
returns (bytes memory)
{
bytes memory result = new bytes(length);
for (uint256 i = 0; i < length; i++) {
result[i] = data[start + i];
}
return result;
}
//
// Helper functions to assert with tolerance
//
function assertGeTol(
uint256 a,
uint256 b,
uint256 toleranceDenominator,
string memory errorMessage
) internal {
// The tolerance is `1 / toleranceDenominator`, so we increase the value
// of `a` by this amount. adjustedA = a * (denom+1) / denom
uint256 adjustedA = FractionMath.mulDiv(
a, toleranceDenominator + 1, toleranceDenominator
);
assertGe(adjustedA, b, errorMessage);
}
function assertGtTol(
uint256 a,
uint256 b,
uint256 toleranceDenominator,
string memory errorMessage
) internal {
// The tolerance is `1 / toleranceDenominator`, so we increase the value
// of `a` by this amount. adjustedA = a * (denom+1) / denom
uint256 adjustedA = FractionMath.mulDiv(
a, toleranceDenominator + 1, toleranceDenominator
);
assertGt(adjustedA, b, errorMessage);
}
}
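For a concrete sense of the tolerance helpers above: with `a = 1_000_000` and `toleranceDenominator = 1_000`, `adjustedA = 1_000_000 * 1_001 / 1_000 = 1_001_000`, so `assertGeTol` still passes when `b` exceeds `a` by up to one part in the tolerance denominator (0.1% here).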

View File

@@ -0,0 +1,224 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity ^0.8.27;
import "forge-std/console2.sol"; // todo
import {
IERC20
} from "../lib/openzeppelin-contracts/contracts/token/ERC20/IERC20.sol";
import {
IERC20Metadata
} from "../lib/openzeppelin-contracts/contracts/token/ERC20/extensions/IERC20Metadata.sol";
import {FractionMath} from "../src/libraries/FractionMath.sol";
import {IPartyInfo} from "../src/liquidityparty/IPartyInfo.sol";
import {IPartyPlanner} from "../src/liquidityparty/IPartyPlanner.sol";
import {IPartyPool} from "../src/liquidityparty/IPartyPool.sol";
import {
LiquidityPartySwapAdapter
} from "../src/liquidityparty/LiquidityPartySwapAdapter.sol";
import {AdapterTest} from "./AdapterTest.sol";
contract LiquidityPartyFunctionTest is AdapterTest {
using FractionMath for Fraction;
IPartyPlanner internal constant PLANNER =
IPartyPlanner(0x42977f565971F6D288a05ddEbC87A17276F71A29);
IPartyInfo internal constant INFO =
IPartyInfo(0x605F803cD27F5c1fa01440B2cbd5D3E4Cf7EE850);
address internal constant MINT_IMPL =
0xA0375403921e9B357E1BeD57bef3fA3FCE80acd0;
address internal constant SWAP_IMPL =
0x6aA001e87F86E83bc4D569883332882cb47E2A13;
IPartyPool internal constant POOL =
IPartyPool(0x2A804e94500AE379ee0CcC423a67B07cc0aF548C);
bytes32 internal constant POOL_ID = bytes32(bytes20(address(POOL)));
uint256 internal constant FORK_BLOCK = 23978797; // block in which the pool
// was created
LiquidityPartySwapAdapter internal adapter;
uint256 internal constant TEST_ITERATIONS = 10;
address[] internal tokens;
address internal constant USDT = 0xdAC17F958D2ee523a2206206994597C13D831ec7;
address internal constant USDC = 0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48;
address internal constant WBTC = 0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599;
address internal constant WETH = 0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2;
address internal constant UNI = 0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984;
address internal constant WSOL = 0xD31a59c85aE9D8edEFeC411D448f90841571b89c;
address internal constant TRX = 0x50327c6c5a14DCaDE707ABad2E27eB517df87AB5;
address internal constant AAVE = 0x7Fc66500c84A76Ad7e9c93437bFc5Ac33E2DDaE9;
address internal constant PEPE = 0x6982508145454Ce325dDbE47a25d4ec3d2311933;
address internal constant SHIB = 0x95aD61b0a150d79219dCF64E1E6Cc01f0B64C4cE;
address private constant INPUT_TOKEN = WBTC;
uint8 private constant INPUT_INDEX = 2;
address private constant OUTPUT_TOKEN = SHIB;
uint8 private constant OUTPUT_INDEX = 9;
function setUp() public {
tokens = new address[](10);
tokens[0] = USDT;
tokens[1] = USDC;
tokens[2] = WBTC;
tokens[3] = WETH;
tokens[4] = UNI;
tokens[5] = WSOL;
tokens[6] = TRX;
tokens[7] = AAVE;
tokens[8] = PEPE;
tokens[9] = SHIB;
vm.createSelectFork(vm.rpcUrl("mainnet"), FORK_BLOCK);
adapter = new LiquidityPartySwapAdapter(PLANNER, INFO);
vm.label(address(PLANNER), "PartyPlanner");
vm.label(address(INFO), "PartyInfo");
vm.label(address(MINT_IMPL), "PartyPoolMintImpl");
vm.label(address(SWAP_IMPL), "PartyPoolSwapImpl");
vm.label(address(POOL), "PartyPool");
vm.label(address(adapter), "LiquidityPartySwapAdapter");
for (uint256 i = 0; i < tokens.length; i++) {
vm.label(address(tokens[i]), IERC20Metadata(tokens[i]).symbol());
}
}
function testPrice() public view {
uint256[] memory amounts = new uint256[](3);
uint256 balance = IERC20(INPUT_TOKEN).balanceOf(address(POOL));
amounts[0] = 2; // cannot use 1: the fee will round up and take
// everything, resulting in a zero-output reversion
amounts[1] = balance;
amounts[2] = balance * 2;
Fraction[] memory prices =
adapter.price(POOL_ID, INPUT_TOKEN, OUTPUT_TOKEN, amounts);
for (uint256 i = 0; i < prices.length; i++) {
assertGt(prices[i].numerator, 0);
assertGt(prices[i].denominator, 0);
}
}
function testPriceDecreasing() public view {
uint256[] memory limits =
adapter.getLimits(POOL_ID, INPUT_TOKEN, OUTPUT_TOKEN);
uint256[] memory amounts = new uint256[](TEST_ITERATIONS);
for (uint256 i = 0; i < TEST_ITERATIONS; i++) {
// The first entry will be a zero amount which returns the current
// marginal price.
amounts[i] = limits[0] * i / (TEST_ITERATIONS - 1);
}
Fraction[] memory prices =
adapter.price(POOL_ID, INPUT_TOKEN, OUTPUT_TOKEN, amounts);
for (uint256 i = 0; i < TEST_ITERATIONS - 1; i++) {
console2.log("compare price", prices[i].numerator);
console2.log(" ", prices[i].denominator);
console2.log(" > ", prices[i + 1].numerator);
console2.log(" ", prices[i + 1].denominator);
console2.log();
assertEq(prices[i].compareFractions(prices[i + 1]), 1);
}
}
function testSwapFuzz(uint256 amount) public {
uint256[] memory limits =
adapter.getLimits(POOL_ID, INPUT_TOKEN, OUTPUT_TOKEN);
vm.assume(amount > 1); // 1 will not work because we take fee-on-input
// and round up, leaving nothing to trade
vm.assume(amount <= limits[0]);
deal(INPUT_TOKEN, address(this), amount);
IERC20(INPUT_TOKEN).approve(address(adapter), amount);
uint256 usdtBalance = IERC20(INPUT_TOKEN).balanceOf(address(this));
uint256 wethBalance = IERC20(OUTPUT_TOKEN).balanceOf(address(this));
Trade memory trade = adapter.swap(
POOL_ID, INPUT_TOKEN, OUTPUT_TOKEN, OrderSide.Sell, amount
);
if (trade.calculatedAmount > 0) {
assertEq(
amount,
usdtBalance - IERC20(INPUT_TOKEN).balanceOf(address(this))
);
assertEq(
trade.calculatedAmount,
IERC20(OUTPUT_TOKEN).balanceOf(address(this)) - wethBalance
);
}
}
function testSwapSellIncreasing() public {
uint256[] memory limits =
adapter.getLimits(POOL_ID, INPUT_TOKEN, OUTPUT_TOKEN);
uint256[] memory amounts = new uint256[](TEST_ITERATIONS);
Trade[] memory trades = new Trade[](TEST_ITERATIONS);
for (uint256 i = 0; i < TEST_ITERATIONS; i++) {
amounts[i] = limits[0] * (i + 1) / (TEST_ITERATIONS - 1);
uint256 beforeSwap = vm.snapshot();
deal(INPUT_TOKEN, address(this), amounts[i]);
IERC20(INPUT_TOKEN).approve(address(adapter), amounts[i]);
trades[i] = adapter.swap(
POOL_ID, INPUT_TOKEN, OUTPUT_TOKEN, OrderSide.Sell, amounts[i]
);
vm.revertTo(beforeSwap);
}
for (uint256 i = 0; i < TEST_ITERATIONS - 1; i++) {
assertLe(trades[i].calculatedAmount, trades[i + 1].calculatedAmount);
assertEq(
trades[i].price.denominator, trades[i + 1].price.denominator
); // must share a basis
assertGe(trades[i].price.numerator, trades[i + 1].price.numerator);
}
}
function testGetLimits() public view {
uint256[] memory limits =
adapter.getLimits(POOL_ID, INPUT_TOKEN, OUTPUT_TOKEN);
assert(limits.length == 2);
assert(limits[0] > 0);
assert(limits[1] > 0);
}
function testGetTokens() public view {
address[] memory adapterTokens = adapter.getTokens(POOL_ID);
for (uint256 i = 0; i < tokens.length; i++) {
assertEq(adapterTokens[i], tokens[i]);
}
}
function testGetPoolIds() public view {
uint256 offset = 0;
uint256 limit = 10;
bytes32[] memory poolIds = adapter.getPoolIds(offset, limit);
assertLe(
poolIds.length,
limit,
"Number of pool IDs should be less than or equal to limit"
);
if (poolIds.length > 0) {
assertGt(uint256(poolIds[0]), 0, "Pool ID should be greater than 0");
}
}
// Many of the tests above seem entirely redundant with runPoolBehaviourTest
// :shrug:
function testLiquidityPartyPoolBehaviour() public {
bytes32[] memory poolIds = new bytes32[](1);
poolIds[0] = POOL_ID;
runPoolBehaviourTest(adapter, poolIds);
}
}

View File

@@ -192,6 +192,8 @@ message StorageChanges {
bytes address = 1;
// The contract's storage changes
repeated ContractSlot slots = 2;
// [optional] The contract's balance change
optional bytes native_balance = 3;
}
// Aggregate entities

File diff suppressed because it is too large

View File

@@ -8,7 +8,8 @@ glob = "0.3.0"
miette = { version = "7.6.0", features = ["fancy"] }
# Logging & Tracing
tracing = "0.1.37"
tycho-simulation = { git = "https://github.com/propeller-heads/tycho-simulation.git", rev = "f73c2ef28328abdde791edf1fb21748f78dbee6a", features = ["evm"] }
tycho-simulation = { git = "https://github.com/propeller-heads/tycho-simulation.git", tag = "0.167.0", features = ["evm"] }
tycho-execution = "0.129.0"
num-bigint = "0.4"
num-traits = "0.2"
num-rational = "0.4.2"

View File

@@ -1,8 +1,29 @@
# Protocol Testing
Rust-based integration testing framework for Tycho protocol implementations.
Rust-based integration testing framework for Tycho protocol implementations. See our full
docs [here](https://docs.propellerheads.xyz/tycho/for-dexs/protocol-integration/3.-testing).
## How to Run
## How to Run Locally
```bash
# Setup Environment Variables
export RPC_URL=..
export SUBSTREAMS_API_TOKEN=..
export RUST_LOG=protocol_testing=info,tycho_client=error
# Build Substreams wasm for BalancerV2
cd substreams
cargo build --release --package ethereum-balancer-v2 --target wasm32-unknown-unknown
cd ../protocol-testing
# Run Postgres DB using Docker compose
docker compose -f ./docker-compose.yaml up -d db
# Run test
cargo run -- --package ethereum-balancer-v2
```
## How to Run with Docker
```bash
# Build the images, from the project root dir
@@ -20,72 +41,3 @@ docker compose up -d && docker compose logs test-runner --follow
# Clean up
docker compose down
```
## Test Output Formatting
The test runner outputs results similar to:
```
Running 2 tests ...
--------------------------------
TEST 1: balancer_weighted_pool_test
✅ Protocol component validation passed.
✅ Token balance validation passed.
Amount out for 0x5c6ee304399dbdb9c8ef030ab642b10820db8f56000200000000000000000014: calculating for tokens "BAL"/"WETH"
Spot price "BAL"/"WETH": 0.123456
✅ Simulation validation passed.
✅ balancer_weighted_pool_test passed.
--------------------------------
Tests finished!
RESULTS: 2/2 passed.
```
## Module-specific Logging
```bash
# Enable debug logs for specific modules
export RUST_LOG=protocol_testing=debug,tycho_client=info
# Disable logs for noisy modules
export RUST_LOG=info,hyper=warn,reqwest=warn
```
## Running with Different Log Levels
```bash
# Standard test run with progress output
RUST_LOG=info cargo run -- --package uniswap-v2
# Detailed debug output
RUST_LOG=debug cargo run -- --package uniswap-v2
# Minimal output (errors only)
RUST_LOG=error cargo run -- --package uniswap-v2
```
## Test Configuration
Tests are configured via YAML files located in the substreams package directory:
- Test configuration: `../substreams/<package>/integration_test.tycho.yaml`
- Substreams configuration: `../substreams/<package>/substreams.yaml`
## What the Tests Do
1. **Component Validation**: Verifies that all expected protocol components are present in Tycho after indexing
2. **State Validation**: Compares indexed component states against expected values
3. **Balance Verification**: Validates token balances by querying the blockchain directly (can be skipped)
4. **Simulation Testing**: Runs Tycho simulation engine to verify protocol functionality
## Troubleshooting
- **Database Connection Issues**: Ensure PostgreSQL is running via `docker-compose up -d`
- **RPC Errors**: Verify `RPC_URL` is set and accessible
- **Missing Substreams**: Check that the package directory exists in `../substreams/<package>/`
- **Build Failures**: Ensure all dependencies are installed and environment variables are set

View File

@@ -1,5 +1,8 @@
services:
db:
build:
context: .
dockerfile: postgres.Dockerfile
image: protocol-testing-db:latest
restart: "always"
healthcheck:

View File

@@ -149,6 +149,6 @@ pub struct IntegrationTestsConfig {
pub skip_balance_check: bool,
pub protocol_type_names: Vec<String>,
pub protocol_system: String,
pub module_name: String,
pub module_name: Option<String>,
pub tests: Vec<IntegrationTest>,
}

View File

@@ -10,17 +10,15 @@ use alloy::{primitives::Keccak256, sol_types::SolValue};
use miette::{IntoDiagnostic, WrapErr};
use num_bigint::BigUint;
use serde_json::json;
use tycho_execution::encoding::{
errors::EncodingError,
evm::{encoder_builders::TychoRouterEncoderBuilder, utils::bytes_to_address},
models::{EncodedSolution, NativeAction, Solution, SwapBuilder, Transaction, UserTransferType},
};
use tycho_simulation::{
evm::protocol::u256_num::biguint_to_u256,
protocol::models::ProtocolComponent,
tycho_common::{dto::Chain, Bytes},
tycho_execution::encoding::{
errors::EncodingError,
evm::{encoder_builders::TychoRouterEncoderBuilder, utils::bytes_to_address},
models::{
EncodedSolution, NativeAction, Solution, SwapBuilder, Transaction, UserTransferType,
},
},
};
use crate::execution::EXECUTOR_ADDRESS;

View File

@@ -13,6 +13,7 @@ use alloy::{
use miette::{miette, IntoDiagnostic, WrapErr};
use num_bigint::BigUint;
use tracing::info;
use tycho_execution::encoding::models::Solution;
use tycho_simulation::{
evm::protocol::u256_num::{biguint_to_u256, u256_to_biguint},
tycho_common::{
@@ -23,7 +24,6 @@ use tycho_simulation::{
allowance_slot_detector::{AllowanceSlotDetectorConfig, EVMAllowanceSlotDetector},
balance_slot_detector::{BalanceSlotDetectorConfig, EVMBalanceSlotDetector},
},
tycho_execution::encoding::models::Solution,
};
use crate::rpc::RPCProvider;
@@ -236,19 +236,18 @@ pub async fn setup_router_overwrites(
/// - RPC queries for storage detection fail
async fn setup_user_overwrites(
solution: &Solution,
transaction: &tycho_simulation::tycho_execution::encoding::models::Transaction,
transaction: &tycho_execution::encoding::models::Transaction,
user_address: Address,
rpc_url: String,
block: &Block,
) -> miette::Result<AddressHashMap<AccountOverride>> {
let mut overwrites = AddressHashMap::default();
// Add ETH balance override for the user to ensure they have enough gas funds
let mut eth_balance = U256::from_str("100000000000000000000").unwrap(); // 100 ETH
let token_address = Address::from_slice(&solution.given_token[..20]);
// If given token is ETH, add the given amount to the balance
// If given token is ETH, add the given amount to the balance + 100 ETH for gas
if solution.given_token == Bytes::zero(20) {
eth_balance += biguint_to_u256(&solution.given_amount);
let eth_balance = biguint_to_u256(&solution.given_amount) +
U256::from_str("100000000000000000000").unwrap(); // given_amount + 100 ETH for gas
overwrites.insert(user_address, AccountOverride::default().with_balance(eth_balance));
// if the given token is not ETH, do balance and allowance slots overwrites
} else {
let detector = EVMBalanceSlotDetector::new(BalanceSlotDetectorConfig {
@@ -265,9 +264,9 @@ async fn setup_user_overwrites(
)
.await;
let balance_slot =
if let Some(Ok((_storage_addr, slot))) = results.get(&solution.given_token.clone()) {
slot
let (balance_storage_addr, balance_slot) =
if let Some(Ok((storage_addr, slot))) = results.get(&solution.given_token.clone()) {
(storage_addr, slot)
} else {
return Err(miette!("Couldn't find balance storage slot for token {token_address}"));
};
@@ -287,39 +286,66 @@ async fn setup_user_overwrites(
)
.await;
let allowance_slot = if let Some(Ok((_storage_addr, slot))) =
let (allowance_storage_addr, allowance_slot) = if let Some(Ok((storage_addr, slot))) =
results.get(&solution.given_token.clone())
{
slot
(storage_addr, slot)
} else {
return Err(miette!("Couldn't find allowance storage slot for token {token_address}"));
};
// Use the exact given amount for balance and allowance (no buffer, no max)
let token_balance = biguint_to_u256(&solution.given_amount);
let token_allowance = biguint_to_u256(&solution.given_amount);
let balance_storage_address = Address::from_slice(&balance_storage_addr[..20]);
let allowance_storage_address = Address::from_slice(&allowance_storage_addr[..20]);
// Apply balance and allowance overrides
// If both storage addresses are the same, combine them into one override
if balance_storage_address == allowance_storage_address {
overwrites.insert(
token_address,
balance_storage_address,
AccountOverride::default().with_state_diff(vec![
(
alloy::primitives::B256::from_slice(allowance_slot),
alloy::primitives::B256::from_slice(&U256::MAX.to_be_bytes::<32>()),
alloy::primitives::B256::from_slice(balance_slot),
alloy::primitives::B256::from_slice(&token_balance.to_be_bytes::<32>()),
),
(
alloy::primitives::B256::from_slice(balance_slot),
alloy::primitives::B256::from_slice(
&biguint_to_u256(&solution.given_amount).to_be_bytes::<32>(),
),
alloy::primitives::B256::from_slice(allowance_slot),
alloy::primitives::B256::from_slice(&token_allowance.to_be_bytes::<32>()),
),
]),
);
} else {
// Different storage addresses, apply separately
overwrites.insert(
balance_storage_address,
AccountOverride::default().with_state_diff(vec![(
alloy::primitives::B256::from_slice(balance_slot),
alloy::primitives::B256::from_slice(&token_balance.to_be_bytes::<32>()),
)]),
);
overwrites.insert(
allowance_storage_address,
AccountOverride::default().with_state_diff(vec![(
alloy::primitives::B256::from_slice(allowance_slot),
alloy::primitives::B256::from_slice(&token_allowance.to_be_bytes::<32>()),
)]),
);
}
overwrites.insert(user_address, AccountOverride::default().with_balance(eth_balance));
// Add 100 ETH for gas for non-ETH token swaps
let eth_balance = U256::from_str("100000000000000000000").unwrap(); // 100 ETH for gas
overwrites.insert(user_address, AccountOverride::default().with_balance(eth_balance));
}
Ok(overwrites)
}
/// Simulate a trade using eth_call for historical blocks
pub async fn simulate_trade_with_eth_call(
rpc_provider: &RPCProvider,
transaction: &tycho_simulation::tycho_execution::encoding::models::Transaction,
transaction: &tycho_execution::encoding::models::Transaction,
solution: &Solution,
block: &Block,
) -> miette::Result<BigUint> {

View File

@@ -3,6 +3,7 @@ mod config;
mod encoding;
mod execution;
mod rpc;
mod state_registry;
mod test_runner;
mod traces;
mod tycho_rpc;

View File

@@ -0,0 +1,50 @@
use tycho_simulation::{
evm::{
decoder::TychoStreamDecoder,
engine_db::tycho_db::PreCachedDB,
protocol::{
ekubo::state::EkuboState, pancakeswap_v2::state::PancakeswapV2State,
uniswap_v2::state::UniswapV2State, uniswap_v3::state::UniswapV3State,
vm::state::EVMPoolState,
},
},
protocol::models::DecoderContext,
tycho_client::feed::BlockHeader,
};
/// Register decoder based on protocol system. Defaults to EVMPoolState.
/// To add a new protocol, just add a case to the match statement.
pub fn register_decoder_for_protocol(
decoder: &mut TychoStreamDecoder<BlockHeader>,
protocol_system: &str,
decoder_context: DecoderContext,
) -> miette::Result<()> {
match protocol_system {
"uniswap_v2" | "sushiswap_v2" => {
decoder
.register_decoder_with_context::<UniswapV2State>(protocol_system, decoder_context);
}
"pancakeswap_v2" => {
decoder.register_decoder_with_context::<PancakeswapV2State>(
protocol_system,
decoder_context,
);
}
"uniswap_v3" | "pancakeswap_v3" => {
decoder
.register_decoder_with_context::<UniswapV3State>(protocol_system, decoder_context);
}
"ekubo_v2" => {
decoder.register_decoder_with_context::<EkuboState>(protocol_system, decoder_context);
}
// Default to EVMPoolState for all other protocols
_ => {
decoder.register_decoder_with_context::<EVMPoolState<PreCachedDB>>(
protocol_system,
decoder_context,
);
}
}
Ok(())
}

View File

@@ -21,27 +21,22 @@ use num_traits::{Signed, ToPrimitive, Zero};
use postgres::{Client, Error, NoTls};
use tokio::runtime::Runtime;
use tracing::{debug, error, info, warn};
use tycho_execution::encoding::evm::utils::bytes_to_address;
use tycho_simulation::{
evm::{
decoder::TychoStreamDecoder,
engine_db::tycho_db::PreCachedDB,
protocol::{
ekubo::state::EkuboState, pancakeswap_v2::state::PancakeswapV2State,
u256_num::bytes_to_u256, uniswap_v2::state::UniswapV2State,
uniswap_v3::state::UniswapV3State, vm::state::EVMPoolState,
},
},
evm::{decoder::TychoStreamDecoder, protocol::u256_num::bytes_to_u256},
protocol::models::{DecoderContext, Update},
tycho_client::feed::{
synchronizer::{ComponentWithState, Snapshot, StateSyncMessage},
BlockHeader, FeedMessage,
},
tycho_common::{
dto::{Chain, ProtocolComponent, ResponseAccount, ResponseProtocolState},
dto::{
Chain, EntryPointWithTracingParams, ProtocolComponent, ResponseAccount,
ResponseProtocolState, TracingResult,
},
models::token::Token,
Bytes,
},
tycho_execution::encoding::evm::utils::bytes_to_address,
};
use crate::{
@@ -50,6 +45,7 @@ use crate::{
encoding::encode_swap,
execution,
rpc::RPCProvider,
state_registry::register_decoder_for_protocol,
tycho_rpc::TychoClient,
tycho_runner::TychoRunner,
utils::build_spkg,
@@ -241,7 +237,7 @@ impl TestRunner {
test.stop_block,
&config.protocol_type_names,
&config.protocol_system,
&config.module_name,
config.module_name.clone(),
)
.wrap_err("Failed to run Tycho")?;
@@ -274,7 +270,7 @@ impl TestRunner {
self.validate_token_balances(
&component_tokens,
&response_protocol_states_by_id,
test.start_block,
test.stop_block,
)?;
info!("All token balances match the values found onchain")
}
@@ -376,6 +372,16 @@ impl TestRunner {
.into_diagnostic()
.wrap_err("Failed to get contract state")?;
let traced_entry_points = self
.runtime
.block_on(tycho_client.get_traced_entry_points(
protocol_system,
expected_component_ids.clone(),
chain,
))
.into_diagnostic()
.wrap_err("Failed to get trace points")?;
// Create a map of component IDs to components for easy lookup
let mut components_by_id: HashMap<String, ProtocolComponent> = protocol_components
.clone()
@@ -393,6 +399,7 @@ impl TestRunner {
debug!("Found {} protocol components", components_by_id.len());
debug!("Found {} protocol states", protocol_states_by_id.len());
debug!("Found {} traced entry points", traced_entry_points.len());
let adapter_contract_path;
let mut adapter_contract_path_str: Option<&str> = None;
@@ -436,36 +443,7 @@ impl TestRunner {
if let Some(vm_adapter_path) = adapter_contract_path_str {
decoder_context = decoder_context.vm_adapter_path(vm_adapter_path);
}
match protocol_system {
"uniswap_v2" | "sushiswap_v2" => {
decoder.register_decoder_with_context::<UniswapV2State>(
protocol_system,
decoder_context,
);
}
"pancakeswap_v2" => {
decoder.register_decoder_with_context::<PancakeswapV2State>(
protocol_system,
decoder_context,
);
}
"uniswap_v3" | "pancakeswap_v3" => {
decoder.register_decoder_with_context::<UniswapV3State>(
protocol_system,
decoder_context,
);
}
"ekubo_v2" => {
decoder
.register_decoder_with_context::<EkuboState>(protocol_system, decoder_context);
}
_ => {
decoder.register_decoder_with_context::<EVMPoolState<PreCachedDB>>(
protocol_system,
decoder_context,
);
}
}
register_decoder_for_protocol(&mut decoder, protocol_system, decoder_context)?;
// Mock a stream message, with only a Snapshot and no deltas
let mut states: HashMap<String, ComponentWithState> = HashMap::new();
@@ -474,18 +452,27 @@ impl TestRunner {
let state = protocol_states_by_id
.get(component_id)
.wrap_err(format!(
"Component {id} does not exist in protocol_states_by_id {protocol_states_by_id:?}"
))?
.wrap_err(format!("No state found for component: {id}"))?
.clone();
let traced_entry_points: Vec<(EntryPointWithTracingParams, TracingResult)> =
traced_entry_points
.get(component_id)
.map(|inner| {
inner
.iter()
.map(|(k, v)| (k.clone(), v.clone()))
.collect::<Vec<_>>()
})
.unwrap_or_default();
let component_with_state = ComponentWithState {
state,
component: component.clone(),
component_tvl: None,
// Neither UniswapV4 with hooks nor certain Balancer pools are currently supported
// for SDK testing
entrypoints: vec![],
entrypoints: traced_entry_points,
};
states.insert(component_id.clone(), component_with_state);
}
@@ -808,7 +795,7 @@ impl TestRunner {
&self,
component_tokens: &HashMap<String, Vec<Token>>,
protocol_states_by_id: &HashMap<String, ResponseProtocolState>,
start_block: u64,
stop_block: u64,
) -> miette::Result<()> {
for (id, component) in protocol_states_by_id.iter() {
let tokens = component_tokens.get(id);
@@ -833,12 +820,11 @@ impl TestRunner {
.block_on(self.rpc_provider.get_token_balance(
token_address,
component_address,
start_block,
stop_block,
))?;
if balance != node_balance {
return Err(miette!(
"Token balance mismatch for component {} and token {}",
id,
"Token balance mismatch for component {id} and token {}. Balance: {balance}, Node balance: {node_balance}",
token.symbol
));
}

View File

@@ -5,8 +5,9 @@ use tycho_simulation::{
tycho_client::{rpc::RPCClient, HttpRPCClient},
tycho_common::{
dto::{
Chain, PaginationParams, ProtocolComponent, ProtocolComponentsRequestBody,
ResponseAccount, ResponseProtocolState, ResponseToken, StateRequestBody, VersionParam,
Chain, EntryPointWithTracingParams, PaginationParams, ProtocolComponent,
ProtocolComponentsRequestBody, ResponseAccount, ResponseProtocolState, ResponseToken,
StateRequestBody, TracedEntryPointRequestBody, TracingResult, VersionParam,
},
models::token::Token,
Bytes,
@@ -153,4 +154,26 @@ impl TychoClient {
Ok(res)
}
/// Gets traced entry points from the RPC server
pub async fn get_traced_entry_points(
&self,
protocol_system: &str,
component_ids: Vec<String>,
chain: Chain,
) -> Result<HashMap<String, Vec<(EntryPointWithTracingParams, TracingResult)>>, RpcError> {
let request_body = TracedEntryPointRequestBody {
protocol_system: protocol_system.to_string(),
chain,
pagination: PaginationParams { page: 0, page_size: 100 },
component_ids: Some(component_ids),
};
let traced_entry_points = self
.http_client
.get_traced_entry_points(&request_body)
.await?;
Ok(traced_entry_points.traced_entry_points)
}
}
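
A minimal sketch of calling this helper from async code, assuming an already-constructed `TychoClient` named `client` inside a function that can propagate `RpcError`; the protocol system and component id below are taken from the PancakeswapV3 test config and are only illustrative:

```rust
// Sketch: fetch traced entry points for one component (illustrative values).
let component_ids = vec!["0x0d9ea0d5e3f400b1df8f695be04292308c041e77".to_string()];
let traced = client
    .get_traced_entry_points("pancakeswap_v3", component_ids, Chain::Ethereum)
    .await?;
for (component_id, entry_points) in &traced {
    println!("{component_id}: {} traced entry points", entry_points.len());
}
```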

View File

@@ -31,7 +31,7 @@ impl TychoRunner {
end_block: u64,
protocol_type_names: &[String],
protocol_system: &str,
module_name: &str,
module_name: Option<String>,
) -> miette::Result<()> {
info!("Running Tycho indexer from block {start_block} to {end_block}...");
@@ -48,7 +48,9 @@ impl TychoRunner {
"--spkg",
spkg_path,
"--module",
module_name,
module_name
.as_deref()
.unwrap_or("map_protocol_changes"),
"--protocol-type-names",
&protocol_type_names.join(","),
"--protocol-system",

45
substreams/Cargo.lock generated
View File

@@ -35,6 +35,12 @@ version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
name = "base64"
version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
[[package]]
name = "bigdecimal"
version = "0.3.1"
@@ -240,7 +246,7 @@ dependencies = [
[[package]]
name = "ethereum-balancer-v2"
version = "0.4.0"
version = "0.4.1"
dependencies = [
"anyhow",
"ethabi 18.0.0",
@@ -250,7 +256,7 @@ dependencies = [
"num-bigint",
"substreams",
"substreams-ethereum",
"tycho-substreams 0.5.0",
"tycho-substreams 0.5.1",
]
[[package]]
@@ -316,6 +322,24 @@ dependencies = [
"tycho-substreams 0.2.2",
]
[[package]]
name = "ethereum-liquidityparty"
version = "0.1.0"
dependencies = [
"anyhow",
"ethabi 18.0.0",
"hex",
"itertools 0.10.5",
"num-bigint",
"prost 0.11.9",
"serde",
"serde-sibor",
"serde_qs",
"substreams",
"substreams-ethereum",
"tycho-substreams 0.2.1 (git+https://github.com/propeller-heads/tycho-protocol-sdk.git?rev=52d5021)",
]
[[package]]
name = "ethereum-maverick-v2"
version = "0.1.1"
@@ -1415,7 +1439,7 @@ version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2c7fca123abff659d15ed30da5b605fa954a29e912c94260c488d0d18f9107d"
dependencies = [
"base64",
"base64 0.13.1",
"prost 0.11.9",
"prost-types 0.11.9",
"substreams",
@@ -1489,7 +1513,7 @@ name = "substreams-helper"
version = "0.0.2"
dependencies = [
"anyhow",
"base64",
"base64 0.13.1",
"bigdecimal",
"downcast-rs",
"ethabi 18.0.0",
@@ -1512,7 +1536,7 @@ version = "0.0.2"
source = "git+https://github.com/propeller-heads/tycho-protocol-sdk.git?tag=0.4.0#cfbf6812bdc9503ff51debcf5e171cd680b4d694"
dependencies = [
"anyhow",
"base64",
"base64 0.13.1",
"bigdecimal",
"downcast-rs",
"ethabi 18.0.0",
@@ -1535,7 +1559,7 @@ version = "0.0.2"
source = "git+https://github.com/propeller-heads/tycho-protocol-sdk.git?rev=52d5021#52d502198e9aa964814ef5f139df0886c3eb7bb0"
dependencies = [
"anyhow",
"base64",
"base64 0.13.1",
"bigdecimal",
"downcast-rs",
"ethabi 18.0.0",
@@ -1558,7 +1582,7 @@ version = "0.0.2"
source = "git+https://github.com/propeller-heads/tycho-protocol-sdk.git?rev=b8aeaa3#b8aeaa3dc6e7242a5dd23681921258ef2cb3c6dd"
dependencies = [
"anyhow",
"base64",
"base64 0.13.1",
"bigdecimal",
"downcast-rs",
"ethabi 18.0.0",
@@ -1760,9 +1784,9 @@ dependencies = [
[[package]]
name = "tycho-substreams"
version = "0.5.0"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "828cbe6f7b984fefe39d8fdb4c40311e329f30ded0a70e477e8f2add4d60483d"
checksum = "a164ecbc3f2d7515e9447d7c1933a01fba6e54a082bbe1c73eb7b827d2a45b47"
dependencies = [
"ethabi 18.0.0",
"hex",
@@ -1777,8 +1801,9 @@ dependencies = [
[[package]]
name = "tycho-substreams"
version = "0.5.1"
version = "0.6.0"
dependencies = [
"base64 0.22.1",
"ethabi 18.0.0",
"hex",
"itertools 0.12.1",

View File

@@ -17,6 +17,7 @@ members = [
"ethereum-ekubo-v2",
"ethereum-maverick-v2",
"ethereum-balancer-v3",
"ethereum-liquidityparty",
]
resolver = "2"

View File

@@ -1,6 +1,6 @@
[package]
name = "tycho-substreams"
version = "0.5.1"
version = "0.6.0"
edition = "2021"
description = "Tycho substreams development kit, contains tycho-indexer block changes model and helper functions for common indexing tasks."
repository = "https://github.com/propeller-heads/tycho-protocol-sdk/tree/main/substreams/crates/tycho-substreams"
@@ -24,3 +24,4 @@ serde_json = "1.0.120"
[dev-dependencies]
rstest = "0.24.0"
base64 = "0.22.1"

View File

@@ -10,3 +10,7 @@ directory:
```bash
buf generate --template substreams/crates/tycho-substreams/buf.gen.yaml --output substreams/crates/tycho-substreams/
```
## Generate block test assets
To write complete unit tests, we rely on full block assets. These assets can be generated using the firecore tool from Substreams. More information is available in the [Substreams documentation](https://docs.substreams.dev/reference-material/log-and-debug#generating-the-input-of-the-test).
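
Once generated, an asset can be loaded in this crate's unit tests via the `read_block` helper in `crate::testing::assets`. A minimal sketch, reusing the Ethereum block asset already checked into `./assets`:

```rust
#[cfg(test)]
mod tests {
    use crate::testing::assets::read_block;
    use substreams_ethereum::pb::eth::v2::Block;

    #[test]
    fn decodes_generated_block_asset() {
        // Base64-encoded protobuf block produced with firecore.
        let block: Block = read_block("./assets/ethereum-block-23490768.binpb.base64");
        assert_eq!(block.number, 23490768);
    }
}
```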

File diff suppressed because one or more lines are too long

View File

@@ -248,7 +248,7 @@ pub fn extract_balance_deltas_from_tx<F: Fn(&[u8], &[u8]) -> bool>(
#[cfg(test)]
mod tests {
use super::*;
use crate::{mock_store::MockStore, pb::tycho::evm::v1::BalanceDelta};
use crate::{pb::tycho::evm::v1::BalanceDelta, testing::mock_store::MockStore};
use substreams::{
pb::substreams::StoreDelta,
prelude::{StoreGet, StoreNew},

View File

@@ -1,12 +1,12 @@
use std::collections::HashMap;
use std::collections::{HashMap, HashSet};
use substreams_ethereum::pb::{
eth,
eth::v2::{block::DetailLevel, StorageChange},
use substreams_ethereum::pb::eth::{
self,
v2::{block::DetailLevel, BalanceChange, StorageChange},
};
use crate::{
models::{ContractSlot, StorageChanges, Transaction},
models::{ContractSlot, StorageChanges},
pb::tycho::evm::v1::TransactionStorageChanges,
};
@@ -30,8 +30,6 @@ pub fn get_block_storage_changes(block: &eth::v2::Block) -> Vec<TransactionStora
let mut block_storage_changes = Vec::with_capacity(block.transaction_traces.len());
for block_tx in block.transactions() {
let transaction: Transaction = block_tx.into();
let mut changes_by_address: HashMap<Vec<u8>, Vec<StorageChange>> = HashMap::new();
for storage_change in block_tx
.calls
@@ -45,10 +43,36 @@ pub fn get_block_storage_changes(block: &eth::v2::Block) -> Vec<TransactionStora
.push(storage_change.clone());
}
// For each address, sort by ordinal and collect latest changes per slot
let tx_storage_changes: Vec<StorageChanges> = changes_by_address
let mut native_balance_changes_by_address: HashMap<Vec<u8>, Vec<BalanceChange>> =
HashMap::new();
for balance_change in block_tx
.calls
.iter()
.filter(|call| !call.state_reverted)
.flat_map(|call| call.balance_changes.iter())
{
native_balance_changes_by_address
.entry(balance_change.address.clone())
.or_default()
.push(balance_change.clone());
}
// Collect all unique addresses from both storage changes and balance changes
let mut all_addresses = HashSet::new();
all_addresses.extend(changes_by_address.keys().cloned());
all_addresses.extend(
native_balance_changes_by_address
.keys()
.cloned(),
);
// For each address, collect both storage changes and balance changes
let tx_storage_changes: Vec<StorageChanges> = all_addresses
.into_iter()
.map(|(address, mut changes)| {
.map(|address| {
// Process storage changes for this address
let slots = if let Some(changes) = changes_by_address.get(&address) {
let mut changes = changes.clone();
changes.sort_unstable_by_key(|change| change.ordinal);
// Collect latest change per slot
@@ -67,16 +91,222 @@ pub fn get_block_storage_changes(block: &eth::v2::Block) -> Vec<TransactionStora
previous_value: change.old_value,
});
}
latest_changes.into_values().collect()
} else {
vec![]
};
StorageChanges { address, slots: latest_changes.into_values().collect() }
// Filter out slots that have the same value before and after the transaction
let slots = slots
.into_iter()
.filter(|slot| slot.previous_value != slot.value)
.collect();
// Process native balance changes for this address
let native_balance = native_balance_changes_by_address
.get(&address)
.and_then(|balance_changes| {
let (first, last) = balance_changes.iter().fold(
(None, None),
|(min, max): (Option<&BalanceChange>, Option<&BalanceChange>),
change| {
let new_min = match min {
None => Some(change),
Some(m) if change.ordinal < m.ordinal => Some(change),
_ => min,
};
let new_max = match max {
None => Some(change),
Some(m) if change.ordinal > m.ordinal => Some(change),
_ => max,
};
(new_min, new_max)
},
);
let balance_before_tx = first.map(|f| {
f.old_value
.as_ref()
.map(|b| b.bytes.clone())
.unwrap_or_default()
});
let balance_after_tx = last.map(|l| {
l.new_value
.as_ref()
.map(|b| b.bytes.clone())
.unwrap_or_default()
});
(balance_before_tx != balance_after_tx).then_some(balance_after_tx.clone())
})
.flatten();
StorageChanges { address, slots, native_balance }
})
.collect();
block_storage_changes.push(TransactionStorageChanges {
tx: Some(transaction),
tx: Some(block_tx.into()),
storage_changes: tx_storage_changes,
});
}
block_storage_changes
}
#[cfg(test)]
mod test {
use super::*;
use crate::testing::assets::read_block;
#[test]
fn test_get_block_storage_changes_ethereum_block_23490768() {
let block = read_block("./assets/ethereum-block-23490768.binpb.base64");
let changes = get_block_storage_changes(&block);
let mut balance_map: HashMap<String, HashMap<String, String>> = HashMap::new();
let mut storage_map: HashMap<String, HashMap<String, HashMap<String, (String, String)>>> =
HashMap::new();
for change in changes {
let tx_hash = change.tx.unwrap().hash.clone();
let balance_tx_entry = balance_map
.entry(hex::encode(tx_hash.clone()))
.or_default();
let storage_tx_entry = storage_map
.entry(hex::encode(tx_hash.clone()))
.or_default();
for storage_change in change.storage_changes {
if let Some(native_balance) = storage_change.native_balance {
balance_tx_entry.insert(
hex::encode(storage_change.address.clone()),
hex::encode(native_balance.clone()),
);
}
for slot in storage_change.slots {
let contract_tx_entry = storage_tx_entry
.entry(hex::encode(storage_change.address.clone()))
.or_default();
contract_tx_entry.insert(
hex::encode(slot.slot.clone()),
(hex::encode(slot.previous_value.clone()), hex::encode(slot.value.clone())),
);
}
}
}
// Assertions for https://etherscan.io/tx/0x44a34ba7400fa7004ec5037aeb1103a7c0cd8a83a95c4cd5cf9561c3c38db326#statechange
// Check balance changes
let balance_tx_entry = balance_map
.get("44a34ba7400fa7004ec5037aeb1103a7c0cd8a83a95c4cd5cf9561c3c38db326")
.unwrap();
assert_eq!(balance_tx_entry.len(), 4);
assert_eq!(
balance_tx_entry
.get("dadb0d80178819f2319190d340ce9a924f783711")
.unwrap(),
"052196f442fadb8314"
);
assert_eq!(
balance_tx_entry
.get("c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2")
.unwrap(),
"0207150b274902c5e7871c"
);
assert_eq!(
balance_tx_entry
.get("ad01c20d5886137e056775af56915de824c8fce5")
.unwrap(),
"c83a1d6287cb5e"
);
assert_eq!(
balance_tx_entry
.get("638f1db9881a84af9835c6625d17b0af034234ad")
.unwrap(),
"0f69303da21468"
);
// Check storage changes
let storage_tx_entry = storage_map
.get("44a34ba7400fa7004ec5037aeb1103a7c0cd8a83a95c4cd5cf9561c3c38db326")
.unwrap();
assert_eq!(storage_tx_entry.len(), 3);
let storage_tx_entry_0f9e3401a5155a02c86353c3d9b24214876779dd = HashMap::from([
(
"0000000000000000000000000000000000000000000000000000000000000009".to_string(),
(
"00000000000000000000000000000000009faeae5180599c05015fcfa242d3b0".to_string(),
"00000000000000000000000000000000009faebb96f403f1913f425b3ea446e0".to_string(),
),
),
(
"000000000000000000000000000000000000000000000000000000000000000a".to_string(),
(
"00000000000000000000000000f94f053f65617829584571d9de584cd219fb88".to_string(),
"00000000000000000000000000f94f66e6e9d8f6688d6ca53ff9baae52e11cd8".to_string(),
),
),
(
"0000000000000000000000000000000000000000000000000000000000000008".to_string(),
(
"68de8f37000000000001fb7a6a5bb2b548080000000560989aab8af59d9be89b".to_string(),
"68de8f5b000000000001fb8b2909997ca55100000005606b52e81f19442026af".to_string(),
),
),
]);
assert_eq!(
storage_tx_entry
.get("0f9e3401a5155a02c86353c3d9b24214876779dd")
.unwrap(),
&storage_tx_entry_0f9e3401a5155a02c86353c3d9b24214876779dd
);
let storage_tx_entry_11dfc652eb62c723ad8c2ae731fcede58ab07564 = HashMap::from([
(
"654f44e59f538551b5124259a61eaadb863c6c10cc9d43aa550237a76a7de0b0".to_string(),
(
"000000000000000000000000000000000000000000000077c1c5e25db942af6a".to_string(),
"0000000000000000000000000000000000000000000000a2c5f2bc08a7dea7a4".to_string(),
),
),
(
"6b12653da4ae5b17258ea9b02a62123c9305455af47b7dceea1b7137f7c69671".to_string(),
(
"0000000000000000000000000000000000000000000001454f7d5d0ce8d4a21e".to_string(),
"0000000000000000000000000000000000000000000001479313ef3e53b46bd0".to_string(),
),
),
(
"8f60e36f69a92730149f231ad2475b4aa8a8e50f4072f62a1f099ffc11d0f647".to_string(),
(
"0000000000000000000000000000000000000000000560989aab8af59d9be89b".to_string(),
"00000000000000000000000000000000000000000005606b52e81f19442026af".to_string(),
),
),
]);
assert_eq!(
storage_tx_entry
.get("11dfc652eb62c723ad8c2ae731fcede58ab07564")
.unwrap(),
&storage_tx_entry_11dfc652eb62c723ad8c2ae731fcede58ab07564
);
let storage_tx_entry_c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2 = HashMap::from([(
"77f05379c72cc19907ba9648dcd0bda409fabc68ca111b532de62ffdb67e868f".to_string(),
(
"000000000000000000000000000000000000000000000001fb7a6a5bb2b54808".to_string(),
"000000000000000000000000000000000000000000000001fb8b2909997ca551".to_string(),
),
)]);
assert_eq!(
storage_tx_entry
.get("c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2")
.unwrap(),
&storage_tx_entry_c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2
);
}
}
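
A sketch of how a caller might consume the per-transaction output, assuming a decoded `eth::v2::Block` named `block`; the log call is only there to illustrate the shape of `StorageChanges` (address, deduplicated slots, optional native balance):

```rust
// Sketch: iterate the per-transaction changes produced above (illustrative only).
for tx_changes in get_block_storage_changes(&block) {
    for change in tx_changes.storage_changes {
        if let Some(balance) = &change.native_balance {
            substreams::log::debug!(
                "contract 0x{} native balance after tx: 0x{}",
                hex::encode(&change.address),
                hex::encode(balance)
            );
        }
    }
}
```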

View File

@@ -4,11 +4,12 @@ pub mod balances;
pub mod block_storage;
pub mod contract;
pub mod entrypoint;
#[cfg(test)]
mod mock_store;
pub mod models;
pub mod pb;
#[cfg(test)]
pub mod testing;
pub mod prelude {
pub use super::models::*;
}

View File

@@ -245,6 +245,9 @@ pub struct StorageChanges {
/// The contract's storage changes
#[prost(message, repeated, tag="2")]
pub slots: ::prost::alloc::vec::Vec<ContractSlot>,
/// \[optional\] The contract's balance change
#[prost(bytes="vec", optional, tag="3")]
pub native_balance: ::core::option::Option<::prost::alloc::vec::Vec<u8>>,
}
// Aggregate entities

View File

@@ -0,0 +1,12 @@
/// Reads a base64-encoded asset file and returns the decoded protobuf struct.
/// Panics if the file does not exist or the base64 decoding fails.
pub fn read_block<B: prost::Message + Default>(filename: &str) -> B {
use base64::Engine;
let encoded = std::fs::read_to_string(filename).expect("Failed to read file");
let raw_bytes = base64::prelude::BASE64_STANDARD
.decode(&encoded)
.expect("Failed to decode base64");
B::decode(&*raw_bytes).expect("Not able to decode Block")
}

View File

@@ -0,0 +1,2 @@
pub mod assets;
pub mod mock_store;

View File

@@ -1,6 +1,6 @@
[package]
name = "ethereum-balancer-v2"
version = "0.4.0"
version = "0.4.1"
edition = "2021"
[lib]
@@ -15,7 +15,7 @@ hex = "0.4.3"
anyhow = "1.0.75"
num-bigint = "0.4.4"
itertools = "0.12.0"
tycho-substreams = "0.5.0"
tycho-substreams = "0.5.1"
[build-dependencies]
anyhow = "1"

View File

@@ -1,6 +1,5 @@
substreams_yaml_path: ./substreams.yaml
protocol_system: "vm:balancer_v2"
module_name: "map_protocol_changes"
protocol_type_names:
- "balancer_v2_pool"
adapter_contract: "BalancerV2SwapAdapter"

View File

@@ -1,7 +1,7 @@
specVersion: v0.1.0
package:
name: "ethereum_balancer_v2"
version: v0.4.0
version: v0.4.1
url: "https://github.com/propeller-heads/tycho-protocol-sdk/tree/main/substreams/ethereum-balancer-v2"
protobuf:

View File

@@ -1,6 +1,5 @@
substreams_yaml_path: ./substreams.yaml
protocol_system: "vm:curve"
module_name: "map_protocol_changes"
protocol_type_names:
- "curve_pool"
adapter_contract: "CurveAdapter"

View File

@@ -1,7 +1,6 @@
substreams_yaml_path: ./substreams.yaml
protocol_system: "ekubo_v2"
module_name: "map_protocol_changes"
adapter_contract: "EkuboSwapAdapter"
adapter_build_signature: "constructor(address)"
adapter_build_args: "0x16e186ecdc94083fff53ef2a41d46b92a54f61e2"
skip_balance_check: true # Fails because the pool id is not the owner of the tokens. Needs to be addressed in the testing framework.

View File

@@ -0,0 +1,26 @@
[package]
name = "ethereum-liquidityparty"
version = "0.1.0"
edition = "2021"
[lib]
name = "ethereum_liquidityparty"
crate-type = ["cdylib"]
[dependencies]
substreams = "0.5.22"
substreams-ethereum = "0.9.9"
prost = "0.11"
tycho-substreams = { git = "https://github.com/propeller-heads/tycho-protocol-sdk.git", rev = "52d5021" }
anyhow = "1.0.95"
ethabi = "18.0.0"
num-bigint = "0.4.6"
hex = "0.4.3"
itertools = "0.10.5"
serde = "1.0.217"
serde-sibor = "0.1.0"
serde_qs = "0.13.0"
[build-dependencies]
anyhow = "1"
substreams-ethereum = "0.9.9"

View File

@@ -0,0 +1,11 @@
export PATH := /home/linuxbrew/.linuxbrew/bin:$(PATH)
all: ethereum
build:
cargo build --target wasm32-unknown-unknown --release
ethereum: build
substreams pack ethereum-liquidityparty.yaml -o ethereum-liquidityparty.spkg
.PHONY: build ethereum
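
For example, running `make ethereum` builds the wasm module for the `wasm32-unknown-unknown` target and packs it into `ethereum-liquidityparty.spkg`, assuming the Rust wasm target and the substreams CLI are installed.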

View File

@@ -0,0 +1,308 @@
[
{
"type": "constructor",
"inputs": [
{
"name": "mintImpl",
"type": "address",
"internalType": "contract PartyPoolMintImpl"
},
{
"name": "swapImpl_",
"type": "address",
"internalType": "contract PartyPoolSwapImpl"
}
],
"stateMutability": "nonpayable"
},
{
"type": "function",
"name": "burnAmounts",
"inputs": [
{
"name": "pool",
"type": "address",
"internalType": "contract IPartyPool"
},
{
"name": "lpTokenAmount",
"type": "uint256",
"internalType": "uint256"
}
],
"outputs": [
{
"name": "withdrawAmounts",
"type": "uint256[]",
"internalType": "uint256[]"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "burnSwapAmounts",
"inputs": [
{
"name": "pool",
"type": "address",
"internalType": "contract IPartyPool"
},
{
"name": "lpAmount",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "outputTokenIndex",
"type": "uint256",
"internalType": "uint256"
}
],
"outputs": [
{
"name": "amountOut",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "outFee",
"type": "uint256",
"internalType": "uint256"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "flashFee",
"inputs": [
{
"name": "pool",
"type": "address",
"internalType": "contract IPartyPool"
},
{
"name": "",
"type": "address",
"internalType": "address"
},
{
"name": "amount",
"type": "uint256",
"internalType": "uint256"
}
],
"outputs": [
{
"name": "fee",
"type": "uint256",
"internalType": "uint256"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "maxFlashLoan",
"inputs": [
{
"name": "pool",
"type": "address",
"internalType": "contract IPartyPool"
},
{
"name": "token",
"type": "address",
"internalType": "address"
}
],
"outputs": [
{
"name": "",
"type": "uint256",
"internalType": "uint256"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "mintAmounts",
"inputs": [
{
"name": "pool",
"type": "address",
"internalType": "contract IPartyPool"
},
{
"name": "lpTokenAmount",
"type": "uint256",
"internalType": "uint256"
}
],
"outputs": [
{
"name": "depositAmounts",
"type": "uint256[]",
"internalType": "uint256[]"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "poolPrice",
"inputs": [
{
"name": "pool",
"type": "address",
"internalType": "contract IPartyPool"
},
{
"name": "quoteTokenIndex",
"type": "uint256",
"internalType": "uint256"
}
],
"outputs": [
{
"name": "",
"type": "int128",
"internalType": "int128"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "price",
"inputs": [
{
"name": "pool",
"type": "address",
"internalType": "contract IPartyPool"
},
{
"name": "baseTokenIndex",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "quoteTokenIndex",
"type": "uint256",
"internalType": "uint256"
}
],
"outputs": [
{
"name": "",
"type": "uint256",
"internalType": "uint256"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "swapMintAmounts",
"inputs": [
{
"name": "pool",
"type": "address",
"internalType": "contract IPartyPool"
},
{
"name": "inputTokenIndex",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "maxAmountIn",
"type": "uint256",
"internalType": "uint256"
}
],
"outputs": [
{
"name": "amountInUsed",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "lpMinted",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "inFee",
"type": "uint256",
"internalType": "uint256"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "swapToLimitAmounts",
"inputs": [
{
"name": "pool",
"type": "address",
"internalType": "contract IPartyPool"
},
{
"name": "inputTokenIndex",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "outputTokenIndex",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "limitPrice",
"type": "int128",
"internalType": "int128"
}
],
"outputs": [
{
"name": "amountIn",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "amountOut",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "inFee",
"type": "uint256",
"internalType": "uint256"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "working",
"inputs": [
{
"name": "pool",
"type": "address",
"internalType": "contract IPartyPool"
}
],
"outputs": [
{
"name": "",
"type": "bool",
"internalType": "bool"
}
],
"stateMutability": "view"
}
]

View File

@@ -0,0 +1,705 @@
[
{
"type": "constructor",
"inputs": [
{
"name": "owner_",
"type": "address",
"internalType": "address"
},
{
"name": "wrapper_",
"type": "address",
"internalType": "contract NativeWrapper"
},
{
"name": "swapImpl_",
"type": "address",
"internalType": "contract PartyPoolSwapImpl"
},
{
"name": "mintImpl_",
"type": "address",
"internalType": "contract PartyPoolMintImpl"
},
{
"name": "poolInitCodeStorage_",
"type": "address",
"internalType": "contract PartyPoolInitCode"
},
{
"name": "balancedPairInitCodeStorage_",
"type": "address",
"internalType": "contract PartyPoolBalancedPairInitCode"
},
{
"name": "protocolFeePpm_",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "protocolFeeAddress_",
"type": "address",
"internalType": "address"
}
],
"stateMutability": "nonpayable"
},
{
"type": "function",
"name": "getAllPools",
"inputs": [
{
"name": "offset",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "limit",
"type": "uint256",
"internalType": "uint256"
}
],
"outputs": [
{
"name": "pools",
"type": "address[]",
"internalType": "contract IPartyPool[]"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "getAllTokens",
"inputs": [
{
"name": "offset",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "limit",
"type": "uint256",
"internalType": "uint256"
}
],
"outputs": [
{
"name": "tokens",
"type": "address[]",
"internalType": "address[]"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "getPoolSupported",
"inputs": [
{
"name": "pool",
"type": "address",
"internalType": "address"
}
],
"outputs": [
{
"name": "",
"type": "bool",
"internalType": "bool"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "getPoolsByToken",
"inputs": [
{
"name": "token",
"type": "address",
"internalType": "contract IERC20"
},
{
"name": "offset",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "limit",
"type": "uint256",
"internalType": "uint256"
}
],
"outputs": [
{
"name": "pools",
"type": "address[]",
"internalType": "contract IPartyPool[]"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "mintImpl",
"inputs": [],
"outputs": [
{
"name": "",
"type": "address",
"internalType": "contract PartyPoolMintImpl"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "newPool",
"inputs": [
{
"name": "name_",
"type": "string",
"internalType": "string"
},
{
"name": "symbol_",
"type": "string",
"internalType": "string"
},
{
"name": "tokens_",
"type": "address[]",
"internalType": "contract IERC20[]"
},
{
"name": "kappa_",
"type": "int128",
"internalType": "int128"
},
{
"name": "swapFeesPpm_",
"type": "uint256[]",
"internalType": "uint256[]"
},
{
"name": "flashFeePpm_",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "stable_",
"type": "bool",
"internalType": "bool"
},
{
"name": "payer",
"type": "address",
"internalType": "address"
},
{
"name": "receiver",
"type": "address",
"internalType": "address"
},
{
"name": "initialDeposits",
"type": "uint256[]",
"internalType": "uint256[]"
},
{
"name": "initialLpAmount",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "deadline",
"type": "uint256",
"internalType": "uint256"
}
],
"outputs": [
{
"name": "pool",
"type": "address",
"internalType": "contract IPartyPool"
},
{
"name": "lpAmount",
"type": "uint256",
"internalType": "uint256"
}
],
"stateMutability": "nonpayable"
},
{
"type": "function",
"name": "newPool",
"inputs": [
{
"name": "name_",
"type": "string",
"internalType": "string"
},
{
"name": "symbol_",
"type": "string",
"internalType": "string"
},
{
"name": "tokens_",
"type": "address[]",
"internalType": "contract IERC20[]"
},
{
"name": "tradeFrac_",
"type": "int128",
"internalType": "int128"
},
{
"name": "targetSlippage_",
"type": "int128",
"internalType": "int128"
},
{
"name": "swapFeePpm_",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "flashFeePpm_",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "stable_",
"type": "bool",
"internalType": "bool"
},
{
"name": "payer",
"type": "address",
"internalType": "address"
},
{
"name": "receiver",
"type": "address",
"internalType": "address"
},
{
"name": "initialDeposits",
"type": "uint256[]",
"internalType": "uint256[]"
},
{
"name": "initialLpAmount",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "deadline",
"type": "uint256",
"internalType": "uint256"
}
],
"outputs": [
{
"name": "pool",
"type": "address",
"internalType": "contract IPartyPool"
},
{
"name": "lpAmount",
"type": "uint256",
"internalType": "uint256"
}
],
"stateMutability": "nonpayable"
},
{
"type": "function",
"name": "newPool",
"inputs": [
{
"name": "name_",
"type": "string",
"internalType": "string"
},
{
"name": "symbol_",
"type": "string",
"internalType": "string"
},
{
"name": "tokens_",
"type": "address[]",
"internalType": "contract IERC20[]"
},
{
"name": "kappa_",
"type": "int128",
"internalType": "int128"
},
{
"name": "swapFeePpm_",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "flashFeePpm_",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "stable_",
"type": "bool",
"internalType": "bool"
},
{
"name": "payer",
"type": "address",
"internalType": "address"
},
{
"name": "receiver",
"type": "address",
"internalType": "address"
},
{
"name": "initialDeposits",
"type": "uint256[]",
"internalType": "uint256[]"
},
{
"name": "initialLpAmount",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "deadline",
"type": "uint256",
"internalType": "uint256"
}
],
"outputs": [
{
"name": "pool",
"type": "address",
"internalType": "contract IPartyPool"
},
{
"name": "lpAmount",
"type": "uint256",
"internalType": "uint256"
}
],
"stateMutability": "nonpayable"
},
{
"type": "function",
"name": "owner",
"inputs": [],
"outputs": [
{
"name": "",
"type": "address",
"internalType": "address"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "params",
"inputs": [],
"outputs": [
{
"name": "",
"type": "tuple",
"internalType": "struct IPartyPoolDeployer.DeployParams",
"components": [
{
"name": "nonce",
"type": "bytes32",
"internalType": "bytes32"
},
{
"name": "owner",
"type": "address",
"internalType": "address"
},
{
"name": "name",
"type": "string",
"internalType": "string"
},
{
"name": "symbol",
"type": "string",
"internalType": "string"
},
{
"name": "tokens",
"type": "address[]",
"internalType": "contract IERC20[]"
},
{
"name": "kappa",
"type": "int128",
"internalType": "int128"
},
{
"name": "fees",
"type": "uint256[]",
"internalType": "uint256[]"
},
{
"name": "flashFeePpm",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "protocolFeePpm",
"type": "uint256",
"internalType": "uint256"
},
{
"name": "protocolFeeAddress",
"type": "address",
"internalType": "address"
},
{
"name": "wrapper",
"type": "address",
"internalType": "contract NativeWrapper"
},
{
"name": "swapImpl",
"type": "address",
"internalType": "contract PartyPoolSwapImpl"
},
{
"name": "mintImpl",
"type": "address",
"internalType": "contract PartyPoolMintImpl"
}
]
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "poolCount",
"inputs": [],
"outputs": [
{
"name": "",
"type": "uint256",
"internalType": "uint256"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "poolsByTokenCount",
"inputs": [
{
"name": "token",
"type": "address",
"internalType": "contract IERC20"
}
],
"outputs": [
{
"name": "",
"type": "uint256",
"internalType": "uint256"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "protocolFeeAddress",
"inputs": [],
"outputs": [
{
"name": "",
"type": "address",
"internalType": "address"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "protocolFeePpm",
"inputs": [],
"outputs": [
{
"name": "",
"type": "uint256",
"internalType": "uint256"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "renounceOwnership",
"inputs": [],
"outputs": [],
"stateMutability": "nonpayable"
},
{
"type": "function",
"name": "setProtocolFeeAddress",
"inputs": [
{
"name": "feeAddress",
"type": "address",
"internalType": "address"
}
],
"outputs": [],
"stateMutability": "nonpayable"
},
{
"type": "function",
"name": "swapImpl",
"inputs": [],
"outputs": [
{
"name": "",
"type": "address",
"internalType": "contract PartyPoolSwapImpl"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "tokenCount",
"inputs": [],
"outputs": [
{
"name": "",
"type": "uint256",
"internalType": "uint256"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "transferOwnership",
"inputs": [
{
"name": "newOwner",
"type": "address",
"internalType": "address"
}
],
"outputs": [],
"stateMutability": "nonpayable"
},
{
"type": "function",
"name": "wrapper",
"inputs": [],
"outputs": [
{
"name": "",
"type": "address",
"internalType": "contract NativeWrapper"
}
],
"stateMutability": "view"
},
{
"type": "event",
"name": "OwnershipTransferred",
"inputs": [
{
"name": "previousOwner",
"type": "address",
"indexed": true,
"internalType": "address"
},
{
"name": "newOwner",
"type": "address",
"indexed": true,
"internalType": "address"
}
],
"anonymous": false
},
{
"type": "event",
"name": "PartyStarted",
"inputs": [
{
"name": "pool",
"type": "address",
"indexed": true,
"internalType": "contract IPartyPool"
},
{
"name": "name",
"type": "string",
"indexed": false,
"internalType": "string"
},
{
"name": "symbol",
"type": "string",
"indexed": false,
"internalType": "string"
},
{
"name": "tokens",
"type": "address[]",
"indexed": false,
"internalType": "contract IERC20[]"
}
],
"anonymous": false
},
{
"type": "error",
"name": "OwnableInvalidOwner",
"inputs": [
{
"name": "owner",
"type": "address",
"internalType": "address"
}
]
},
{
"type": "error",
"name": "OwnableUnauthorizedAccount",
"inputs": [
{
"name": "account",
"type": "address",
"internalType": "address"
}
]
},
{
"type": "error",
"name": "SafeERC20FailedOperation",
"inputs": [
{
"name": "token",
"type": "address",
"internalType": "address"
}
]
}
]

File diff suppressed because it is too large

View File

@@ -0,0 +1,11 @@
#!/bin/bash
LMSR_HOME=../../../lmsr-amm
CHAIN_ID=11155111
abi() {
jq '.abi' $LMSR_HOME/deployment/$CHAIN_ID/v1/out/$1.sol/$1.json > abi/$2.abi.json
echo abi/$2.abi.json
}
abi PartyPlanner party_planner
abi PartyPool party_pool
abi PartyInfo party_info

View File

@@ -0,0 +1,12 @@
version: v1
plugins:
- plugin: buf.build/community/neoeinstein-prost:v0.2.2
out: src/pb
opt:
- file_descriptor_set=false
- plugin: buf.build/community/neoeinstein-prost-crate:v0.3.1
out: src/pb
opt:
- no_features

View File

@@ -0,0 +1,49 @@
use anyhow::Result;
use std::{fs, io::Write};
use substreams_ethereum::Abigen;
fn main() -> Result<()> {
let abi_folder = "abi";
let output_folder = "src/abi";
let abis = fs::read_dir(abi_folder)?;
let mut files = abis.collect::<Result<Vec<_>, _>>()?;
// Sort the files by their name
files.sort_by_key(|a| a.file_name());
let mut mod_rs_content = String::new();
mod_rs_content.push_str("#![allow(clippy::all)]\n");
for file in files {
let file_name = file.file_name();
let file_name = file_name.to_string_lossy();
if !file_name.ends_with(".json") {
continue;
}
let contract_name = file_name.split('.').next().unwrap();
let input_path = format!("{abi_folder}/{file_name}");
let output_path = format!("{output_folder}/{contract_name}.rs");
mod_rs_content.push_str(&format!("pub mod {contract_name};\n"));
if std::path::Path::new(&output_path).exists() {
continue;
}
Abigen::new(contract_name, &input_path)?
.generate()?
.write_to_file(&output_path)?;
}
let mod_rs_path = format!("{output_folder}/mod.rs");
let mut mod_rs_file = fs::File::create(mod_rs_path)?;
mod_rs_file.write_all(mod_rs_content.as_bytes())?;
Ok(())
}

View File

@@ -0,0 +1,68 @@
specVersion: v0.1.0
package:
name: "ethereum_liquidityparty"
version: v0.1.0
protobuf:
files:
- tycho/evm/v1/vm.proto
- tycho/evm/v1/common.proto
- tycho/evm/v1/utils.proto
importPaths:
- ../../proto
binaries:
default:
type: wasm/rust-v1
file: ../target/wasm32-unknown-unknown/release/ethereum_liquidityparty.wasm
network: mainnet
networks:
mainnet:
initialBlock:
map_protocol_components: 23978797
params:
map_protocol_components: planner=0x42977f565971F6D288a05ddEbC87A17276F71A29&info=0x605F803cD27F5c1fa01440B2cbd5D3E4Cf7EE850&mint_impl=0xA0375403921e9B357E1BeD57bef3fA3FCE80acd0&swap_impl=0x6aA001e87F86E83bc4D569883332882cb47E2A13
modules:
- name: map_protocol_components
kind: map
inputs:
- params: string
- source: sf.ethereum.type.v2.Block
output:
type: proto:tycho.evm.v1.BlockTransactionProtocolComponents
- name: store_protocol_components
kind: store
updatePolicy: set
valueType: string
inputs:
- map: map_protocol_components
- name: map_relative_component_balance
kind: map
inputs:
- source: sf.ethereum.type.v2.Block
- store: store_protocol_components
output:
type: proto:tycho.evm.v1.BlockBalanceDeltas
- name: store_balances
kind: store
updatePolicy: add
valueType: bigint
inputs:
- map: map_relative_component_balance
- name: map_protocol_changes
kind: map
inputs:
- source: sf.ethereum.type.v2.Block
- map: map_protocol_components
- map: map_relative_component_balance
- store: store_protocol_components
- store: store_balances
mode: deltas
output:
type: proto:tycho.evm.v1.BlockChanges

View File

@@ -0,0 +1,40 @@
substreams_yaml_path: ./sepolia-liquidityparty.yaml
adapter_contract: "LiquidityPartySwapAdapter"
adapter_build_signature: "constructor(address,address)"
# PartyPlanner, PartyInfo
adapter_build_args: "0x42977f565971F6D288a05ddEbC87A17276F71A29,0x605F803cD27F5c1fa01440B2cbd5D3E4Cf7EE850"
# Liquidity Party protocol fees are retained in the pool until collection, so the pool balance does not match the TVL.
skip_balance_check: true
protocol_type_names:
- "liquidityparty_pool"
protocol_system: "vm:liquidityparty"
initialized_accounts: [
"0x42977f565971F6D288a05ddEbC87A17276F71A29", # PartyPlanner
"0x605F803cD27F5c1fa01440B2cbd5D3E4Cf7EE850", # PartyInfo
"0xA0375403921e9B357E1BeD57bef3fA3FCE80acd0", # PartyPoolMintImpl
"0x6aA001e87F86E83bc4D569883332882cb47E2A13", # PartyPoolSwapImpl
]
tests:
- name: test_pool_creation
start_block: 23978797
stop_block: 23978798
initialized_accounts: []
expected_components:
- id: "0x2A804e94500AE379ee0CcC423a67B07cc0aF548C"
tokens:
# Does this test whether the tokens are in the correct order?
- "0xdAC17F958D2ee523a2206206994597C13D831ec7" # USDT
- "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48" # USDC
- "0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599" # WBTC
- "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2" # WETH
- "0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984" # UNI
- "0xD31a59c85aE9D8edEFeC411D448f90841571b89c" # SOL
- "0x50327c6c5a14DCaDE707ABad2E27eB517df87AB5" # TRX
- "0x7Fc66500c84A76Ad7e9c93437bFc5Ac33E2DDaE9" # AAVE
- "0x6982508145454Ce325dDbE47a25d4ec3d2311933" # PEPE
- "0x95aD61b0a150d79219dCF64E1E6Cc01f0B64C4cE" # SHIB
static_attributes: {}
creation_tx: "0x5c8b1e1e6ec10143a1252799d14df09c7e84f6a99ccde95fc11295a61c20060e"
skip_simulation: false
skip_execution: false

View File

@@ -0,0 +1,4 @@
[toolchain]
channel = "1.83.0"
components = [ "rustfmt" ]
targets = [ "wasm32-unknown-unknown" ]

View File

@@ -0,0 +1,4 @@
mod.rs
party_planner.rs
party_pool.rs
party_info.rs

View File

@@ -0,0 +1,4 @@
mod abi;
mod modules;
mod pool_factories;
mod params;

View File

@@ -0,0 +1,314 @@
//! Template for Protocols with contract factories
//!
//! This template provides foundational maps and store substream modules for indexing a
//! protocol where each component (e.g., pool) is deployed to a separate contract. Each
//! contract is expected to escrow its ERC-20 token balances.
//!
//! If your protocol supports native ETH, you may need to adjust the balance tracking
//! logic in `map_relative_component_balance` to account for native token handling.
//!
//! ## Assumptions
//! - Assumes each pool has a single newly deployed contract linked to it
//! - Assumes pool identifier equals the deployed contract address
//! - Assumes any price or liquidity update correlates with a pool's contract storage update.
//!
//! ## Alternative Module
//! If your protocol uses a vault-like contract to manage balances, or if pools are
//! registered within a singleton contract, refer to the `ethereum-template-singleton`
//! substream for an appropriate alternative.
//!
//! ## Warning
//! This template provides a general framework for indexing a protocol. However, it is
//! likely that you will need to adapt the steps to suit your specific use case. Use the
//! provided code with care and ensure you fully understand each step before proceeding
//! with your implementation.
//!
//! ## Example Use Case
//! For a Uniswap-like protocol where each liquidity pool is deployed as a separate
//! contract, you can use this template to:
//! - Track relative component balances (e.g., ERC-20 token balances in each pool).
//! - Index individual pool contracts as they are created by the factory contract.
//!
//! Adjustments to the template may include:
//! - Handling native ETH balances alongside token balances.
//! - Customizing indexing logic for specific factory contract behavior.
use crate::params::Params;
use crate::{abi, pool_factories};
use anyhow::Result;
use itertools::Itertools;
use std::collections::HashMap;
use substreams::{pb::substreams::StoreDeltas, prelude::*};
use substreams_ethereum::{pb::eth, Event};
use tycho_substreams::{
balances::aggregate_balances_changes, contract::extract_contract_changes_builder,
prelude::*,
};
/// Find and create all relevant protocol components
///
/// This method maps over blocks and instantiates ProtocolComponents with unique ids
/// as well as all the necessary metadata for routing and encoding.
#[substreams::handlers::map]
fn map_protocol_components(
param_string: String,
block: eth::v2::Block
) -> Result<BlockTransactionProtocolComponents> {
substreams::log::debug!("Processing block {} for protocol components", block.number);
let params = Params::parse(&param_string)?;
Ok(BlockTransactionProtocolComponents {
tx_components: block
.transactions()
.filter_map(|tx| {
let components = tx
.logs_with_calls()
.filter_map(|(log, call)| {
pool_factories::maybe_create_component(&params, call.call, log, tx)
})
.collect::<Vec<_>>();
if !components.is_empty() {
Some(TransactionProtocolComponents { tx: Some(tx.into()), components })
} else {
None
}
})
.collect::<Vec<_>>(),
})
}
/// Stores all protocol components in a store.
///
/// Stores information about components in a key value store. This is only necessary if
/// you need to access the whole set of components within your indexing logic.
///
/// Popular use cases are:
/// - Checking if a contract belongs to a component. In this case we suggest using the contract
///   address as the store key so lookup operations are O(1).
/// - Tallying up relative balance changes to calculate absolute ERC-20 token balances per
///   component.
///
/// Usually you can skip this step if:
/// - You are interested in a static set of components only
/// - Your protocol emits balance change events with absolute values
#[substreams::handlers::store]
fn store_protocol_components(
map_protocol_components: BlockTransactionProtocolComponents,
store: StoreSetRaw,
) {
substreams::log::debug!("Storing {} protocol components", map_protocol_components.tx_components.len());
map_protocol_components
.tx_components
.into_iter()
.for_each(|tx_pc| {
tx_pc
.components
.into_iter()
.for_each(|pc| {
// Assumes that the component id is a hex encoded contract address
let key = pc.id.clone();
// We store the component's tokens
let val = serde_sibor::to_bytes(&pc.tokens).unwrap();
store.set(0, key, &val);
})
});
}
/// Extracts balance changes per component
///
/// This template function uses ERC20 transfer events to extract balance changes. It
/// assumes that each component is deployed at a dedicated contract address. If a
/// transfer to the component is detected, its balance is increased and if a transfer
/// from the component is detected its balance is decreased.
///
/// ## Note:
/// Changes are necessary if your protocol uses native ETH, uses a vault contract, or if
/// your components burn or mint tokens without emitting transfer events.
///
/// You may want to ignore LP tokens here if your protocol emits transfer events for them.
#[substreams::handlers::map]
fn map_relative_component_balance(
block: eth::v2::Block,
store: StoreGetRaw,
) -> Result<BlockBalanceDeltas, anyhow::Error> {
substreams::log::debug!("Processing block {} for balance changes", block.number);
let mut res = Vec::new();
for log in block.logs() {
let component_id = log.address().to_vec();
if let Some(token_enc) = store.get_last(hex::encode(&component_id)) {
let tokens = serde_sibor::from_bytes::<Vec<Vec<u8>>>(&token_enc)
.map_err(|e| anyhow::anyhow!("Failed to deserialize tokens: {}", e))?;
if let Some(event) = abi::party_pool::events::Mint::match_and_decode(log) {
for (i, amount) in event.amounts.iter().enumerate() {
if !amount.is_zero() {
res.push(BalanceDelta {
ord: log.ordinal(),
tx: Some(log.receipt.transaction.into()),
token: tokens[i].clone(),
delta: amount.to_signed_bytes_be(),
component_id: component_id.clone(),
});
}
}
} else if let Some(event) = abi::party_pool::events::Burn::match_and_decode(log) {
for (i, amount) in event.amounts.iter().enumerate() {
if !amount.is_zero() {
res.push(BalanceDelta {
ord: log.ordinal(),
tx: Some(log.receipt.transaction.into()),
token: tokens[i].clone(),
delta: amount.neg().to_signed_bytes_be(),
component_id: component_id.clone(),
});
}
}
} else if let Some(event) = abi::party_pool::events::Swap::match_and_decode(log) {
// increase by amount_in
res.push(BalanceDelta {
ord: log.ordinal(),
tx: Some(log.receipt.transaction.into()),
token: event.token_in.to_vec(),
delta: (event.amount_in - event.protocol_fee).to_signed_bytes_be(),
component_id: component_id.clone(),
});
// decrease by amount_out plus protocol fees
res.push(BalanceDelta {
ord: log.ordinal(),
tx: Some(log.receipt.transaction.into()),
token: event.token_out.to_vec(),
delta: event.amount_out.neg().to_signed_bytes_be(),
component_id: component_id.clone(),
});
} else if let Some(event) = abi::party_pool::events::SwapMint::match_and_decode(log) {
// increase by amount_in less the protocol fee
res.push(BalanceDelta {
ord: log.ordinal(),
tx: Some(log.receipt.transaction.into()),
token: event.token_in.to_vec(),
delta: (event.amount_in - event.protocol_fee).to_signed_bytes_be(),
component_id: component_id.clone(),
});
} else if let Some(event) = abi::party_pool::events::BurnSwap::match_and_decode(log) {
// decrease by amount_out plus the protocol fee
res.push(BalanceDelta {
ord: log.ordinal(),
tx: Some(log.receipt.transaction.into()),
token: event.token_out.to_vec(),
delta: (event.amount_out + event.protocol_fee).neg().to_signed_bytes_be(),
component_id: component_id.clone(),
});
} else if let Some(event) = abi::party_pool::events::Flash::match_and_decode(log) {
// increase by LP fees
res.push(BalanceDelta {
ord: log.ordinal(),
tx: Some(log.receipt.transaction.into()),
token: event.token.to_vec(),
delta: event.lp_fee.to_signed_bytes_be(),
component_id: component_id.clone(),
});
}
}
}
Ok(BlockBalanceDeltas { balance_deltas: res })
}
/// Aggregates relative balances values into absolute values
///
/// Aggregate the relative balances in an additive store since tycho-indexer expects
/// absolute balance inputs.
///
/// ## Note:
/// This method should usually not require any changes.
#[substreams::handlers::store]
pub fn store_balances(deltas: BlockBalanceDeltas, store: StoreAddBigInt) {
tycho_substreams::balances::store_balance_changes(deltas, store);
}
/// Aggregates protocol components and balance changes by transaction.
///
/// This is the main method that will aggregate all changes as well as extract all
/// relevant contract storage deltas.
///
/// ## Note:
/// You may have to change this method if your components have any default dynamic
/// attributes, or if you need any additional static contracts indexed.
#[substreams::handlers::map]
fn map_protocol_changes(
block: eth::v2::Block,
new_components: BlockTransactionProtocolComponents,
components_store: StoreGetRaw,
balance_store: StoreDeltas,
deltas: BlockBalanceDeltas,
) -> Result<BlockChanges, substreams::errors::Error> {
substreams::log::debug!(
"Processing block {} changes: {} new components, {} balance deltas",
block.number,
new_components.tx_components.len(),
deltas.balance_deltas.len()
);
// We merge contract changes by transaction (identified by transaction index)
// making it easy to sort them at the very end.
let mut transaction_changes: HashMap<_, TransactionChangesBuilder> = HashMap::new();
// Aggregate newly created components per tx
new_components
.tx_components
.iter()
.for_each(|tx_component| {
// initialise builder if not yet present for this tx
let tx = tx_component.tx.as_ref().unwrap();
let builder = transaction_changes
.entry(tx.index)
.or_insert_with(|| TransactionChangesBuilder::new(tx));
// iterate over individual components created within this tx
tx_component
.components
.iter()
.for_each(|component| {
builder.add_protocol_component(component);
});
});
// Aggregate absolute balances per transaction.
aggregate_balances_changes(balance_store, deltas)
.into_iter()
.for_each(|(_, (tx, balances))| {
let builder = transaction_changes
.entry(tx.index)
.or_insert_with(|| TransactionChangesBuilder::new(&tx));
balances
.values()
.for_each(|token_bc_map| {
token_bc_map
.values()
.for_each(|bc| builder.add_balance_change(bc))
});
});
// Extract and insert any storage changes that happened for any of the components.
extract_contract_changes_builder(
&block,
|addr| {
// we assume that the store holds contract addresses as keys and if it
// contains a value, that contract is of relevance.
components_store
.get_last(hex::encode(addr))
.is_some()
},
&mut transaction_changes,
);
// Process all `transaction_changes` for final output in the `BlockChanges`,
// sorted by transaction index (the key).
Ok(BlockChanges {
block: Some((&block).into()),
changes: transaction_changes
.drain()
.sorted_unstable_by_key(|(index, _)| *index)
.filter_map(|(_, builder)| builder.build())
.collect::<Vec<_>>(),
})
}

View File

@@ -0,0 +1,49 @@
use anyhow::anyhow;
use serde::Deserialize;
#[derive(Debug, Deserialize)]
struct StringParams {
planner: String,
info: String,
mint_impl: String,
swap_impl: String,
}
pub(crate) struct Params {
pub planner: Vec<u8>,
#[allow(dead_code)] // We keep the unused info field for future pricing/view operations
pub info: Vec<u8>,
pub mint_impl: Vec<u8>,
pub swap_impl: Vec<u8>,
}
impl StringParams {
pub fn parse(input: &str) -> anyhow::Result<Self> {
serde_qs::from_str(input).map_err(|e| anyhow!("Failed to parse query params: {}", e))
}
}
impl Params {
pub fn parse(input: &str) -> anyhow::Result<Self> {
let params = StringParams::parse(input)?;
fn decode_addr(s: &str) -> anyhow::Result<Vec<u8>> {
let s = s.strip_prefix("0x").unwrap_or(s);
if s.len() != 40 {
return Err(anyhow!("address must be 20 bytes (40 hex chars), got len={}", s.len()));
}
let bytes = hex::decode(s)?;
if bytes.len() != 20 {
return Err(anyhow!("decoded address is not 20 bytes"));
}
Ok(bytes)
}
Ok(Self {
planner: decode_addr(&params.planner)?,
info: decode_addr(&params.info)?,
mint_impl: decode_addr(&params.mint_impl)?,
swap_impl: decode_addr(&params.swap_impl)?,
})
}
}
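
A minimal sketch of how the module params string from the manifest is parsed; the query string below is the mainnet value from `ethereum-liquidityparty.yaml`:

```rust
// Sketch: parse the manifest's module params into raw 20-byte addresses.
let raw = "planner=0x42977f565971F6D288a05ddEbC87A17276F71A29&info=0x605F803cD27F5c1fa01440B2cbd5D3E4Cf7EE850&mint_impl=0xA0375403921e9B357E1BeD57bef3fA3FCE80acd0&swap_impl=0x6aA001e87F86E83bc4D569883332882cb47E2A13";
let params = Params::parse(raw).expect("valid params");
assert_eq!(params.planner.len(), 20);
assert_eq!(params.swap_impl.len(), 20);
```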

View File

@@ -0,0 +1,31 @@
use crate::abi;
use crate::params::Params;
use substreams_ethereum::pb::eth::v2::{Call, Log, TransactionTrace};
use substreams_ethereum::Event;
use tycho_substreams::models::{
ImplementationType, ProtocolComponent,
};
/// Potentially constructs a new ProtocolComponent given a call
///
/// This method is given each individual call within a transaction, the corresponding
/// logs emitted during that call as well as the full transaction trace.
///
/// If this call creates a component in your protocol please construct and return it
/// here. Otherwise, simply return None.
pub fn maybe_create_component(
params: &Params,
call: &Call,
_log: &Log,
_tx: &TransactionTrace,
) -> Option<ProtocolComponent> {
if call.address.as_slice() == params.planner {
if let Some(event) = abi::party_planner::events::PartyStarted::match_and_decode(_log) {
return Some(ProtocolComponent::new(&format!("0x{}", hex::encode(&event.pool)))
.with_tokens(&event.tokens.clone())
.with_contracts(&vec![event.pool.clone(), params.mint_impl.clone(), params.swap_impl.clone()])
.as_swap_type("liquidityparty_pool", ImplementationType::Vm));
}
}
None
}

View File

@@ -1,6 +1,5 @@
substreams_yaml_path: ./ethereum-maverick-v2.yaml
protocol_system: "vm:maverick_v2"
module_name: "map_protocol_changes"
adapter_contract: "MaverickV2SwapAdapter"
adapter_build_signature: "constructor(address,address)"
adapter_build_args: "0x0A7e848Aca42d879EF06507Fca0E7b33A0a63c1e,0xb40AfdB85a07f37aE217E7D6462e609900dD8D7A"

View File

@@ -0,0 +1,20 @@
substreams_yaml_path: ./ethereum-pancakeswap-v3.yaml
protocol_type_names:
- "pancakeswap_v3_pool"
module_name: "map_protocol_changes"
skip_balance_check: false
initialized_accounts:
tests:
- name: test_usdt_usdf_pool
start_block: 22187893
stop_block: 22187895
expected_components:
- id: "0x0d9ea0d5e3f400b1df8f695be04292308c041e77"
tokens:
- "0xfa2b947eec368f42195f24f36d2af29f7c24cec2" # USDf
- "0xdac17f958d2ee523a2206206994597c13d831ec7" # USDT
static_attributes:
fee: "0x64"
creation_tx: "0x87a9c643b0836ee7e7d7863d4f4d97310d14c438cb49bc8771c3d7a9d5a2749f"
skip_simulation: false
skip_execution: true

View File

@@ -6,7 +6,7 @@ adapter_contract: "SwapAdapter"
adapter_build_signature: "constructor(address)"
# A comma-separated list of args to be passed to the constructor of the Adapter contract
adapter_build_args: "0x0000000000000000000000000000000000000000"
# Whether or not the testing script should skip checking balances of the protocol components.
# Whether the testing script should skip checking balances of the protocol components.
# If set to `true` please always add a reason why it's skipped.
skip_balance_check: false
# Accounts that will be automatically initialized at test start
@@ -29,6 +29,8 @@ protocol_type_names:
- "type_name_1"
- "type_name_2"
# A list of tests.
# The name of the protocol system
protocol_system: "protocol_name"
tests:
# Name of the test
- name: test_pool_creation
@@ -47,9 +49,12 @@ tests:
- "0x6b175474e89094c44da98b954eedeac495271d0f"
static_attributes: { }
creation_tx: "0x20793bbf260912aae189d5d261ff003c9b9166da8191d8f9d63ff1c7722f3ac6"
# Whether or not the script should skip trying to simulate a swap on this component.
# Whether the script should skip trying to simulate a swap on this component.
# If set to `true` please always add a reason why it's skipped.
skip_simulation: false
# Whether the script should skip trying to simulate execution of a swap on this component.
# If set to `true` please always add a reason why it's skipped.
skip_execution: false
- name: test_something_else
start_block: 123
stop_block: 456
@@ -61,3 +66,4 @@ tests:
static_attributes: { }
creation_tx: "0xfac67ecbd423a5b915deff06045ec9343568edaec34ae95c43d35f2c018afdaa"
skip_simulation: true # If true, always add a reason
skip_execution: true # If true, always add a reason

View File

@@ -6,7 +6,7 @@ adapter_contract: "SwapAdapter"
adapter_build_signature: "constructor(address)"
# A comma-separated list of args to be passed to the constructor of the Adapter contract
adapter_build_args: "0x0000000000000000000000000000000000000000"
# Whether or not the testing script should skip checking balances of the protocol components.
# Whether the testing script should skip checking balances of the protocol components.
# If set to `true` please always add a reason why it's skipped.
skip_balance_check: false
# A list of accounts that need to be indexed to run the tests properly.
@@ -20,6 +20,8 @@ initialized_accounts:
protocol_type_names:
- "type_name_1"
- "type_name_2"
# The name of the protocol system
protocol_system: "protocol_name"
# A list of tests.
tests:
# Name of the test
@@ -39,9 +41,12 @@ tests:
- "0x6b175474e89094c44da98b954eedeac495271d0f"
static_attributes: { }
creation_tx: "0x20793bbf260912aae189d5d261ff003c9b9166da8191d8f9d63ff1c7722f3ac6"
# Whether or not the script should skip trying to simulate a swap on this component.
# Whether the script should skip trying to simulate a swap on this component.
# If set to `true` please always add a reason why it's skipped.
skip_simulation: false
# Whether the script should skip trying to simulate execution of a swap on this component.
# If set to `true` please always add a reason why it's skipped.
skip_execution: false
- name: test_something_else
start_block: 123
stop_block: 456
@@ -53,3 +58,4 @@ tests:
static_attributes: { }
creation_tx: "0xfac67ecbd423a5b915deff06045ec9343568edaec34ae95c43d35f2c018afdaa"
skip_simulation: true # If true, always add a reason
skip_execution: true # If true, always add a reason

View File

@@ -3,18 +3,18 @@ protocol_system: "uniswap_v2"
protocol_type_names:
- "uniswap_v2_pool"
module_name: "map_pool_events"
skip_balance_check: true
skip_balance_check: false
initialized_accounts:
tests:
- name: test_spx_weth_pool
start_block: 17924533
stop_block: 17924534
- name: test_wolf_weth_pool
start_block: 19701395
stop_block: 19701397
expected_components:
- id: "0x52c77b0CB827aFbAD022E6d6CAF2C44452eDbc39"
- id: "0x67324985b5014b36b960273353deb3d96f2f18c2"
tokens:
- "0xe0f63a424a4439cbe457d80e4f4b51ad25b2c56c"
- "0x67466BE17df832165F8C80a5A120CCc652bD7E69"
- "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2"
static_attributes:
fee: "0x1e"
creation_tx: "0xf09ac8ad7e21d15ded627a176ec718903baae5e5a9ce671a611bd852691b24f9"
creation_tx: "0x5e01ae1522722340871708a3c55e1395dda647a57767781230317319fa36ea7b"
skip_simulation: false

View File

@@ -1,2 +0,0 @@
export RPC_URL=https://mainnet.infura.io/v3/your-infura-key
export SUBSTREAMS_API_TOKEN="changeme"

View File

@@ -1,6 +0,0 @@
# Substreams Testing
This package provides a comprehensive testing suite for Substreams modules. The testing suite is designed to facilitate
end-to-end testing, ensuring that your Substreams modules function as expected.
For more information on testing, please refer to the [Testing documentation](https://docs.propellerheads.xyz/tycho/for-dexs/protocol-integration-sdk/indexing/general-integration-steps/4.-testing)

View File

View File

@@ -1,19 +0,0 @@
version: "3.1"
services:
db:
build:
context: .
dockerfile: postgres.Dockerfile
restart: "always"
environment:
POSTGRES_PASSWORD: mypassword
POSTGRES_DATABASE: tycho_indexer_0
POSTGRES_USERNAME: postgres
POSTGRESQL_SHARED_PRELOAD_LIBRARIES: pg_cron
ports:
- "5431:5432"
shm_size: "1gb"
volumes:
- postgres_data:/var/lib/postgresql/data
volumes:
postgres_data:
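
For context, the database exposed by this (now removed) compose file is the same one the test CLI below targets by default; a minimal connectivity check could look like this sketch (assuming the container is running locally and psycopg2 is installed):

import psycopg2

# Matches the compose mapping (host 5431 -> container 5432) and the CLI's default db-url
conn = psycopg2.connect("postgres://postgres:mypassword@localhost:5431/tycho_indexer_0")
with conn.cursor() as cur:
    cur.execute("SELECT version()")
    print(cur.fetchone()[0])
conn.close()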

View File

@@ -1,25 +0,0 @@
# This Dockerfile creates a custom postgres image used for CI and local deployment.
# This is required because we use some postgres extensions that aren't in the generic
# Postgres image, such as pg_partman or pg_cron.
# As an image with pg_partman already exists, we start from this one and add pg_cron
# and possibly other extensions on top of that.
FROM ghcr.io/dbsystel/postgresql-partman:15-5
ARG PGCRON_VERSION="1.6.2"
USER root
RUN apk update && apk add --no-cache wget build-base clang19 llvm19
RUN cd /tmp \
&& wget "https://github.com/citusdata/pg_cron/archive/refs/tags/v${PGCRON_VERSION}.tar.gz" \
&& tar zxf v${PGCRON_VERSION}.tar.gz \
&& cd pg_cron-${PGCRON_VERSION} \
&& make \
&& make install \
&& cd .. && rm -r pg_cron-${PGCRON_VERSION} v${PGCRON_VERSION}.tar.gz
# Add configuration to postgresql.conf template
# Start with postgres database, then switch to tycho_indexer_0 after it's created
RUN echo "shared_preload_libraries = 'pg_partman_bgw,pg_cron'" >> /usr/local/share/postgresql/postgresql.conf.sample \
&& echo "cron.database_name = 'tycho_indexer_0'" >> /usr/local/share/postgresql/postgresql.conf.sample
# Stay as root user for PostgreSQL to work properly
# USER 1001

View File

@@ -1,6 +0,0 @@
psycopg2==2.9.9
PyYAML==6.0.1
Requests==2.32.2
web3==5.31.3
git+https://github.com/propeller-heads/tycho-indexer.git@0.74.0#subdirectory=tycho-client-py
git+https://github.com/propeller-heads/tycho-simulation.git@0.118.0#subdirectory=tycho_simulation_py

View File

@@ -1,45 +0,0 @@
#!/bin/bash
# To run: ./setup_env.sh
set -e
command_exists() {
command -v "$1" >/dev/null 2>&1
}
# Check each dependency is installed
deps=("git" "rustc" "gcc" "openssl" "conda" "pip" "pg_config")
names=("git" "rust" "gcc" "openssl" "conda" "pip" "libpq")
for i in "${!deps[@]}"; do
if ! command_exists "${deps[$i]}"; then
echo "Error: '${names[$i]}' is not installed."
exit 1
fi
done
echo "All dependencies are installed. Proceeding with setup..."
# Variables
ENV_NAME="tycho-protocol-sdk-testing"
PYTHON_VERSION="3.9"
# Get the directory where this script is located
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$( cd "$SCRIPT_DIR/.." && pwd )" # Assuming the script is in a subdirectory of the root
REQUIREMENTS_FILE="$ROOT_DIR/testing/requirements.txt"
# Create conda environment
echo "Creating conda environment ${ENV_NAME} with Python ${PYTHON_VERSION}..."
conda create --name $ENV_NAME python=$PYTHON_VERSION -y
# Activate the environment
echo "Activating the environment..."
eval "$(conda shell.bash hook)"
conda activate $ENV_NAME
# Install the requirements
echo "Installing the requirements from ${REQUIREMENTS_FILE}..."
pip install -r $REQUIREMENTS_FILE --index-url https://pypi.org/simple
conda activate $ENV_NAME
echo "----------------------------------------"
echo "SETUP COMPLETE."
echo "Run 'conda activate $ENV_NAME' to activate the environment."

View File

@@ -1,65 +0,0 @@
import os
import subprocess
from typing import Optional
class AdapterContractBuilder:
def __init__(self, src_path: str):
self.src_path = src_path
def find_contract(self, adapter_contract: str):
"""
Finds the contract file in the provided source path.
:param adapter_contract: The contract name to be found.
:return: The path to the contract file.
"""
contract_path = os.path.join(
self.src_path,
"out",
f"{adapter_contract}.sol",
f"{adapter_contract}.evm.runtime",
)
if not os.path.exists(contract_path):
raise FileNotFoundError(f"Contract {adapter_contract} not found.")
return contract_path
def build_target(
self, adapter_contract: str, signature: Optional[str], args: Optional[str]
) -> str:
"""
Runs the buildRuntime Bash script in a subprocess with the provided arguments.
:param src_path: Path to the script to be executed.
:param adapter_contract: The contract name to be passed to the script.
:param signature: The constructor signature to be passed to the script.
:param args: The constructor arguments to be passed to the script.
:return: The path to the contract file.
"""
script_path = "scripts/buildRuntime.sh"
cmd = [script_path, "-c", adapter_contract]
if signature:
cmd.extend(["-s", signature, "-a", args])
try:
# Running the bash script with the provided arguments
result = subprocess.run(
cmd,
cwd=self.src_path,
capture_output=True,
text=True,
check=True,
)
# Print standard output and error for debugging
print("Output:\n", result.stdout)
if result.stderr:
print("Errors:\n", result.stderr)
return self.find_contract(adapter_contract)
except subprocess.CalledProcessError as e:
print(f"An error occurred: {e}")
print("Error Output:\n", e.stderr)

View File

@@ -1,29 +0,0 @@
import argparse
from runner import TestRunner
def main() -> None:
parser = argparse.ArgumentParser(
description="Run indexer within a specified range of blocks"
)
parser.add_argument("--package", type=str, help="Name of the package to test.")
parser.add_argument("--tycho-logs", action="store_true", help="Enable Tycho logs.")
parser.add_argument(
"--db-url",
default="postgres://postgres:mypassword@localhost:5431/tycho_indexer_0",
type=str,
help="Postgres database URL for the Tycho indexer. Default: postgres://postgres:mypassword@localhost:5431/tycho_indexer_0",
)
parser.add_argument(
"--vm-traces", action="store_true", help="Enable tracing during vm simulations."
)
args = parser.parse_args()
test_runner = TestRunner(
args.package, args.tycho_logs, db_url=args.db_url, vm_traces=args.vm_traces
)
test_runner.run_tests()
if __name__ == "__main__":
main()

View File

@@ -1,61 +0,0 @@
import os
from web3 import Web3
native_aliases = [
"0x0000000000000000000000000000000000000000",
"0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee",
]
erc20_abi = [
{
"constant": True,
"inputs": [{"name": "_owner", "type": "address"}],
"name": "balanceOf",
"outputs": [{"name": "balance", "type": "uint256"}],
"type": "function",
}
]
def get_token_balance(token_address, wallet_address, block_number):
rpc_url = os.getenv("RPC_URL")
if rpc_url is None:
raise EnvironmentError("RPC_URL environment variable not set")
web3 = Web3(Web3.HTTPProvider(rpc_url))
if not web3.isConnected():
raise ConnectionError("Failed to connect to the Ethereum node")
# Check if the token_address is a native token alias
if token_address.lower() in native_aliases:
balance = web3.eth.get_balance(
Web3.toChecksumAddress(wallet_address), block_identifier=block_number
)
else:
contract = web3.eth.contract(
address=Web3.toChecksumAddress(token_address), abi=erc20_abi
)
balance = contract.functions.balanceOf(
Web3.toChecksumAddress(wallet_address)
).call(block_identifier=block_number)
return balance
def get_block_header(block_number):
rpc_url = os.getenv("RPC_URL")
if rpc_url is None:
raise EnvironmentError("RPC_URL environment variable not set")
web3 = Web3(Web3.HTTPProvider(rpc_url))
if not web3.isConnected():
raise ConnectionError("Failed to connect to the Ethereum node")
block = web3.eth.get_block(block_number)
return block

View File

@@ -1,128 +0,0 @@
import difflib
from hexbytes import HexBytes
from pydantic import BaseModel, Field, validator
from typing import List, Dict, Optional
class ProtocolComponentExpectation(BaseModel):
"""Represents a ProtocolComponent with its main attributes."""
id: str = Field(..., description="Identifier of the protocol component")
tokens: List[HexBytes] = Field(
...,
description="List of token addresses associated with the protocol component",
)
static_attributes: Optional[Dict[str, HexBytes]] = Field(
default_factory=dict, description="Static attributes of the protocol component"
)
creation_tx: HexBytes = Field(
..., description="Hash of the transaction that created the protocol component"
)
@validator("id", pre=True, always=True)
def lower_id(cls, v):
return v.lower()
@validator("tokens", pre=True, always=True)
def convert_tokens_to_hexbytes(cls, v):
return sorted(HexBytes(t.lower()) for t in v)
@validator("static_attributes", pre=True, always=True)
def convert_static_attributes_to_hexbytes(cls, v):
if v:
return {
k: v[k] if isinstance(v[k], HexBytes) else HexBytes(v[k].lower())
for k in v
}
return {}
@validator("creation_tx", pre=True, always=True)
def convert_creation_tx_to_hexbytes(cls, v):
return HexBytes(v.lower())
def compare(
self, other: "ProtocolComponentExpectation", colorize_output: bool = True
) -> Optional[str]:
"""Compares the current instance with another ProtocolComponent instance and returns a message with the
differences or None if there are no differences."""
def colorize_diff(diff):
colored_diff = []
for line in diff:
if line.startswith("-"):
colored_diff.append(f"\033[91m{line}\033[0m") # Red
elif line.startswith("+"):
colored_diff.append(f"\033[92m{line}\033[0m") # Green
elif line.startswith("?"):
colored_diff.append(f"\033[93m{line}\033[0m") # Yellow
else:
colored_diff.append(line)
return "\n".join(colored_diff)
differences = []
for field_name, field_value in self.__dict__.items():
other_value = getattr(other, field_name, None)
if field_value != other_value:
diff = list(difflib.ndiff([str(field_value)], [str(other_value)]))
highlighted_diff = (
colorize_diff(diff) if colorize_output else "\n".join(diff)
)
differences.append(
f"Field '{field_name}' mismatch for {self.id}:\n{highlighted_diff}"
)
if not differences:
return None
return "\n".join(differences)
class ProtocolComponentWithTestConfig(ProtocolComponentExpectation):
"""Represents a ProtocolComponent with its main attributes and test configuration."""
skip_simulation: Optional[bool] = Field(
False,
description="Flag indicating whether to skip simulation for this component",
)
class IntegrationTest(BaseModel):
"""Configuration for an individual test."""
name: str = Field(..., description="Name of the test")
start_block: int = Field(..., description="Starting block number for the test")
stop_block: int = Field(..., description="Stopping block number for the test")
initialized_accounts: Optional[List[str]] = Field(
None, description="List of initialized account addresses"
)
expected_components: List[ProtocolComponentWithTestConfig] = Field(
..., description="List of protocol components expected in the indexed state"
)
class IntegrationTestsConfig(BaseModel):
"""Main integration test configuration."""
substreams_yaml_path: str = Field(
"./substreams.yaml", description="Path of the Substreams YAML file"
)
adapter_contract: str = Field(
..., description="Name of the SwapAdapter contract for this protocol"
)
adapter_build_signature: Optional[str] = Field(
None, description="SwapAdapter's constructor signature"
)
adapter_build_args: Optional[str] = Field(
None, description="Arguments for the SwapAdapter constructor"
)
initialized_accounts: Optional[List[str]] = Field(
None,
description="List of initialized account addresses. These accounts will be initialized for every tests",
)
skip_balance_check: bool = Field(
..., description="Flag to skip balance check for all tests"
)
protocol_type_names: List[str] = Field(
..., description="List of protocol type names for the tested protocol"
)
tests: List[IntegrationTest] = Field(..., description="List of integration tests")

View File

@@ -1,418 +0,0 @@
import itertools
import os
import shutil
import subprocess
import traceback
from datetime import datetime
from decimal import Decimal
from pathlib import Path
from typing import Optional, Callable, Any
import yaml
from tycho_simulation_py.evm.decoders import ThirdPartyPoolTychoDecoder
from tycho_simulation_py.evm.storage import TychoDBSingleton
from tycho_simulation_py.models import EVMBlock
from pydantic import BaseModel
from tycho_indexer_client.dto import (
Chain,
ProtocolComponentsParams,
ProtocolStateParams,
ContractStateParams,
ProtocolComponent,
ResponseProtocolState,
HexBytes,
ResponseAccount,
Snapshot,
TracedEntryPointParams,
)
from tycho_indexer_client.rpc_client import TychoRPCClient
from models import (
IntegrationTestsConfig,
ProtocolComponentWithTestConfig,
ProtocolComponentExpectation,
)
from adapter_builder import AdapterContractBuilder
from evm import get_token_balance, get_block_header
from tycho import TychoRunner
from utils import build_snapshot_message, token_factory
class TestResult:
def __init__(
self, success: bool, step: Optional[str] = None, message: Optional[str] = None
):
self.success = success
self.step = step
self.message = message
@classmethod
def Passed(cls):
return cls(success=True)
@classmethod
def Failed(cls, step: str, message: str):
return cls(success=False, step=step, message=message)
def parse_config(yaml_path: str) -> IntegrationTestsConfig:
with open(yaml_path, "r") as file:
yaml_content = yaml.safe_load(file)
return IntegrationTestsConfig(**yaml_content)
class SimulationFailure(BaseModel):
pool_id: str
sell_token: str
buy_token: str
error: str
class TestRunner:
def __init__(
self, package: str, with_binary_logs: bool, db_url: str, vm_traces: bool
):
self.repo_root = os.getcwd()
config_path = os.path.join(
self.repo_root, "substreams", package, "integration_test.tycho.yaml"
)
self.config: IntegrationTestsConfig = parse_config(config_path)
self.spkg_src = os.path.join(self.repo_root, "substreams", package)
self.adapter_contract_builder = AdapterContractBuilder(
os.path.join(self.repo_root, "evm")
)
self.tycho_runner = TychoRunner(
db_url, with_binary_logs, self.config.initialized_accounts
)
self.tycho_rpc_client = TychoRPCClient()
self._token_factory_func = token_factory(self.tycho_rpc_client)
self.db_url = db_url
self._vm_traces = vm_traces
self._chain = Chain.ethereum
def run_tests(self) -> None:
"""Run all tests specified in the configuration."""
print(f"Running {len(self.config.tests)} tests ...\n")
print("--------------------------------\n")
failed_tests: list[str] = []
count = 1
for test in self.config.tests:
print(f"TEST {count}: {test.name}")
self.tycho_runner.empty_database(self.db_url)
spkg_path = self.build_spkg(
os.path.join(self.spkg_src, self.config.substreams_yaml_path),
lambda data: self.update_initial_block(data, test.start_block),
)
self.tycho_runner.run_tycho(
spkg_path,
test.start_block,
test.stop_block,
self.config.protocol_type_names,
test.initialized_accounts or [],
)
result: TestResult = self.tycho_runner.run_with_rpc_server(
self.validate_state,
test.expected_components,
test.stop_block,
test.initialized_accounts or [],
)
if result.success:
print(f"\n{test.name} passed.\n")
else:
failed_tests.append(test.name)
print(f"\n❗️ {test.name} failed on {result.step}: {result.message}\n")
print("--------------------------------\n")
count += 1
print(
"\nTests finished! \n"
f"RESULTS: {len(self.config.tests) - len(failed_tests)}/{len(self.config.tests)} passed.\n"
)
if failed_tests:
print("Failed tests:")
for failed_test in failed_tests:
print(f"- {failed_test}")
print("\n")
def validate_state(
self,
expected_components: list[ProtocolComponentWithTestConfig],
stop_block: int,
initialized_accounts: list[str],
) -> TestResult:
"""Validate the current protocol state against the expected state."""
protocol_components = self.tycho_rpc_client.get_protocol_components(
ProtocolComponentsParams(protocol_system="test_protocol")
).protocol_components
protocol_states = self.tycho_rpc_client.get_protocol_state(
ProtocolStateParams(protocol_system="test_protocol")
).states
components_by_id: dict[str, ProtocolComponent] = {
component.id: component for component in protocol_components
}
try:
# Step 1: Validate the protocol components
step = "Protocol component validation"
for expected_component in expected_components:
comp_id = expected_component.id.lower()
if comp_id not in components_by_id:
return TestResult.Failed(
step=step,
message=f"'{comp_id}' not found in protocol components. "
f"Available components: {set(components_by_id.keys())}",
)
diff = ProtocolComponentExpectation(
**components_by_id[comp_id].dict()
).compare(ProtocolComponentExpectation(**expected_component.dict()))
if diff is not None:
return TestResult.Failed(step=step, message=diff)
print(f"\n{step} passed.\n")
# Step 2: Validate the token balances
step = "Token balance validation"
if not self.config.skip_balance_check:
for component in protocol_components:
comp_id = component.id.lower()
for token in component.tokens:
state = next(
(
s
for s in protocol_states
if s.component_id.lower() == comp_id
),
None,
)
if state:
balance_hex = state.balances.get(token, HexBytes("0x00"))
else:
balance_hex = HexBytes("0x00")
tycho_balance = int(balance_hex)
node_balance = get_token_balance(token, comp_id, stop_block)
if node_balance != tycho_balance:
return TestResult.Failed(
step=step,
message=f"Balance mismatch for {comp_id}:{token} at block {stop_block}: got {node_balance} "
f"from rpc call and {tycho_balance} from Substreams",
)
print(f"\n{step} passed.\n")
else:
print(f"\n {step} skipped. \n")
# Step 3: Validate the simulation
step = "Simulation validation"
# Loads from Tycho-Indexer the state of all the contracts that are related to the protocol components.
simulation_components: list[str] = [
c.id for c in expected_components if c.skip_simulation is False
]
related_contracts: set[str] = set()
for account in self.config.initialized_accounts or []:
related_contracts.add(account)
for account in initialized_accounts or []:
related_contracts.add(account)
# Collect all contracts that are related to the simulation components
filtered_components: list[ProtocolComponent] = []
component_related_contracts: set[str] = set()
for component in protocol_components:
# Filter out components that are not set to be used for the simulation
if component.id in simulation_components:
# Collect component contracts
for a in component.contract_ids:
component_related_contracts.add(a.hex())
# Collect DCI detected contracts
traces_results = self.tycho_rpc_client.get_traced_entry_points(
TracedEntryPointParams(
protocol_system="test_protocol",
component_ids=[component.id],
)
).traced_entry_points.values()
for traces in traces_results:
for _, trace in traces:
component_related_contracts.update(
trace["accessed_slots"].keys()
)
filtered_components.append(component)
# Check if any of the initialized contracts are not listed as component contract dependencies
unspecified_contracts: list[str] = [
c for c in related_contracts if c not in component_related_contracts
]
related_contracts.update(component_related_contracts)
contract_states = self.tycho_rpc_client.get_contract_state(
ContractStateParams(contract_ids=list(related_contracts))
).accounts
if len(filtered_components):
if len(unspecified_contracts):
print(
f"⚠️ The following initialized contracts are not listed as component contract dependencies: {unspecified_contracts}. "
f"Please ensure that, if they are required for this component's simulation, they are specified under the Protocol Component's contract field."
)
simulation_failures = self.simulate_get_amount_out(
stop_block, protocol_states, filtered_components, contract_states
)
if len(simulation_failures):
error_msgs: list[str] = []
for pool_id, failures in simulation_failures.items():
failures_formatted: list[str] = [
f"{f.sell_token} -> {f.buy_token}: {f.error}"
for f in failures
]
error_msgs.append(
f"Pool {pool_id} failed simulations: {', '.join(failures_formatted)}"
)
return TestResult.Failed(step=step, message="\n".join(error_msgs))
print(f"\n{step} passed.\n")
else:
print(f"\n {step} skipped.\n")
return TestResult.Passed()
except Exception as e:
error_message = f"An error occurred: {str(e)}\n" + traceback.format_exc()
return TestResult.Failed(step=step, message=error_message)
def simulate_get_amount_out(
self,
block_number: int,
protocol_states: list[ResponseProtocolState],
protocol_components: list[ProtocolComponent],
contract_states: list[ResponseAccount],
) -> dict[str, list[SimulationFailure]]:
TychoDBSingleton.initialize()
block_header = get_block_header(block_number)
block: EVMBlock = EVMBlock(
id=block_number,
ts=datetime.fromtimestamp(block_header.timestamp),
hash_=block_header.hash.hex(),
)
failed_simulations: dict[str, list[SimulationFailure]] = {}
try:
adapter_contract = self.adapter_contract_builder.find_contract(
self.config.adapter_contract
)
except FileNotFoundError:
adapter_contract = self.adapter_contract_builder.build_target(
self.config.adapter_contract,
self.config.adapter_build_signature,
self.config.adapter_build_args,
)
TychoDBSingleton.clear_instance()
decoder = ThirdPartyPoolTychoDecoder(
token_factory_func=self._token_factory_func,
adapter_contract=adapter_contract,
minimum_gas=0,
trace=self._vm_traces,
)
snapshot_message: Snapshot = build_snapshot_message(
protocol_states, protocol_components, contract_states
)
decoded = decoder.decode_snapshot(snapshot_message, block)
for component in protocol_components:
if component.id not in decoded:
failed_simulations[component.id] = [
SimulationFailure(
pool_id=component.id,
sell_token=component.tokens[0].hex(),
buy_token=component.tokens[1].hex(),
error="Pool not found in decoded state.",
)
]
for pool_state in decoded.values():
pool_id = pool_state.id_
if not pool_state.balances:
raise ValueError(f"Missing balances for pool {pool_id}")
for sell_token, buy_token in itertools.permutations(pool_state.tokens, 2):
for prctg in ["0.001", "0.01", "0.1"]:
# Try selling 0.1%, 1% and 10% of the protocol balance
try:
sell_amount = (
Decimal(prctg) * pool_state.balances[sell_token.address]
)
amount_out, gas_used, _ = pool_state.get_amount_out(
sell_token, sell_amount, buy_token
)
print(
f"Amount out for {pool_id}: {sell_amount} {sell_token} -> {amount_out} {buy_token} - "
f"Gas used: {gas_used}"
)
except Exception as e:
print(
f"Error simulating get_amount_out for {pool_id}: {sell_token} -> {buy_token} at block {block_number}. "
f"Error: {e}"
)
if pool_id not in failed_simulations:
failed_simulations[pool_id] = []
failed_simulations[pool_id].append(
SimulationFailure(
pool_id=pool_id,
sell_token=str(sell_token),
buy_token=str(buy_token),
error=str(e),
)
)
continue
return failed_simulations
@staticmethod
def build_spkg(
yaml_file_path: str, modify_func: Callable[[dict[str, Any]], None]
) -> str:
"""Build a Substreams package with modifications to the YAML file."""
backup_file_path = f"{yaml_file_path}.backup"
shutil.copy(yaml_file_path, backup_file_path)
with open(yaml_file_path, "r") as file:
data = yaml.safe_load(file)
modify_func(data)
spkg_name = f"{yaml_file_path.rsplit('/', 1)[0]}/{data['package']['name'].replace('_', '-')}-{data['package']['version']}.spkg"
with open(yaml_file_path, "w") as file:
yaml.dump(data, file, default_flow_style=False)
try:
result = subprocess.run(
["substreams", "pack", yaml_file_path], capture_output=True, text=True
)
if result.returncode != 0:
print("Substreams pack command failed:", result.stderr)
except Exception as e:
print(f"Error running substreams pack command: {e}")
shutil.copy(backup_file_path, yaml_file_path)
Path(backup_file_path).unlink()
return spkg_name
@staticmethod
def update_initial_block(data: dict[str, Any], start_block: int) -> None:
"""Update the initial block for all modules in the configuration data."""
for module in data["modules"]:
module["initialBlock"] = start_block

View File

@@ -1,211 +0,0 @@
import signal
import subprocess
import threading
import time
import psycopg2
from psycopg2 import sql
import os
def find_binary_file(file_name):
# Define usual locations for binary files in Unix-based systems
locations = [
"/bin",
"/sbin",
"/usr/bin",
"/usr/sbin",
"/usr/local/bin",
"/usr/local/sbin",
]
# Add user's local bin directory if it exists
home = os.path.expanduser("~")
if os.path.exists(home + "/.local/bin"):
locations.append(home + "/.local/bin")
# Check each location
for location in locations:
potential_path = os.path.join(location, file_name)
if os.path.exists(potential_path):
return potential_path
# If binary is not found in the usual locations, return None
searched_paths = "\n".join(locations)
raise RuntimeError(
f"Unable to locate {file_name} binary. Searched paths:\n{searched_paths}"
)
binary_path = find_binary_file("tycho-indexer")
class TychoRunner:
def __init__(
self,
db_url: str,
with_binary_logs: bool = False,
initialized_accounts: list[str] = None,
):
self.with_binary_logs = with_binary_logs
self._db_url = db_url
self._initialized_accounts = initialized_accounts or []
def run_tycho(
self,
spkg_path: str,
start_block: int,
end_block: int,
protocol_type_names: list,
initialized_accounts: list,
protocol_system: str = "test_protocol",
) -> None:
"""Run the Tycho indexer with the specified SPKG and block range."""
env = os.environ.copy()
env["RUST_LOG"] = "tycho_indexer=info"
all_accounts = self._initialized_accounts + initialized_accounts
try:
process = subprocess.Popen(
[
binary_path,
"--database-url",
self._db_url,
"run",
"--spkg",
spkg_path,
"--module",
"map_protocol_changes",
"--protocol-type-names",
",".join(protocol_type_names),
"--protocol-system",
protocol_system,
"--start-block",
str(start_block),
"--stop-block",
# +2 is to make up for the cache on the indexer side.
str(end_block + 2),
"--dci-plugin",
"rpc",
]
+ (
[
"--initialized-accounts",
",".join(all_accounts),
"--initialization-block",
str(start_block),
]
if all_accounts
else []
),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
bufsize=1,
env=env,
)
with process.stdout:
for line in iter(process.stdout.readline, ""):
if line and self.with_binary_logs:
print(line.strip())
with process.stderr:
for line in iter(process.stderr.readline, ""):
if line and self.with_binary_logs:
print(line.strip())
process.wait()
except Exception as e:
print(f"Error running Tycho indexer: {e}")
def run_with_rpc_server(self, func: callable, *args, **kwargs):
"""
Run a function with Tycho RPC running in background.
This function is a wrapper around a target function. It starts Tycho RPC as a background task, executes the target function and stops Tycho RPC.
"""
stop_event = threading.Event()
process = None
def run_rpc_server():
nonlocal process
try:
env = os.environ.copy()
env["RUST_LOG"] = "info"
process = subprocess.Popen(
[binary_path, "--database-url", self._db_url, "rpc"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
bufsize=1,
env=env,
)
# Read remaining stdout and stderr
if self.with_binary_logs:
for output in process.stdout:
if output:
print(output.strip())
for error_output in process.stderr:
if error_output:
print(error_output.strip())
process.wait()
if process.returncode != 0:
print("Command failed with return code:", process.returncode)
except Exception as e:
print(f"An error occurred while running the command: {e}")
finally:
if process and process.poll() is None:
process.terminate()
process.wait()
# Start the RPC server in a separate thread
rpc_thread = threading.Thread(target=run_rpc_server)
rpc_thread.start()
time.sleep(3) # Wait for the RPC server to start
try:
# Run the provided function
result = func(*args, **kwargs)
return result
finally:
stop_event.set()
if process and process.poll() is None:
process.send_signal(signal.SIGINT)
if rpc_thread.is_alive():
rpc_thread.join()
@staticmethod
def empty_database(db_url: str) -> None:
"""Drop and recreate the Tycho indexer database."""
try:
conn = psycopg2.connect(db_url[: db_url.rfind("/")])
conn.autocommit = True
cursor = conn.cursor()
cursor.execute(
sql.SQL("DROP DATABASE IF EXISTS {} WITH (FORCE)").format(
sql.Identifier("tycho_indexer_0")
)
)
cursor.execute(
sql.SQL("CREATE DATABASE {}").format(sql.Identifier("tycho_indexer_0"))
)
except psycopg2.Error as e:
print(f"Database error: {e}")
finally:
if cursor:
cursor.close()
if conn:
conn.close()

View File

@@ -1,79 +0,0 @@
from logging import getLogger
from typing import Union
from eth_utils import to_checksum_address
from tycho_simulation_py.models import EthereumToken
from tycho_indexer_client.dto import (
ResponseProtocolState,
ProtocolComponent,
ResponseAccount,
ComponentWithState,
Snapshot,
HexBytes,
TokensParams,
PaginationParams,
)
from tycho_indexer_client.rpc_client import TychoRPCClient
log = getLogger(__name__)
def build_snapshot_message(
protocol_states: list[ResponseProtocolState],
protocol_components: list[ProtocolComponent],
account_states: list[ResponseAccount],
) -> Snapshot:
vm_storage = {state.address: state for state in account_states}
states = {}
for component in protocol_components:
pool_id = component.id
states[pool_id] = {"component": component}
for state in protocol_states:
pool_id = state.component_id
if pool_id not in states:
continue
states[pool_id]["state"] = state
states = {id_: ComponentWithState(**state) for id_, state in states.items()}
return Snapshot(states=states, vm_storage=vm_storage)
def token_factory(rpc_client: TychoRPCClient) -> callable(HexBytes):
_client = rpc_client
_token_cache: dict[str, EthereumToken] = {}
def factory(requested_addresses: Union[str, list[str]]) -> list[EthereumToken]:
if not isinstance(requested_addresses, list):
requested_addresses = [to_checksum_address(requested_addresses)]
else:
requested_addresses = [to_checksum_address(a) for a in requested_addresses]
response = dict()
to_fetch = []
for address in requested_addresses:
if address in _token_cache:
response[address] = _token_cache[address]
else:
to_fetch.append(address)
if to_fetch:
pagination = PaginationParams(page_size=len(to_fetch), page=0)
params = TokensParams(token_addresses=to_fetch, pagination=pagination)
tokens = _client.get_tokens(params).tokens
for token in tokens:
address = to_checksum_address(token.address)
eth_token = EthereumToken(
symbol=token.symbol,
address=address,
decimals=token.decimals,
gas=token.gas,
)
response[address] = eth_token
_token_cache[address] = eth_token
return [response[address] for address in requested_addresses]
return factory