Compare commits
1 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 4936150c3b |  |
File diff suppressed because one or more lines are too long (6 files)
@@ -3,7 +3,7 @@ import logging
from contextvars import ContextVar
from datetime import datetime, timezone
from decimal import Decimal
from typing import Callable, Any
from typing import Callable, Any, Union, Optional

from web3 import AsyncWeb3

@@ -2,12 +2,12 @@ import asyncio
import logging
from typing import Union

from sqlalchemy import select, func
from sqlalchemy import select, func, text
from typing_extensions import Optional
from web3.exceptions import ContractLogicError
from web3.types import EventData

from dexorder import db, dec, NATIVE_TOKEN, from_timestamp, config, ADDRESS_0, now, Account
from dexorder import db, dec, NATIVE_TOKEN, from_timestamp, config, ADDRESS_0, now, Account, metric
from dexorder.base import TransactionReceiptDict
from dexorder.base.chain import current_chain
from dexorder.blocks import get_block_timestamp, get_block, current_block

@@ -2,15 +2,16 @@ import logging
from typing import TypedDict

from dexorder import db
from dexorder.base import OldPoolDict, OldGMXDict, OldTokenDict
from dexorder.base.chain import current_chain
from dexorder.blockstate import BlockDict
from dexorder.database.model import Pool, Token
from dexorder.database.model import Pool
from dexorder.database.model.pool import OldPoolDict
from dexorder.database.model.token import Token, OldTokenDict

log = logging.getLogger(__name__)

# address_metadata is a polymorphic BlockDict which maps address keys to a dict of metadata describing the address
# used for Tokens and Pools and GMX Markets
# used for Tokens and Pools

class AddressMetadata (TypedDict):
@@ -44,10 +45,8 @@ def save_addrmeta(address: str, meta: AddressMetadata):
pool.quote = updated.quote
pool.fee = updated.fee
pool.decimals = updated.decimals
elif meta['type'] == 'GMX':
pass
else:
log.warning(f'Address {address} had unknown metadata type {meta["type"]}')

address_metadata: BlockDict[str,OldPoolDict|OldTokenDict|OldGMXDict] = BlockDict('a', redis=True, db=True, finalize_cb=save_addrmeta)
address_metadata: BlockDict[str,AddressMetadata] = BlockDict('a', redis=True, db=True, finalize_cb=save_addrmeta)

@@ -1,6 +1,6 @@
from abc import abstractmethod
from dataclasses import dataclass
from typing import TypedDict, Union, Any, Callable
from dexorder.base.metadecl import OldTokenDict, OldPoolDict, OldGMXDict
from typing import TypedDict, Union, Type, Any, Callable

Address = str
Quantity = Union[str,int]

@@ -42,10 +42,12 @@ class Account (LocalAccount):
# log.debug(f'available accounts: {Account._pool.qsize()}')
try:
async with asyncio.timeout(1):
result = await Account._pool.get()
result: "Account" = await Account._pool.get()
except asyncio.TimeoutError:
log.error('waiting for an available account')
result = await Account._pool.get()
# mark as out of pool
result._in_pool = False
metric.account_available.set(Account._pool.qsize())
return result

@@ -59,17 +61,20 @@ class Account (LocalAccount):
if Account._main_account is None:
Account._main_account = account
Account._pool.put_nowait(account)
account._in_pool = True # this account is now in the pool
Account._all.append(account)
metric.account_available.set(Account._pool.qsize())
metric.account_total.set(len(Account._all))
log.info(f'Account pool {[a.address for a in Account._all]}')

def __init__(self, local_account: LocalAccount): # todo chain_id?
super().__init__(local_account._key_obj, local_account._publicapi) # from digging into the source code
def __init__(self, local_account: LocalAccount): # todo chain_id?
super().__init__(local_account._key_obj, local_account._publicapi) # from digging into the source code
self.chain_id = current_chain.get().id
self.signing_middleware = construct_sign_and_send_raw_middleware(self)
self._nonce: Optional[int] = None
self.tx_id: Optional[str] = None # current transaction id
# release() idempotency tracking
self._in_pool: bool = False

async def next_nonce(self):
if self._nonce is None:
@@ -86,8 +91,21 @@ class Account (LocalAccount):
return current_w3.get().eth.get_balance(self.address)

def release(self):
metric.account_available.set(Account._pool.qsize() + 1)
"""
Return this Account to the pool.

Idempotent: calling release() multiple times without a new acquire()
will only enqueue the account once.
"""
# If we're already in the pool, do nothing.
if self._in_pool:
# Optional debug log; comment out if too noisy.
# log.debug(f'Account {self.address} already in pool; ignoring extra release()')
return

Account._pool.put_nowait(self)
self._in_pool = True
metric.account_available.set(Account._pool.qsize())

def __str__(self):
return self.address
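A minimal usage sketch of the pool discipline introduced above, assuming the Account.acquire()/release() API and the _in_pool flag shown in this diff; transact_and_wait and build_tx are hypothetical names, not part of the codebase, and this is not the exact transact_wrapper flow.

async def transact_and_wait(build_tx):
    # Acquire an account from the shared pool (may briefly wait if none are free).
    account = await Account.acquire()
    try:
        tx = await build_tx(account)
        receipt = await tx.wait()  # wait() may also release the account it holds
        return receipt
    finally:
        # Safe even if wait() already released: release() is idempotent and only
        # re-enqueues the account when it is not already in the pool.
        account.release()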
@@ -1,65 +0,0 @@
import logging
from typing import TypedDict, NotRequired

log = logging.getLogger(__name__)

class TokenDict (TypedDict):
"""
Token metadata dictionary

Fields:
a: The address of the token.
n: The name of the token.
s: The symbol of the token.
d: Number of decimals.
l: Indicates if approved ("listed").
g: gmx synthetic flag
x: Optional extra data.
"""

a: str
n: str
s: str
d: int
l: NotRequired[bool]
g: NotRequired[bool]
x: NotRequired[dict]

# OldTokenDict is the primary dict we use in-memory, with basic JSON-able types

class OldTokenDict (TypedDict):
type: str
chain: int
address: str
name: str
symbol: str
decimals: int
approved: bool # whether this token is in the whitelist or not
x: NotRequired[dict] # extra data

class OldPoolDict (TypedDict):
type: str
chain: int
address: str
exchange: int
base: str
quote: str
fee: int
decimals: int

class OldGMXDict (TypedDict):
type: str
chain: int
address: str
exchange: int
index: str
long: str
short: str
leverage: int
decimals: int
@@ -37,10 +37,9 @@ class SwapOrderState (Enum):

class Exchange (Enum):
Unknown = -1
OTC = 0
UniswapV3 = 1
GMX = 2
Unknown = -1
UniswapV2 = 0
UniswapV3 = 1

@dataclass
class Route:
@@ -76,20 +75,6 @@ class Line:
return self.intercept, self.slope

@dataclass
class GMXOrder:
reserve_amount: int # todo
is_long: bool
is_increase: bool

@staticmethod
def load(obj: Optional[tuple[int,bool,bool]]):
return GMXOrder(*obj) if obj is not None else None

def dump(self):
return self.reserve_amount, self.is_long, self.is_increase

@dataclass
class SwapOrder:
tokenIn: str
@@ -102,7 +87,6 @@ class SwapOrder:
inverted: bool
conditionalOrder: int
tranches: list['Tranche']
gmx: Optional[GMXOrder] = None

@property
def min_input_amount(self):
@@ -111,7 +95,7 @@ class SwapOrder:
@staticmethod
def load(obj):
return SwapOrder(obj[0], obj[1], Route.load(obj[2]), int(obj[3]), int(obj[4]), obj[5], obj[6], obj[7], obj[8],
[Tranche.load(t) for t in obj[9]], GMXOrder.load(obj[10]) if len(obj) > 10 else None)
[Tranche.load(t) for t in obj[9]])

@staticmethod
def load_from_chain(obj):
@@ -122,8 +106,7 @@ class SwapOrder:
return (self.tokenIn, self.tokenOut, self.route.dump(),
str(self.amount), str(self.minFillAmount), self.amountIsInput,
self.outputDirectlyToOwner, self.inverted, self.conditionalOrder,
[t.dump() for t in self.tranches],
self.gmx.dump() if self.gmx is not None else None)
[t.dump() for t in self.tranches])

def __str__(self):
msg = f'''
@@ -14,7 +14,7 @@ from dexorder.blockstate.fork import Fork
from dexorder.configuration import parse_args
from dexorder.contract import get_contract_event
from dexorder.database import db
from dexorder.event_handler import handle_uniswap_swaps
from dexorder.event_handler import check_ohlc_rollover, handle_uniswap_swaps
from dexorder.memcache import memcache
from dexorder.memcache.memcache_state import RedisState, publish_all
from dexorder.ohlc import recent_ohlcs, ohlc_save, ohlcs
@@ -58,7 +58,7 @@ async def main():

runner = BlockStateRunner(state, publish_all=publish_all if redis_state else None, timer_period=0)
runner.add_event_trigger(handle_uniswap_swaps, get_contract_event('IUniswapV3PoolEvents', 'Swap'), multi=True)
# runner.add_callback(check_ohlc_rollover)
runner.add_callback(check_ohlc_rollover)
runner.on_promotion.append(finalize_callback)
if db:
# noinspection PyUnboundLocalVariable

@@ -13,7 +13,6 @@ from omegaconf import OmegaConf

from dexorder import configuration, config
from dexorder.alert import init_alerts
from dexorder.configuration.load import config_file
from dexorder.configuration.schema import Config
from dexorder.metric.metric_startup import start_metrics_server

@@ -66,7 +65,6 @@ def execute(main:Callable[...,Coroutine[Any,Any,Any]], shutdown=None, *, parse_l
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
log.setLevel(logging.DEBUG)
log.info('Logging configured to default')
log.info(f'Loaded main config from {config_file}')
xconf = None
if parse_args:
# NOTE: there is special command-line argument handling in config/load.py to get a config filename.

@@ -11,9 +11,8 @@ from dexorder.bin.executable import execute
from dexorder.blocks import get_block_timestamp, get_block
from dexorder.blockstate.fork import current_fork
from dexorder.configuration import parse_args
from dexorder.event_handler import wire_dexorder_debug
from dexorder.contract import get_contract_event
from dexorder.final_ohlc import FinalOHLCRepository
from dexorder.gmx import gmx_wire_runner_late, gmx_wire_runner_early
from dexorder.pools import get_uniswap_data
from dexorder.util import hexstr
from dexorder.util.shutdown import fatal
@@ -57,13 +56,8 @@ async def main():
ohlcs = FinalOHLCRepository()
await blockchain.connect()
walker = BlockWalker(flush_callback, timedelta(seconds=config.walker_flush_interval))
# gmx_wire_runner_early(walker, backfill=ohlcs)
gmx_wire_runner_early(walker) # todo re-enable backfill
wire_dexorder_debug(walker)
# todo re-enable uniswap
# walker.add_event_trigger(handle_backfill_uniswap_swaps,
# get_contract_event('IUniswapV3PoolEvents', 'Swap'), multi=True)
gmx_wire_runner_late(walker)
walker.add_event_trigger(handle_backfill_uniswap_swaps,
get_contract_event('IUniswapV3PoolEvents', 'Swap'), multi=True)
await walker.run()

@@ -14,10 +14,7 @@ from dexorder.contract import get_contract_event
from dexorder.contract.dexorder import get_dexorder_contract
from dexorder.event_handler import (init, dump_log, handle_vault_created, handle_order_placed,
handle_transfer, handle_swap_filled, handle_order_canceled, handle_order_cancel_all,
handle_uniswap_swaps, handle_vault_impl_changed, update_metrics,
activate_new_price_triggers)
from dexorder.gmx import gmx_wire_runner_early, gmx_wire_runner_late
from dexorder.gmx._handle import gmx_wire_runner_init
handle_uniswap_swaps, handle_vault_impl_changed, update_metrics)
from dexorder.marks import publish_marks
from dexorder.memcache import memcache
from dexorder.memcache.memcache_state import RedisState, publish_all
@@ -64,23 +61,20 @@ def setup_logevent_triggers(runner):

runner.add_callback(check_activate_orders)
runner.add_callback(init)
gmx_wire_runner_init(runner)

runner.add_event_trigger(handle_transaction_receipts)
runner.add_event_trigger(handle_vault_created, get_contract_event('Vault', 'VaultCreated'))
runner.add_event_trigger(handle_vault_impl_changed, get_contract_event('Vault', 'VaultImplChanged'))
runner.add_event_trigger(handle_order_placed, get_contract_event('VaultImpl', 'DexorderSwapPlaced'))
gmx_wire_runner_early(runner) # must come after DexorderSwapPlaced so the GMXOrder event can add data to the existing order
runner.add_event_trigger(handle_transfer, get_contract_event('ERC20', 'Transfer'))
runner.add_event_trigger(handle_uniswap_swaps, get_contract_event('IUniswapV3PoolEvents', 'Swap'), multi=True)
runner.add_event_trigger(handle_swap_filled, get_contract_event('VaultImpl', 'DexorderSwapFilled'))
runner.add_event_trigger(handle_order_canceled, get_contract_event('VaultImpl', 'DexorderSwapCanceled'))
runner.add_event_trigger(handle_order_cancel_all, get_contract_event('VaultImpl', 'DexorderCancelAll'))
gmx_wire_runner_late(runner)

runner.add_event_trigger(handle_dexorderexecutions, executions)
runner.add_event_trigger(handle_vault_creation_requests)

runner.add_event_trigger(activate_new_price_triggers)
runner.add_callback(end_trigger_updates)
runner.add_callback(execute_tranches)
@@ -1,7 +1,7 @@
import logging
from dataclasses import dataclass

from dexorder import dec
from dexorder import blockchain, db, dec
from dexorder.bin.executable import execute

log = logging.getLogger(__name__)

@@ -52,7 +52,7 @@ class BlockData (Generic[T]):
def setitem(self, item, value: T, overwrite=True):
state = current_blockstate.get()
fork = current_fork.get()
return state.set(fork, self.series, item, value, overwrite)
state.set(fork, self.series, item, value, overwrite)

def getitem(self, item, default=NARG) -> T:
state = current_blockstate.get()
@@ -63,11 +63,9 @@ class BlockData (Generic[T]):
result = default
if self.lazy_getitem:
lazy = self.lazy_getitem(self, item)
if lazy is not NARG and lazy is not DELETE:
if lazy is not NARG:
state.set(state.root_fork, self.series, item, lazy, readonly_override=True)
result = lazy
if result is DELETE:
result = default
if result is NARG:
raise KeyError
return result
@@ -144,7 +142,7 @@ class BlockSet(Generic[T], Iterable[T], BlockData[T]):
return self.contains(item)

def __iter__(self) -> Iterator[T]:
return self.iter_keys(self.series)
yield from (k for k,v in self.iter_items(self.series))

class BlockDict(Generic[K,V], BlockData[V]):
@@ -164,9 +162,6 @@ class BlockDict(Generic[K,V], BlockData[V]):
def __contains__(self, item: K) -> bool:
return self.contains(item)

def __iter__(self) -> Iterator[K]:
return self.iter_keys(self.series)

def items(self) -> Iterable[tuple[K,V]]:
return self.iter_items(self.series)
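The getitem() change above keeps the NARG/DELETE sentinels that distinguish "no value supplied" from "value explicitly deleted". A generic sketch of that sentinel pattern, with illustrative names that are not the dexorder API:

NARG = object()    # sentinel: caller supplied no default
DELETE = object()  # sentinel: key was explicitly deleted in this fork

def get_with_sentinels(store: dict, key, default=NARG):
    value = store.get(key, NARG)
    if value is DELETE:   # a recorded deletion behaves like a missing key
        value = default
    if value is NARG:     # neither a stored value nor a default was available
        raise KeyError(key)
    return value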
@@ -232,9 +232,8 @@ class BlockState:
for diff in diffs:
if diff.branch_id == branch.id:
# if there's an existing value for this branch, we replace it
old_value = diff.value
diff.value = value
return old_value
return
elif self._fork_has_diff(fork, diff):
# if there's an existing value on this fork, remember it
old_value = diff.value

@@ -10,7 +10,7 @@ from .schema import Config

schema = OmegaConf.structured(Config(), flags={'struct': False})

config_file = 'dexorder.toml'
_config_file = 'dexorder.toml'

class ConfigException (Exception):
pass
@@ -21,7 +21,7 @@ def load_config():
result:ConfigDict = OmegaConf.merge(
schema,
from_toml('.secret.toml'),
from_toml(config_file),
from_toml(_config_file),
from_toml('config.toml'),
from_env()
)
@@ -73,7 +73,7 @@ if len(sys.argv) > 1 and (sys.argv[1] == '-c' or sys.argv[1] == '--config'):
if len(sys.argv) < 3:
raise ConfigException('Missing config file argument')
else:
config_file = sys.argv[2]
_config_file = sys.argv[2]
sys.argv = [sys.argv[0], *sys.argv[3:]]

config = load_config()

@@ -1,3 +1,3 @@

from .load import config

@@ -9,7 +9,6 @@ from typing import Optional

@dataclass
class Config:
contract_version: Optional[str] = None # version tag of the contract deployment to use. if None then
confirms: Optional[int] = None # number of blocks before data is considered finalized. if None then the chain's default setting is used
batch_size: Optional[int] = None # max number of blocks to query in a single backfill rpc request
rpc_url: str = 'http://localhost:8545' # may be a comma-separated list. may include names of entries in rpc_urls.
@@ -1,6 +1,5 @@
import glob
import json
import logging
import os

from eth_abi.exceptions import InsufficientDataBytes
@@ -10,7 +9,7 @@ from web3.exceptions import BadFunctionCallOutput, ContractLogicError

from .abi import abis
from .contract_proxy import ContractProxy
from .. import current_w3, config
from .. import current_w3
from ..base.chain import current_chain

CONTRACT_ERRORS = (InsufficientDataBytes, ContractLogicError, BadFunctionCallOutput)
@@ -19,28 +18,10 @@ CONTRACT_ERRORS = (InsufficientDataBytes, ContractLogicError, BadFunctionCallOut
# set initially to the string filename, then loaded on demand and set to the parsed JSON result
_contract_data: dict[str,Union[str,dict]] = {}

initialized = False
_contract_path = ''

def get_contract_path():
init_contract_data()
return _contract_path

log = logging.getLogger(__name__)

def init_contract_data():
global initialized, _contract_path
if initialized:
return
subpath = '' if config.contract_version is None else f'/deployment/{config.contract_version}'
_contract_path = f'../contract{subpath}'

# finds all .json files in the out path and sets _contract_data with their pathname
for _file in glob.glob(f'{_contract_path}/out/**/*.sol/*.json', recursive=True):
if os.path.isfile(_file):
_contract_data[os.path.basename(_file)[:-5]] = _file
initialized = True
log.info(f'Configured contracts from {_contract_path}')
# finds all .sol files and sets _contract_data with their pathname
for _file in glob.glob('../contract/out/**/*.sol/*.json', recursive=True):
if os.path.isfile(_file):
_contract_data[os.path.basename(_file)[:-5]] = _file

def get_abi(name):
@@ -48,7 +29,6 @@ def get_abi(name):

def get_contract_data(name):
init_contract_data()
try:
return {'abi':abis[name]}
except KeyError:
@@ -63,10 +43,9 @@ def get_contract_data(name):

def get_deployment_address(deployment_name, contract_name, *, chain_id=None):
init_contract_data()
if chain_id is None:
chain_id = current_chain.get().id
with open(f'{_contract_path}/broadcast/{deployment_name}.sol/{chain_id}/run-latest.json', 'rt') as file:
with open(f'../contract/broadcast/{deployment_name}.sol/{chain_id}/run-latest.json', 'rt') as file:
data = json.load(file)
for tx in data.get('transactions',[]):
if tx.get('contractName') == contract_name:
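get_deployment_address() above walks a Foundry broadcast file. A standalone sketch of the same lookup, assuming the run-latest.json layout implied by the diff (a top-level "transactions" list whose entries carry "contractName"); the "contractAddress" field is an assumption, since the hunk is cut off before the return statement:

import json

def find_deployed_address(broadcast_path: str, contract_name: str):
    # broadcast_path is e.g. '<contract_path>/broadcast/<Deployment>.sol/<chain_id>/run-latest.json'
    with open(broadcast_path, 'rt') as file:
        data = json.load(file)
    for tx in data.get('transactions', []):
        if tx.get('contractName') == contract_name:
            return tx.get('contractAddress')  # assumed field name, not shown above
    return None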
@@ -1,5 +1,5 @@
|
||||
abis = {
|
||||
# Special ERC20 definition where symbol() returns a bytes32 instead of a string
|
||||
# ERC20 where symbol() returns a bytes32 instead of a string
|
||||
'ERC20.sb': '''[{"type":"function","name":"symbol","inputs":[],"outputs":[{"name":"","type":"bytes32","internalType":"bytes32"}],"stateMutability":"view"},{"type":"function","name":"name","inputs":[],"outputs":[{"name":"","type":"bytes32","internalType":"bytes32"}],"stateMutability":"view"}]'''
|
||||
# 'WMATIC': '''[{"constant":true,"inputs":[],"name":"name","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"guy","type":"address"},{"name":"wad","type":"uint256"}],"name":"approve","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"src","type":"address"},{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transferFrom","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"wad","type":"uint256"}],"name":"withdraw","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint8"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"","type":"address"}],"name":"balanceOf","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"symbol","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transfer","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[],"name":"deposit","outputs":[],"payable":true,"stateMutability":"payable","type":"function"},{"constant":true,"inputs":[{"name":"","type":"address"},{"name":"","type":"address"}],"name":"allowance","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"payable":true,"stateMutability":"payable","type":"fallback"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":true,"name":"guy","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":true,"name":"dst","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"dst","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Deposit","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Withdrawal","type":"event"}]''',
|
||||
}
|
||||
|
||||
@@ -33,7 +33,8 @@ class ContractTransaction:
async def wait(self) -> TxReceipt:
if self.receipt is None:
self.receipt = await current_w3.get().eth.wait_for_transaction_receipt(self.id)
self.account.release()
if self.account is not None:
self.account.release()
return self.receipt

async def sign(self, account: Account):
@@ -60,14 +61,14 @@ class DeployTransaction (ContractTransaction):

def call_wrapper(addr, name, func):
async def f(*args, block_identifier=None, kwargs=None):
async def f(*args, block_identifier=None, **kwargs):
if block_identifier is None:
try:
block_identifier = current_block.get().height
except (LookupError, AttributeError):
block_identifier = 'latest'
try:
return await func(*args).call(block_identifier=block_identifier, **(kwargs or {}))
return await func(*args).call(block_identifier=block_identifier, **kwargs)
except Web3Exception as e:
e.args += addr, name
raise e
@@ -75,8 +76,8 @@ def call_wrapper(addr, name, func):

def transact_wrapper(addr, name, func):
async def f(*args, kwargs=None):
tx = await func(*args).build_transaction(kwargs or {})
async def f(*args, **kwargs):
tx = await func(*args).build_transaction(kwargs)
ct = ContractTransaction(tx)
account = await Account.acquire()
if account is None:
@@ -96,8 +97,8 @@ def transact_wrapper(addr, name, func):

def build_wrapper(_addr, _name, func):
async def f(*args, kwargs=None):
tx = await func(*args).build_transaction(kwargs or {})
async def f(*args, **kwargs):
tx = await func(*args).build_transaction(kwargs)
return ContractTransaction(tx)
return f
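The wrapper changes above replace an explicit kwargs=None dict with native **kwargs forwarding into .call() / .build_transaction(). A sketch of what that means at the call site; proxy.transfer is a hypothetical contract function used only for illustration:

async def example_transact(proxy, recipient, amount):
    # Old convention (removed above): extra fields had to be bundled into a dict.
    #   await proxy.transfer(recipient, amount, kwargs={'value': 0})
    # New convention: ordinary Python keyword arguments, collected into **kwargs
    # and handed to build_transaction() as the transaction parameters.
    return await proxy.transfer(recipient, amount, value=0)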
@@ -153,10 +154,14 @@ class ContractProxy:
def __getattr__(self, item):
if item == 'constructor':
found = self.contract.constructor
elif item in self.contract.functions:
found = self.contract.functions[item]
else:
raise AttributeError(item)
funcs = self.contract.functions
# In web3.py v6+, contract functions are exposed as attributes, not via __getitem__.
# Using getattr ensures we obtain the callable factory for the function; indexing may return None.
# Additionally, guard against unexpected None to fail fast with a clear error.
found = getattr(funcs, item, None)
if not callable(found):
raise AttributeError(f"Function '{item}' not found on contract {self._interface_name} at {self.address}")
return self._wrapper(self.address, item, found)

def __repr__(self):
@@ -6,37 +6,26 @@ from eth_utils import keccak, to_bytes, to_checksum_address
from typing_extensions import Optional

from dexorder.base.chain import current_chain
from dexorder.contract import ContractProxy, get_contract_path
from dexorder.contract import ContractProxy

log = logging.getLogger(__name__)

version = None
chain_info = None

_factory = {}
_dexorder = {}
_vault_init_code_hash = {}
_initialized = False

def _ensure_init():
global version, chain_info
with open(f'{get_contract_path()}/version.json') as version_file:
version = json.load(version_file)
log.info(f'Version: {version}')
chain_info = version['chainInfo']
for _chain_id, info in chain_info.items():
_chain_id = int(_chain_id)
_factory[_chain_id] = ContractProxy(info['factory'], 'VaultFactory')
_dexorder[_chain_id] = ContractProxy(info['dexorder'], 'DexorderGMX')
_vault_init_code_hash[_chain_id] = to_bytes(hexstr=info['vaultInitCodeHash'])

def __getattr__(name):
global _initialized
if not _initialized:
_ensure_init()
_initialized = True
raise AttributeError()
with open('../contract/version.json') as version_file:
version = json.load(version_file)
log.info(f'Version: {version}')

chain_info = version['chainInfo']

for _chain_id, info in chain_info.items():
_chain_id = int(_chain_id)
_factory[_chain_id] = ContractProxy(info['factory'], 'VaultFactory')
_dexorder[_chain_id] = ContractProxy(info['dexorder'], 'Dexorder')
_vault_init_code_hash[_chain_id] = to_bytes(hexstr=info['vaultInitCodeHash'])

def get_by_chain(d):
return d[current_chain.get().id]
@@ -51,12 +40,11 @@ def get_vault_init_code_hash() -> bytes:
return get_by_chain(_vault_init_code_hash)

def get_mockenv() -> Optional[ContractProxy]:
addr = globals()['chain_info'].get(str(current_chain.get().id), {}).get('mockenv')
addr = chain_info.get(str(current_chain.get().id),{}).get('mockenv')
return ContractProxy(addr, 'MockEnv') if addr is not None else None

def get_mirrorenv() -> Optional[ContractProxy]:
addr = globals()['chain_info'].get(str(current_chain.get().id), {}).get('mirrorenv')
addr = chain_info.get(str(current_chain.get().id),{}).get('mirrorenv')
return ContractProxy(addr, 'MirrorEnv') if addr is not None else None

def vault_address(owner, num):
@@ -3,7 +3,6 @@ from typing import TypedDict, Optional

from sqlalchemy.orm import Mapped, mapped_column

from dexorder.base import OldPoolDict
from dexorder.base.orderlib import Exchange
from dexorder.database.column import Address, Blockchain
from dexorder.database.model import Base
@@ -21,6 +20,17 @@ class PoolDict (TypedDict):
x: Optional[dict]

class OldPoolDict (TypedDict):
type: str
chain: int
address: str
exchange: int
base: str
quote: str
fee: int
decimals: int

class Pool (Base):
__tablename__ = 'pool'

@@ -1,15 +1,37 @@
import logging
from typing import TypedDict, Optional, NotRequired

from sqlalchemy import Index
from sqlalchemy.orm import Mapped, mapped_column

from dexorder.base import OldTokenDict
from dexorder.database.column import Address, Blockchain, Uint8
from dexorder.database.model import Base

log = logging.getLogger(__name__)

class TokenDict (TypedDict):
a: str
n: str
s: str
d: int
w: Optional[bool] # approved ("w"hitelisted)
x: NotRequired[dict] # extra data

# OldTokenDict is the primary dict we use in-memory, with basic JSON-able types

class OldTokenDict (TypedDict):
type: str
chain: int
address: str
name: str
symbol: str
decimals: int
approved: bool # whether this token is in the whitelist or not
x: NotRequired[dict] # extra data

# the database object is primarily write-only so we are able to index queries for pools-by-token from the nodejs server

class Token (Base):
@@ -1,14 +1,13 @@
import asyncio
import logging

from eth_utils import keccak
from web3.types import EventData

from dexorder import db, metric, current_w3, timestamp
from dexorder.accounting import accounting_fill, accounting_placement
from dexorder.base.chain import current_chain
from dexorder.base.order import TrancheKey, OrderKey
from dexorder.base.orderlib import SwapOrderState, Exchange, GMXOrder
from dexorder.base.orderlib import SwapOrderState
from dexorder.blocks import get_block_timestamp
from dexorder.blockstate import current_blockstate
from dexorder.contract.dexorder import VaultContract, get_factory_contract
@@ -18,8 +17,7 @@ from dexorder.ohlc import ohlcs
from dexorder.order.orderstate import Order
from dexorder.order.triggers import (OrderTriggers, activate_order, update_balance_triggers, start_trigger_updates,
update_price_triggers, TimeTrigger, PriceLineTrigger)
from dexorder.pools import new_pool_prices, pool_prices, get_uniswap_data, get_pool
from dexorder.progressor import BlockProgressor
from dexorder.pools import new_pool_prices, pool_prices, get_uniswap_data
from dexorder.util import hexstr
from dexorder.vault_blockdata import vault_owners, adjust_balance, verify_vault, publish_vaults

@@ -35,14 +33,6 @@ def init():
start_trigger_updates()

def wire_dexorder_debug(runner: BlockProgressor):
runner.add_event_trigger(handle_dexorderdebug, None, {"topics":[keccak(text='DexorderDebug(string)')]})

def handle_dexorderdebug(events: list):
for event in events:
print(f'DexorderDebug {event}')

async def handle_order_placed(event: EventData):
# event DexorderSwapPlaced (uint64 startOrderIndex, uint8 numOrders, uint);
addr = event['address']
@@ -67,9 +57,6 @@ async def handle_order_placed(event: EventData):
obj = await contract.swapOrderStatus(index)
log.debug(f'raw order status {obj}')
order = Order.create(addr, index, event['transactionHash'], obj)
if order.order.route.exchange == Exchange.GMX:
gmxStatus = await contract.gmxOrderStatus(index)
order.order.gmx = GMXOrder.load(gmxStatus[0])
await activate_order(order)
log.debug(f'new order {order.key} {await order.pprint()}')

@@ -94,10 +81,9 @@ async def handle_swap_filled(event: EventData):
except KeyError:
log.warning(f'DexorderSwapFilled IGNORED due to missing order {vault} {order_index}')
return
usd_value = await accounting_fill(event, order.order.tokenOut)
# from here down is almost the same as a section of handle_gmxorderexecuted()
if usd_value is not None:
metric.volume.inc(float(usd_value))
value = await accounting_fill(event, order.order.tokenOut)
if value is not None:
metric.volume.inc(float(value))
order.status.trancheStatus[tranche_index].activationTime = next_execution_time # update rate limit
try:
triggers = OrderTriggers.instances[order.key]
@@ -172,24 +158,10 @@ async def handle_uniswap_swap(swap: EventData):
return
pool, time, price = data
addr = pool['address']
await update_pool_price(addr, time, price, pool['decimals'])
# log.debug(f'pool {addr} {minutely(time)} {price}')

async def update_pool_price(addr, time, price, decimals):
"""
Price should be an adjusted price with decimals, not the raw price from the pool. The decimals are used to
convert the price back to blockchain format for the triggers.
"""
pool_prices[addr] = price # this will update new_pool_prices if necessary
pool_prices[addr] = price
await ohlcs.update_all(addr, time, price)
update_price_triggers(addr, price, decimals)

async def activate_new_price_triggers():
for addr, price in new_pool_prices.items():
pool = await get_pool(addr)
update_price_triggers(addr, price, pool['decimals'])
await update_price_triggers(pool, price)
# log.debug(f'pool {addr} {minutely(time)} {price}')

async def handle_vault_created(created: EventData):
@@ -2,7 +2,7 @@ import asyncio
import logging

from dexorder.contract import ContractProxy
from dexorder.contract.dexorder import get_fee_manager_contract
from dexorder.contract.dexorder import get_factory_contract, get_fee_manager_contract

log = logging.getLogger(__name__)

@@ -242,10 +242,6 @@ class OHLCFileSeries:
self.dirty_files = set()
self.quote: Optional[tuple[datetime,dec]] = None

@property
def exists(self) -> bool:
return self.quote_file is not None or os.path.exists(self.quote_filename)

@property
def quote_filename(self):
@@ -280,16 +276,6 @@ class OHLCFileSeries:
self.dirty_files.add(file)

# noinspection PyShadowingBuiltins
def update_ohlc(self, period: timedelta, time: datetime, open: dec, high: dec, low: dec, close: dec):
file = OHLCFile.get(self.base_dir, OHLCFilePath(self.symbol, period, time))
file.update(time, open)
file.update(time, high)
file.update(time, low)
file.update(time, close)
self.dirty_files.add(file)

def _load(self, time):
#
# load quote file
@@ -373,25 +359,14 @@ class FinalOHLCRepository:
"""
def __init__(self):
assert config.ohlc_dir
self.dirty_series: set[OHLCFileSeries] = set()
self.dirty_series = set()

def update(self, symbol: str, time: datetime, price: Optional[dec]):
series = self.get_series(symbol)
series.update(time, price)
self.dirty_series.add(series)

# noinspection PyShadowingBuiltins
def update_ohlc(self, symbol: str, period: timedelta, time: datetime, open: dec, high: dec, low: dec, close: dec):
series = self.get_series(symbol)
series.update_ohlc(period, time, open, high, low, close)
self.dirty_series.add(series)

@staticmethod
def get_series(symbol):
chain_id = current_chain.get().id
base_dir = os.path.join(config.ohlc_dir, str(chain_id))
series = OHLCFileSeries.get(base_dir, symbol)
return series
series.update(time, price)
self.dirty_series.add(series)

def flush(self) -> None:
for series in self.dirty_series:
@@ -403,6 +378,3 @@ class FinalOHLCRepository:
closing.file.close()
# noinspection PyProtectedMember
OHLCFile._closing.clear()

def has_symbol(self, symbol: str):
return self.get_series(symbol).exists
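A short usage sketch of the repository API shown above (update / update_ohlc / flush). The symbol and prices are illustrative, and Decimal stands in for dexorder's dec type:

from datetime import datetime, timedelta, timezone
from decimal import Decimal as dec  # stand-in for dexorder's dec

def record_bar(repo):  # repo: FinalOHLCRepository
    now = datetime.now(timezone.utc)
    # tick-style update: a single price quote
    repo.update('WETH/USDC', now, dec('3050.25'))
    # bar-style update: a full one-minute candle
    repo.update_ohlc('WETH/USDC', timedelta(minutes=1), now,
                     dec('3049.8'), dec('3051.0'), dec('3048.9'), dec('3050.25'))
    # persist dirty series and close rotated files
    repo.flush()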
@@ -1,5 +0,0 @@
from ._base import gmx_prices, gmx_tk_in_flight, tk_gmx_in_flight
from ._chaininfo import gmx_chain_info
from ._handle import gmx_wire_runner_early, gmx_wire_runner_late
from ._metadata import *
@@ -1,51 +0,0 @@
import logging
import re

from eth_utils import keccak

from dexorder.util import hexbytes, hexstr
from dexorder.util.abiencode import abi_decoder

log = logging.getLogger(__name__)

def no_ws(s):
return re.sub(r"\s+", "", s)

EventLogDataType = '''
(((string,address)[],(string,address[])[]),
((string,uint256)[],(string,uint256[])[]),
((string,int256)[], (string,int256[])[] ),
((string,bool)[], (string,bool[])[] ),
((string,bytes32)[],(string,bytes32[])[]),
((string,bytes)[], (string,bytes[])[] ),
((string,string)[], (string,string[])[] )
)'''

EventLogType = f'EventLog( address, string, string, {EventLogDataType} )'
EventLog1Type = f'EventLog1( address, string, string, bytes32, {EventLogDataType} )'
EventLog2Type = f'EventLog2( address, string, string, bytes32, bytes32, {EventLogDataType} )'

EventLogTopic = hexstr(keccak(text=no_ws(EventLogType)))
EventLog1Topic = hexstr(keccak(text=no_ws(EventLog1Type)).hex())
EventLog2Topic = hexstr(keccak(text=no_ws(EventLog2Type)).hex())

def topic_hash(signature):
return hexstr(keccak(text=no_ws(signature)))

def parse_event_log_data(event_log):
event_log_data = event_log['data']
if type(event_log_data) is str:
event_log_data = hexbytes(event_log_data)
sender, event_name, event_log_data = abi_decoder.decode(('address', 'string', no_ws(EventLogDataType),), event_log_data)

result = {'sender': sender, 'event': event_name, 'tx': hexstr(event_log['transactionHash'])}
for items, array_items in event_log_data:
for k, v in items:
result[k] = v
for k, v in array_items:
result[k] = v
return result
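The deleted module above derives GMX event topics by hashing the whitespace-stripped event signature. A minimal standalone sketch of the same computation using eth_utils; the Transfer example below is the well-known ERC-20 topic, shown only for comparison:

import re
from eth_utils import keccak

def topic0(signature: str) -> str:
    # topic0 of a Solidity event is keccak256 of its canonical signature,
    # so whitespace in the human-readable form must be stripped first.
    canonical = re.sub(r"\s+", "", signature)
    return '0x' + keccak(text=canonical).hex()

# topic0('Transfer(address,address,uint256)')
# -> '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef'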
@@ -1,97 +0,0 @@
import logging
from dataclasses import dataclass
from enum import Enum
from typing import NamedTuple

import requests
from eth_utils import to_checksum_address

from ._chaininfo import GMX_API_BASE_URLS
from .. import dec
from ..base.chain import current_chain
from ..base.order import TrancheKey
from ..blockstate import BlockDict
from ..util import json

log = logging.getLogger(__name__)

@dataclass
class GMXPosition:
# compound key fields
market_token: str
collateral_token: str
is_long: bool

# non-key attrs
size: dec = dec(0)

class Key (NamedTuple):
market_token: str
collateral_token: str
is_long: bool

def __str__(self):
return f'{self.market_token}|{self.collateral_token}|{"L" if self.is_long else "S"}'

@staticmethod
def str2key(keystring: str):
market_token, collateral_token, is_long = keystring.split('|')
return GMXPosition.Key(market_token.lower(), collateral_token.lower(), is_long == 'L')

@property
def key(self):
return GMXPosition.Key(self.market_token, self.collateral_token, self.is_long)

@staticmethod
def load(d: dict):
return GMXPosition(to_checksum_address(d['m']), to_checksum_address(d['c']), d['l'], dec(d['s']))

def dump(self):
return {
'm': self.market_token,
'c': self.collateral_token,
'l': self.is_long,
's': str(self.size),
}

def __hash__(self):
return hash(self.key)

def __eq__(self, other):
return self.key == other.key

class GMXOrderType (Enum):
MarketSwap = 0
LimitSwap = 1
MarketIncrease = 2
LimitIncrease = 3
MarketDecrease = 4
LimitDecrease = 5
StopLossDecrease = 6
Liquidation = 7
StopIncrease = 8

GMX_API_BASE_URL = None

def gmx_api(method, **params):
global GMX_API_BASE_URL
if GMX_API_BASE_URL is None:
GMX_API_BASE_URL = GMX_API_BASE_URLS[current_chain.get().id]
return requests.get(GMX_API_BASE_URL+method, params=params, timeout=5).json()

gmx_markets_by_index_token: BlockDict[str, list[str]] = BlockDict('gmx_t_m', redis=True, db=True, value2str=lambda mks: json.dumps(mks), str2value=lambda s: json.loads(s))
gmx_prices: BlockDict[str, dec] = BlockDict('gmx_p', redis=True, str2value=dec)
# open positions by vault
gmx_positions: BlockDict[str, list[GMXPosition]] = BlockDict('gmx_pos', redis=True, db=True,
value2str=lambda positions: json.dumps([p.dump() for p in positions]),
str2value=lambda positions: [GMXPosition.load(p) for p in json.loads(positions)] )

# dual mappings of our TrancheKey to a GMX Order key exist only when a GMX order has been placed but not yet handled
gmx_tk_in_flight: BlockDict[str, TrancheKey] = BlockDict('gmx_tif', db=True, str2value=TrancheKey.str2key)
tk_gmx_in_flight: BlockDict[TrancheKey, str] = BlockDict('tk2gmx', db=True, str2key=TrancheKey.str2key)
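The Key type in the deleted module serializes a position as 'market|collateral|L-or-S', which is what its BlockDict serialization would rely on. A tiny illustration; the addresses are placeholders, and the exact attribute path of str2key depends on indentation lost in this view:

key = GMXPosition.Key('0xMarketToken', '0xCollateralToken', True)
assert str(key) == '0xMarketToken|0xCollateralToken|L'
# str2key() reverses this, lower-casing the token addresses and mapping 'L'/'S' back to is_long.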
@@ -1,16 +0,0 @@
import logging

log = logging.getLogger(__name__)

gmx_chain_info = {
42161: {
'EventEmitter': '0xC8ee91A54287DB53897056e12D9819156D3822Fb',
'DataStore': '0xFD70de6b91282D8017aA4E741e9Ae325CAb992d8',
'Reader': '0x0537C767cDAC0726c76Bb89e92904fe28fd02fE1',
}
}

GMX_API_BASE_URLS={
31337: 'https://arbitrum-api.gmxinfra.io/',
42161: 'https://arbitrum-api.gmxinfra.io/',
}
@@ -1,24 +0,0 @@
import logging
from functools import cache

from dexorder.contract import ContractProxy
from dexorder.gmx._datastore import DataStore
from dexorder.util import json

log = logging.getLogger(__name__)

def get_gmx_contract_info(name: str):
with open(f'./resource/abi/42161/gmx/{name}.json') as file:
info = json.load(file)
return info

@cache
def get_gmx_contract(name: str):
info = get_gmx_contract_info(name)
if name == 'DataStore':
clazz = DataStore
else:
clazz = ContractProxy
return clazz(info['address'], abi=info['abi'])
@@ -1,28 +0,0 @@
import logging

from eth_utils import keccak

from dexorder import dec
from dexorder.contract import ContractProxy
from dexorder.util.abiencode import abi_encoder

log = logging.getLogger(__name__)

def combo_key(key_str, arg, arg_type='address'):
key_bytes = keccak(abi_encoder.encode(['string'], [key_str]))
return keccak(abi_encoder.encode(['bytes32', arg_type], [key_bytes, arg]))

IS_MARKET_DISABLED_KEY = 'IS_MARKET_DISABLED'
MIN_COLLATERAL_FACTOR_KEY = 'MIN_COLLATERAL_FACTOR'

class DataStore (ContractProxy):

async def is_market_disabled(self, market_addr: str):
return await self.getBool(combo_key(IS_MARKET_DISABLED_KEY, market_addr))

async def min_collateral_factor(self, market_addr: str):
result = await self.getUint(combo_key(MIN_COLLATERAL_FACTOR_KEY, market_addr))
if result == 0:
log.warning(f'no min collateral factor for market {market_addr}')
return 2 * dec(result) / dec(1e30)
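combo_key() above mirrors the GMX DataStore key scheme: hash the string key, then hash it together with the argument. A standalone sketch with eth_abi and eth_utils; the example address is illustrative only:

from eth_abi import encode
from eth_utils import keccak

def datastore_key(key_str: str, addr: str) -> bytes:
    # keccak of the ABI-encoded string key, then combined with the address
    key_bytes = keccak(encode(['string'], [key_str]))
    return keccak(encode(['bytes32', 'address'], [key_bytes, addr]))

# e.g. datastore_key('IS_MARKET_DISABLED', '0x70d95587d40A2caf56bd97485aB3Eec10Bee6336')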
@@ -1,292 +0,0 @@
|
||||
import logging
|
||||
|
||||
from dexorder.util.abiencode import abi_decoder
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
gmx_error_map = {
|
||||
'b244a107': 'ActionAlreadySignalled()',
|
||||
'94fdaea2': 'ActionNotSignalled()',
|
||||
'3285dc57': 'AdlNotEnabled()',
|
||||
'd06ed8be': 'AdlNotRequired(int256,uint256)',
|
||||
'70657e04': 'ArrayOutOfBoundsBytes(bytes[],uint256,string)',
|
||||
'9d18e63b': 'ArrayOutOfBoundsUint256(uint256[],uint256,string)',
|
||||
'60c5e472': 'AvailableFeeAmountIsZero(address,address,uint256)',
|
||||
'11aeaf6b': 'BlockNumbersNotSorted(uint256,uint256)',
|
||||
'ec775484': 'BuybackAndFeeTokenAreEqual(address,address)',
|
||||
'd6b52b60': 'ChainlinkPriceFeedNotUpdated(address,uint256,uint256)',
|
||||
'ec6d89c8': 'CollateralAlreadyClaimed(uint256,uint256)',
|
||||
'bdec9c0d': 'CompactedArrayOutOfBounds(uint256[],uint256,uint256,string)',
|
||||
'5ebb87c9': 'ConfigValueExceedsAllowedRange(bytes32,uint256)',
|
||||
'413f9a54': 'DataStreamIdAlreadyExistsForToken(address)',
|
||||
'83f2ba20': 'DeadlinePassed(uint256,uint256)',
|
||||
'43e30ca8': 'DepositNotFound(bytes32)',
|
||||
'dd70e0c9': 'DisabledFeature(bytes32)',
|
||||
'09f8c937': 'DisabledMarket(address)',
|
||||
'd4064737': 'DuplicatedIndex(uint256,string)',
|
||||
'91c78b78': 'DuplicatedMarketInSwapPath(address)',
|
||||
'dd7016a2': 'EmptyAccount()',
|
||||
'e474a425': 'EmptyAddressInMarketTokenBalanceValidation(address,address)',
|
||||
'52dfddfd': 'EmptyChainlinkPaymentToken()',
|
||||
'8db88ccf': 'EmptyChainlinkPriceFeed(address)',
|
||||
'b86fffef': 'EmptyChainlinkPriceFeedMultiplier(address)',
|
||||
'616daf1f': 'EmptyClaimFeesMarket()',
|
||||
'62e402cc': 'EmptyDataStreamFeedId(address)',
|
||||
'088405c6': 'EmptyDataStreamMultiplier(address)',
|
||||
'95b66fe9': 'EmptyDeposit()',
|
||||
'01af8c24': 'EmptyDepositAmounts()',
|
||||
'd1c3d5bd': 'EmptyDepositAmountsAfterSwap()',
|
||||
'a14e1b3d': 'EmptyGlv(address)',
|
||||
'bd192971': 'EmptyGlvDeposit()',
|
||||
'03251ce6': 'EmptyGlvDepositAmounts()',
|
||||
'94409f52': 'EmptyGlvMarketAmount()',
|
||||
'93856b1a': 'EmptyGlvTokenSupply()',
|
||||
'0e5be78f': 'EmptyGlvWithdrawal()',
|
||||
'402a866f': 'EmptyGlvWithdrawalAmount()',
|
||||
'e9b78bd4': 'EmptyHoldingAddress()',
|
||||
'05fbc1ae': 'EmptyMarket()',
|
||||
'eb1947dd': 'EmptyMarketPrice(address)',
|
||||
'2ee3d69c': 'EmptyMarketTokenSupply()',
|
||||
'16307797': 'EmptyOrder()',
|
||||
'4dfbbff3': 'EmptyPosition()',
|
||||
'cd64a025': 'EmptyPrimaryPrice(address)',
|
||||
'd551823d': 'EmptyReceiver()',
|
||||
'6af5e96f': 'EmptyShift()',
|
||||
'60d5e84a': 'EmptyShiftAmount()',
|
||||
'3df42531': 'EmptySizeDeltaInTokens()',
|
||||
'9fc297fa': 'EmptyTokenTranferGasLimit(address)',
|
||||
'9231be69': 'EmptyValidatedPrices()',
|
||||
'6d4bb5e9': 'EmptyWithdrawal()',
|
||||
'01d6f7b1': 'EmptyWithdrawalAmount()',
|
||||
'4e48dcda': 'EndOfOracleSimulation()',
|
||||
'59afd6c6': 'ExternalCallFailed(bytes)',
|
||||
'2df6dc23': 'FeeBatchNotFound(bytes32)',
|
||||
'e44992d0': 'GlvAlreadyExists(bytes32,address)',
|
||||
'057058b6': 'GlvDepositNotFound(bytes32)',
|
||||
'30b8a225': 'GlvDisabledMarket(address,address)',
|
||||
'8da31161': 'GlvEnabledMarket(address,address)',
|
||||
'c8b70b2c': 'GlvInsufficientMarketTokenBalance(address,address,uint256,uint256)',
|
||||
'80ad6831': 'GlvInvalidLongToken(address,address,address)',
|
||||
'9673a10b': 'GlvInvalidShortToken(address,address,address)',
|
||||
'3aa9fc91': 'GlvMarketAlreadyExists(address,address)',
|
||||
'af7d3787': 'GlvMaxMarketCountExceeded(address,uint256)',
|
||||
'd859f947': 'GlvMaxMarketTokenBalanceAmountExceeded(address,address,uint256,uint256)',
|
||||
'66560e7d': 'GlvMaxMarketTokenBalanceUsdExceeded(address,address,uint256,uint256)',
|
||||
'155712e1': 'GlvNameTooLong()',
|
||||
'2e3780e5': 'GlvNegativeMarketPoolValue(address,address)',
|
||||
'3afc5e65': 'GlvNonZeroMarketBalance(address,address)',
|
||||
'6c00ed8a': 'GlvNotFound(address)',
|
||||
'232d7165': 'GlvShiftIntervalNotYetPassed(uint256,uint256,uint256)',
|
||||
'c906a05a': 'GlvShiftMaxPriceImpactExceeded(uint256,uint256)',
|
||||
'de45e162': 'GlvShiftNotFound(bytes32)',
|
||||
'9cb4f5c5': 'GlvSymbolTooLong()',
|
||||
'07e9c4d5': 'GlvUnsupportedMarket(address,address)',
|
||||
'20dcb068': 'GlvWithdrawalNotFound(bytes32)',
|
||||
'd90abe06': 'GmEmptySigner(uint256)',
|
||||
'ee6e8ecf': 'GmInvalidBlockNumber(uint256,uint256)',
|
||||
'b8aaa455': 'GmInvalidMinMaxBlockNumber(uint256,uint256)',
|
||||
'c7b44b28': 'GmMaxOracleSigners(uint256,uint256)',
|
||||
'0f885e52': 'GmMaxPricesNotSorted(address,uint256,uint256)',
|
||||
'5b1250e7': 'GmMaxSignerIndex(uint256,uint256)',
|
||||
'dc2a99e7': 'GmMinOracleSigners(uint256,uint256)',
|
||||
'cc7bbd5b': 'GmMinPricesNotSorted(address,uint256,uint256)',
|
||||
'a581f648': 'InsufficientBuybackOutputAmount(address,address,uint256,uint256)',
|
||||
'74cc815b': 'InsufficientCollateralAmount(uint256,int256)',
|
||||
'2159b161': 'InsufficientCollateralUsd(int256)',
|
||||
'5dac504d': 'InsufficientExecutionFee(uint256,uint256)',
|
||||
'bb416f93': 'InsufficientExecutionGas(uint256,uint256,uint256)',
|
||||
'79293964': 'InsufficientExecutionGasForErrorHandling(uint256,uint256)',
|
||||
'19d50093': 'InsufficientFundsToPayForCosts(uint256,string)',
|
||||
'd3dacaac': 'InsufficientGasForCancellation(uint256,uint256)',
|
||||
'79a2abad': 'InsufficientGasLeftForCallback(uint256,uint256)',
|
||||
'3083b9e5': 'InsufficientHandleExecutionErrorGas(uint256,uint256)',
|
||||
'82c8828a': 'InsufficientMarketTokens(uint256,uint256)',
|
||||
'd28d3eb5': 'InsufficientOutputAmount(uint256,uint256)',
|
||||
'23090a31': 'InsufficientPoolAmount(uint256,uint256)',
|
||||
'9cd76295': 'InsufficientRelayFee(uint256,uint256)',
|
||||
'315276c9': 'InsufficientReserve(uint256,uint256)',
|
||||
'b98c6179': 'InsufficientReserveForOpenInterest(uint256,uint256)',
|
||||
'a7aebadc': 'InsufficientSwapOutputAmount(uint256,uint256)',
|
||||
'041b3483': 'InsufficientWntAmount(uint256,uint256)',
|
||||
'3a78cd7e': 'InsufficientWntAmountForExecutionFee(uint256,uint256)',
|
||||
'1d4fc3c0': 'InvalidAdl(int256,int256)',
|
||||
'8ac146e6': 'InvalidAmountInForFeeBatch(uint256,uint256)',
|
||||
'eb19d3f5': 'InvalidBaseKey(bytes32)',
|
||||
'25e5dc07': 'InvalidBlockRangeSet(uint256,uint256)',
|
||||
'752fdb63': 'InvalidBuybackToken(address)',
|
||||
'89736584': 'InvalidCancellationReceiverForSubaccountOrder(address,address)',
'5b3043dd': 'InvalidClaimAffiliateRewardsInput(uint256,uint256)',
'42c0d1f2': 'InvalidClaimCollateralInput(uint256,uint256,uint256)',
'7363cfa5': 'InvalidClaimFundingFeesInput(uint256,uint256)',
'74cee48d': 'InvalidClaimUiFeesInput(uint256,uint256)',
'6c2738d3': 'InvalidClaimableFactor(uint256)',
'839c693e': 'InvalidCollateralTokenForMarket(address,address)',
'4a591309': 'InvalidContributorToken(address)',
'8d56bea1': 'InvalidDataStreamBidAsk(address,int192,int192)',
'a4949e25': 'InvalidDataStreamFeedId(address,bytes32,bytes32)',
'2a74194d': 'InvalidDataStreamPrices(address,int192,int192)',
'6e0c29ed': 'InvalidDataStreamSpreadReductionFactor(address,uint256)',
'9fbe2cbc': 'InvalidDecreaseOrderSize(uint256,uint256)',
'751951f9': 'InvalidDecreasePositionSwapType(uint256)',
'9b867f31': 'InvalidExecutionFee(uint256,uint256,uint256)',
'99e26b44': 'InvalidExecutionFeeForMigration(uint256,uint256)',
'831e9f11': 'InvalidExternalCallInput(uint256,uint256)',
'be55c895': 'InvalidExternalCallTarget(address)',
'e15f2701': 'InvalidExternalReceiversInput(uint256,uint256)',
'fa804399': 'InvalidFeeBatchTokenIndex(uint256,uint256)',
'cb9339d5': 'InvalidFeeReceiver(address)',
'be6514b6': 'InvalidFeedPrice(address,int256)',
'fc90fcc3': 'InvalidGlpAmount(uint256,uint256)',
'bf16cb0a': 'InvalidGlvDepositInitialLongToken(address)',
'df0f9a23': 'InvalidGlvDepositInitialShortToken(address)',
'055ab8b9': 'InvalidGlvDepositSwapPath(uint256,uint256)',
'993417d5': 'InvalidGmMedianMinMaxPrice(uint256,uint256)',
'a54d4339': 'InvalidGmOraclePrice(address)',
'8d648a7f': 'InvalidGmSignature(address,address)',
'b21c863e': 'InvalidGmSignerMinMaxPrice(uint256,uint256)',
'e5feddc0': 'InvalidKeeperForFrozenOrder(address)',
'33a1ea6b': 'InvalidMarketTokenBalance(address,address,uint256,uint256)',
'9dd026db': 'InvalidMarketTokenBalanceForClaimableFunding(address,address,uint256,uint256)',
'808c464f': 'InvalidMarketTokenBalanceForCollateralAmount(address,address,uint256,uint256)',
'c08bb8a0': 'InvalidMinGlvTokensForFirstGlvDeposit(uint256,uint256)',
'3f9c06ab': 'InvalidMinMarketTokensForFirstDeposit(uint256,uint256)',
'1608d41a': 'InvalidMinMaxForPrice(address,uint256,uint256)',
'e71a51be': 'InvalidNativeTokenSender(address)',
'05d102a2': 'InvalidOracleProvider(address)',
'68b49e6c': 'InvalidOracleProviderForToken(address,address)',
'f9996e9f': 'InvalidOracleSetPricesDataParam(uint256,uint256)',
'dd51dc73': 'InvalidOracleSetPricesProvidersParam(uint256,uint256)',
'c1b14c91': 'InvalidOracleSigner(address)',
'0481a15a': 'InvalidOrderPrices(uint256,uint256,uint256,uint256)',
'253c8c02': 'InvalidOutputToken(address,address)',
'3c0ac199': 'InvalidPermitSpender(address,address)',
'adaa688d': 'InvalidPoolValueForDeposit(int256)',
'90a6af3b': 'InvalidPoolValueForWithdrawal(int256)',
'182e30e3': 'InvalidPositionMarket(address)',
'bff65b3f': 'InvalidPositionSizeValues(uint256,uint256)',
'663de023': 'InvalidPrimaryPricesForSimulation(uint256,uint256)',
'9cfea583': 'InvalidReceiver(address)',
'77e8e698': 'InvalidReceiverForFirstDeposit(address,address)',
'6eedac2f': 'InvalidReceiverForFirstGlvDeposit(address,address)',
'4baab816': 'InvalidReceiverForSubaccountOrder(address,address)',
'370abac2': 'InvalidRelayParams()',
'530b2590': 'InvalidSetContributorPaymentInput(uint256,uint256)',
'29a93dc4': 'InvalidSetMaxTotalContributorTokenAmountInput(uint256,uint256)',
'2a34f7fe': 'InvalidSignature(string)',
'720bb461': 'InvalidSizeDeltaForAdl(uint256,uint256)',
'3044992f': 'InvalidSubaccountApprovalNonce(uint256,uint256)',
'545e8f2b': 'InvalidSubaccountApprovalSubaccount()',
'cb9bd134': 'InvalidSwapMarket(address)',
'6ba3dd8b': 'InvalidSwapOutputToken(address,address)',
'672e4fba': 'InvalidSwapPathForV1(address[],address)',
'e6b0ddb6': 'InvalidTimelockDelay(uint256)',
'53f81711': 'InvalidTokenIn(address,address)',
'81468139': 'InvalidUiFeeFactor(uint256,uint256)',
'f3d06236': 'InvalidUserNonce(uint256,uint256)',
'1de2bca4': 'InvalidVersion(uint256)',
'bc121108': 'LiquidatablePosition(string,int256,int256,int256)',
'a38dfb2a': 'LongTokensAreNotEqual(address,address)',
'25e34fa1': 'MarketAlreadyExists(bytes32,address)',
'6918f9bf': 'MarketNotFound(address)',
'143e2156': 'MaskIndexOutOfBounds(uint256,string)',
'f0794a60': 'MaxAutoCancelOrdersExceeded(uint256,uint256)',
'4e3f62a8': 'MaxBuybackPriceAgeExceeded(uint256,uint256,uint256)',
'10aeb692': 'MaxCallbackGasLimitExceeded(uint256,uint256)',
'4f82a998': 'MaxFundingFactorPerSecondLimitExceeded(uint256,uint256)',
'2bf127cf': 'MaxOpenInterestExceeded(uint256,uint256)',
'dd9c6b9a': 'MaxOracleTimestampRangeExceeded(uint256,uint256)',
'6429ff3f': 'MaxPoolAmountExceeded(uint256,uint256)',
'46169f04': 'MaxPoolUsdForDepositExceeded(uint256,uint256)',
'2b6e7c3f': 'MaxPriceAgeExceeded(uint256,uint256)',
'3d1986f7': 'MaxRefPriceDeviationExceeded(address,uint256,uint256,uint256)',
'519ba753': 'MaxSubaccountActionCountExceeded(address,address,uint256,uint256)',
'9da36043': 'MaxSwapPathLengthExceeded(uint256,uint256)',
'faf66f0c': 'MaxTimelockDelayExceeded(uint256)',
'c10ceac7': 'MaxTotalCallbackGasLimitForAutoCancelOrdersExceeded(uint256,uint256)',
'043038f0': 'MaxTotalContributorTokenAmountExceeded(address,uint256,uint256)',
'961b4025': 'MinContributorPaymentIntervalBelowAllowedRange(uint256)',
'b9dc7b9d': 'MinContributorPaymentIntervalNotYetPassed(uint256)',
'966fea10': 'MinGlvTokens(uint256,uint256)',
'f442c0bc': 'MinLongTokens(uint256,uint256)',
'6ce23460': 'MinMarketTokens(uint256,uint256)',
'85efb31a': 'MinPositionSize(uint256,uint256)',
'b4a196af': 'MinShortTokens(uint256,uint256)',
'cc32db99': 'NegativeExecutionPrice(int256,uint256,uint256,int256,uint256)',
'53410c43': 'NonAtomicOracleProvider(address)',
'28f773e9': 'NonEmptyExternalCallsForSubaccountOrder()',
'ef2df9b5': 'NonEmptyTokensWithPrices(uint256)',
'730293fd': 'OpenInterestCannotBeUpdatedForSwapOnlyMarket(address)',
'8cf95e58': 'OracleProviderAlreadyExistsForToken(address)',
'd84b8ee8': 'OracleTimestampsAreLargerThanRequestExpirationTime(uint256,uint256,uint256)',
'7d677abf': 'OracleTimestampsAreSmallerThanRequired(uint256,uint256)',
'730d44b1': 'OrderAlreadyFrozen()',
'59485ed9': 'OrderNotFound(bytes32)',
'e09ad0e9': 'OrderNotFulfillableAtAcceptablePrice(uint256,uint256)',
'9aba92cb': 'OrderNotUpdatable(uint256)',
'8a4bd513': 'OrderTypeCannotBeCreated(uint256)',
'cf9319d6': 'OrderValidFromTimeNotReached(uint256,uint256)',
'b92fb250': 'PnlFactorExceededForLongs(int256,uint256)',
'b0010694': 'PnlFactorExceededForShorts(int256,uint256)',
'9f0bc7de': 'PnlOvercorrected(int256,uint256)',
'426cfff0': 'PositionNotFound(bytes32)',
'ee919dd9': 'PositionShouldNotBeLiquidated(string,int256,int256,int256)',
'ded099de': 'PriceAlreadySet(address,uint256,uint256)',
'd4141298': 'PriceFeedAlreadyExistsForToken(address)',
'f0641c92': 'PriceImpactLargerThanOrderSize(int256,uint256)',
'e8266438': 'RequestNotYetCancellable(uint256,uint256,string)',
'e70f9152': 'SelfTransferNotSupported(address)',
'032b3d00': 'SequencerDown()',
'113cfc03': 'SequencerGraceDurationNotYetPassed(uint256,uint256)',
'950227bb': 'ShiftFromAndToMarketAreEqual(address)',
'b611f297': 'ShiftNotFound(bytes32)',
'f54d8776': 'ShortTokensAreNotEqual(address,address)',
'20b23584': 'SignalTimeNotYetPassed(uint256)',
'26025b4e': 'SubaccountApprovalDeadlinePassed(uint256,uint256)',
'9b539f07': 'SubaccountApprovalExpired(address,address,uint256,uint256)',
'9be0a43c': 'SubaccountNotAuthorized(address,address)',
'75885d69': 'SwapPriceImpactExceedsAmountIn(uint256,int256)',
'd2e229e6': 'SwapsNotAllowedForAtomicWithdrawal(uint256,uint256)',
'7bf8d2b3': 'SyncConfigInvalidInputLengths(uint256,uint256)',
'624b5b13': 'SyncConfigInvalidMarketFromData(address,address)',
'8b3d4655': 'SyncConfigUpdatesDisabledForMarket(address)',
'0798d283': 'SyncConfigUpdatesDisabledForMarketParameter(address,string)',
'8ea7eb18': 'SyncConfigUpdatesDisabledForParameter(string)',
'b783c88a': 'ThereMustBeAtLeastOneRoleAdmin()',
'282b5b70': 'ThereMustBeAtLeastOneTimelockMultiSig()',
'979dc780': 'TokenTransferError(address,address,uint256)',
'0e92b837': 'Uint256AsBytesLengthExceeds32Bytes(uint256)',
'6afad778': 'UnableToGetBorrowingFactorEmptyPoolUsd()',
'be4729a2': 'UnableToGetCachedTokenPrice(address,address)',
'11423d95': 'UnableToGetFundingFactorEmptyOpenInterest()',
'7a0ca681': 'UnableToGetOppositeToken(address,address)',
'3a61a4a9': 'UnableToWithdrawCollateral(int256)',
'a35b150b': 'Unauthorized(address,string)',
'99b2d582': 'UnexpectedBorrowingFactor(uint256,uint256)',
'cc3459ff': 'UnexpectedMarket()',
'3b42e952': 'UnexpectedPoolValue(int256)',
'814991c3': 'UnexpectedPositionState()',
'e949114e': 'UnexpectedRelayFeeToken(address,address)',
'a9721241': 'UnexpectedRelayFeeTokenAfterSwap(address,address)',
'785ee469': 'UnexpectedTokenForVirtualInventory(address,address)',
'3af14617': 'UnexpectedValidFromTime(uint256)',
'3784f834': 'UnsupportedOrderType(uint256)',
'0d0fcc0b': 'UnsupportedRelayFeeToken(address,address)',
'eadaf93a': 'UsdDeltaExceedsLongOpenInterest(int256,uint256)',
'2e949409': 'UsdDeltaExceedsPoolValue(int256,uint256)',
'8af0d140': 'UsdDeltaExceedsShortOpenInterest(int256,uint256)',
'60737bc0': 'WithdrawalNotFound(bytes32)',
}

gmx_error_map = {bytes.fromhex(k): v for k, v in gmx_error_map.items()}

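# Illustrative sanity check for the map above: a custom-error selector is the first four bytes of
# keccak256 of the error signature, so any entry can be spot-checked against its name (this sketch
# assumes eth_utils, which the codebase already uses elsewhere):
#
#   from eth_utils import keccak
#   assert keccak(text='OrderNotFound(bytes32)')[:4] == bytes.fromhex('59485ed9')
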
def gmx_parse_reason_bytes(e: bytes) -> str:
    sig_bytes = e[:4]
    # the map is keyed by the 4-byte selector, not the full revert payload
    sig = gmx_error_map.get(sig_bytes)
    if sig is None:
        return f'Unknown GMX error {e.hex()}'
    name, types = sig.split('(', 1)
    types = types[:-1]
    if len(e) > 4:
        data = e[4:]
        values = abi_decoder.decode(types.split(','), data)
        return f'{name}({",".join(map(str, values))})'
    return name

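# Sketch of the expected round trip, assuming a recent eth_abi as the encoder counterpart of
# whatever abi_decoder is bound to in this module (illustrative only):
#
#   from eth_abi import encode
#   payload = bytes.fromhex('1de2bca4') + encode(['uint256'], [2])   # InvalidVersion(uint256)
#   gmx_parse_reason_bytes(payload)                                  # -> 'InvalidVersion(2)'
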
@@ -1,446 +0,0 @@
import asyncio
import logging
from copy import copy
from datetime import timedelta

from eth_utils import to_checksum_address
from web3.types import EventData

from ._abi import parse_event_log_data
from ._base import GMXPosition, gmx_positions, GMXOrderType, gmx_tk_in_flight, tk_gmx_in_flight, gmx_api, \
    gmx_markets_by_index_token
from ._chaininfo import gmx_chain_info
from ._error import gmx_parse_reason_bytes
from ._metadata import gmx_update_metadata
from .. import dec, from_timestamp
from ..addrmeta import address_metadata
from ..base import OldTokenDict, OldGMXDict
from ..base.chain import current_chain
from ..base.order import TrancheKey
from ..contract import get_contract_event
from ..contract.dexorder import get_dexorder_contract
from ..event_handler import update_pool_price
from ..final_ohlc import FinalOHLCRepository
from ..ohlc import period_name
from ..periodic import periodic
from ..progressor import BlockProgressor
from ..tokens import get_token
from ..util import hexstr
from ..util.async_util import maywait

log = logging.getLogger(__name__)


def gmx_wire_runner_init(runner: BlockProgressor):
    pass


def gmx_wire_runner_early(runner: BlockProgressor, backfill: FinalOHLCRepository = None):
    runner.add_event_trigger(handle_gmxcallbackerror_event, get_contract_event('GMXCallbackHandler', 'GMXCallbackError'))
    runner.add_callback(gmx_handle_metadata_update)
    if backfill is not None:
        runner.add_callback(create_backfill_handler(backfill) if backfill else gmx_update_prices)
    runner.add_event_trigger(handle_gmx_events, log_filter={'address': gmx_chain_info[current_chain.get().id]['EventEmitter'], })
    runner.add_event_trigger(handle_gmxorderplaced, get_contract_event('GMX', 'GMXOrderPlaced'))


def gmx_wire_runner_late(runner: BlockProgressor):
    pass


def handle_gmxcallbackerror_event(event: EventData):
    log.error(f'GMX callback error {event["args"]["reason"]}')

# GMX orders wait on-chain a few blocks before the GMX Handlers execute or cancel them. Also, liquidation orders can
# occur without any associated vault order. Therefore, we take the following approach:
#
# When orders are placed, a GMXOrderPlaced event is emitted alongside the DexorderSwapPlaced event, providing a mapping
# between vault tranche keys and GMX order keys, as well as an in-flight locking mechanism in both the vault and
# backend. In a few blocks' time, the GMX Handlers will deal with the order and emit an OrderCreated or OrderCancelled
# event in addition to invoking the corresponding callback method on the vault, which unlocks the tranche, adjusts
# rate limits, and emits the regular DexorderSwapFilled event, using amountOut as the USD amount filled and amountIn
# as the "price," a virtual amount calculated to make the execution price equal amountOut/amountIn, matching the format
# for non-inverted swaps.
#
# Therefore, the regular backend triggers and fill records act normally on GMX orders without modification.
#
# The backend in-flight lock and the tranche-key to gmx-order-key mapping are maintained in gmx_in_flight, using a
# vault event to open and a GMX order event to close.
#
# The Position object is maintained by watching GMX PositionIncrease and PositionDecrease events, which capture
# liquidations as well as vault-initiated orders to accurately maintain the Position state.


def invalid_vault(vault):
    # return vault not in vault_owners
    return False  # todo debug


#
# GMXOrderPlaced along with OrderCancelled and OrderExecuted maintain the gmx_in_flight lock and mapping to a tranche key
#

def handle_gmxorderplaced(event: EventData):
    # This is emitted alongside the DexorderSwapPlaced event in order to provide additional information for GMX.
    # event GMXOrderPlaced(uint64 orderIndex, uint8 trancheIndex, bytes32 gmxOrderKey);
    log.info(f'GMXOrderPlaced {event}')
    vault = event['address']
    if invalid_vault(vault):
        return
    order_index = event['args']['orderIndex']
    tranche_index = event['args']['trancheIndex']
    gmx_order_key = event['args']['gmxOrderKey']
    # register the gmx order key as in-flight
    keystr = hexstr(gmx_order_key)
    tk = TrancheKey(vault, order_index, tranche_index)
    # start gmx in flight. see end_gmx_in_flight()
    gmx_tk_in_flight[keystr] = tk
    tk_gmx_in_flight[tk] = keystr


def handle_ordercancelled_event(event: dict, data: dict):
    log.info(f'GMX order cancelled {data}')
    vault = data['account']
    if invalid_vault(vault):
        return
    reason = gmx_parse_reason_bytes(data['reasonBytes'])
    gmx_order_key = data['key']
    if gmx_order_key not in gmx_tk_in_flight:
        log.warning(f'GMX order cancelled but not in flight: {gmx_order_key}')
        return
    end_gmx_in_flight(gmx_order_key)
    log.info(f'GMX order cancelled due to {reason} in tx {data["tx"]}')


def handle_orderexecuted_event(event: dict, data: dict):
    log.info(f'GMX order executed {data}')
    vault = data['account']
    if invalid_vault(vault):
        return
    gmx_order_key = data['key']
    if gmx_order_key not in gmx_tk_in_flight:
        # todo handle liquidation either here or with PositionDecrease events
        log.warning(f'GMX order executed but not in flight: {gmx_order_key}')
        return
    end_gmx_in_flight(gmx_order_key)


def end_gmx_in_flight(gmx_order_key):
    gmx_order_key = hexstr(gmx_order_key)
    tk = gmx_tk_in_flight[gmx_order_key]
    del gmx_tk_in_flight[gmx_order_key]
    del tk_gmx_in_flight[tk]


#
# GMXPositionIncrease and GMXPositionDecrease events maintain our Position records
#

def handle_position_event(event: dict, data: dict, is_increase: bool):
    log.info(f'GMX position {"increase" if is_increase else "decrease"} {event}')
    # {'account': '0xdfc16a4247677d723d897aa4fe865a02f5d78746',
    #  'borrowingFactor': 250545812647447573795593810338,
    #  'collateralAmount': 1019200,
    #  'collateralDeltaAmount': 1019200,
    #  'collateralToken': '0xaf88d065e77c8cc2239327c5edb3a432268e5831',
    #  'collateralTokenPrice.max': 999856563986601850000000,
    #  'collateralTokenPrice.min': 999856563986601850000000,
    #  'event': 'PositionIncrease',
    #  'executionPrice': 3816407734365198,
    #  'fundingFeeAmountPerSize': 430546959972637644839,
    #  'increasedAtTime': 1753748680,
    #  'indexTokenPrice.max': 3817347116613155,
    #  'indexTokenPrice.min': 3817347116613155,
    #  'isLong': True,
    #  'longTokenClaimableFundingAmountPerSize': 4117446384759965489999004204,
    #  'market': '0x70d95587d40a2caf56bd97485ab3eec10bee6336',
    #  'orderKey': b'2\xe6\x8a\x07\xe9x\x839\x8f\xdd\xd5j\x16\x88\x80\xff[HY\xadk\x0f\xb4n3\xfe\xa2.\xd6\x97\x90\x9b',
    #  'orderType': 2,
    #  'positionKey': b"\xa8r\xc6\xcf^\x89\xf8k\xfa='\xe9\x19\x12\x11\xb8|;k3Df8\xee^\x9a\x9f)\xef8\x8c\x86",
    #  'priceImpactAmount': 128960267235,
    #  'priceImpactUsd': 492286104290598018742093888,
    #  'sender': '0xe68caaacdf6439628dfd2fe624847602991a31eb',
    #  'shortTokenClaimableFundingAmountPerSize': 7250294981528901831,
    #  'sizeDeltaInTokens': 524053020328728,
    #  'sizeDeltaUsd': 2000000000000000000000000000000,
    #  'sizeInTokens': 524053020328728,
    #  'sizeInUsd': 2000000000000000000000000000000,
    #  'tx': '0x74e3aee1e4a92d3fe4e05d8050197c080c51dc0170ac12e8e90dbbe9fb3cc4b5'}

    vault = to_checksum_address(data['account'])
    if invalid_vault(vault):
        return
    order_type = GMXOrderType(data['orderType'])
    gmx_order_key = data['orderKey']
    is_long = data['isLong']
    size_delta = data['sizeDeltaUsd']
    size = data['sizeInUsd']
    market = data['market']
    collateral_token = data['collateralToken']
    collateral_amount = data['collateralAmount']
    collateral_delta = data['collateralDeltaAmount']
    price = data['executionPrice']

    key = GMXPosition.Key(market, collateral_token, is_long)
    positions = gmx_positions.get(vault)
    pos = GMXPosition(key.market_token, key.collateral_token, key.is_long)
    if positions is None:
        positions = [pos]
    else:
        positions = list(positions)
        if pos in positions:
            old = [p for p in positions if p == pos][0]
            positions.remove(old)
            pos = copy(old)
        positions.append(pos)
    buy = is_long == is_increase
    if buy:
        if -size_delta < pos.size < 0:
            log.error(f'GMX short position becoming positive: {pos} + {size_delta}')
        pos.size += size_delta
    else:
        if 0 < pos.size < size_delta:
            log.error(f'GMX long position becoming negative: {pos} - {size_delta}')
        pos.size -= size_delta
    if pos.size != size:
        log.error(f'GMX position size mismatch: {pos} != {size}')
    if not pos.size:
        positions.remove(pos)
    if not positions:
        del gmx_positions[vault]
    else:
        gmx_positions[vault] = positions


# todo DANNY: if a position is liquidated, should I cancel pending orders in that market?


def handle_positionincrease_event(event: dict, data: dict):
    handle_position_event(event, data, True)


def handle_positiondecrease_event(event: dict, data: dict):
    handle_position_event(event, data, False)


# def handle_depositcreated_event(event: dict, data: dict):
#     log.info(f'GMX deposit created {event}')
#
# def handle_depositexecuted_event(event: dict, data: dict):
#     log.info(f'GMX deposit executed {event}')
#
# def handle_withdrawalcreated_event(event: dict, data: dict):
#     log.info(f'GMX withdrawal created {event}')
#
# def handle_withdrawalexecuted_event(event: dict, data: dict):
#     log.info(f'GMX withdrawal executed {event}')

event_handlers = {
    'OraclePriceUpdate': None,

    'MarketPoolValueInfo': None,
    'MarketPoolValueUpdated': None,

    'DepositCreated': None,
    'DepositExecuted': None,
    'WithdrawalCreated': None,
    'WithdrawalExecuted': None,

    'OrderCreated': None,
    'OrderUpdated': None,
    'OrderCancelled': handle_ordercancelled_event,
    'OrderExecuted': handle_orderexecuted_event,
    'OrderSizeDeltaAutoUpdated': None,  # ADL?
    'OrderCollateralDeltaAmountAutoUpdated': None,

    'PositionIncrease': handle_positionincrease_event,
    'PositionDecrease': handle_positiondecrease_event,
    'PositionFeesCollected': None,

    'PositionImpactPoolAmountUpdated': None,
    'PositionImpactPoolDistributed': None,
    'VirtualPositionInventoryUpdated': None,

    'ClaimableFeeAmountUpdated': None,
    'ClaimableFundingUpdated': None,
    'ClaimableFundingAmountPerSizeUpdated': None,
    'FundingFeeAmountPerSizeUpdated': None,
    'FundingFeesClaimed': None,

    'CollateralSumUpdated': None,
    'CollateralClaimed': None,

    'OpenInterestInTokensUpdated': None,
    'OpenInterestUpdated': None,

    'SetAvailableFeeAmount': None,
    'BuybackFees': None,
    'FeesClaimed': None,

    'ExecutionFeeRefundCallback': None,

    'PoolAmountUpdated': None,

    'SwapInfo': None,
    'SwapFeesCollected': None,
    'SwapImpactPoolAmountUpdated': None,
    'VirtualSwapInventoryUpdated': None,

    'CumulativeBorrowingFactorUpdated': None,

    'KeeperExecutionFee': None,
    'ExecutionFeeRefund': None,

    'SetUint': None,
    # SetBytes32 presumably and others...
    'SyncConfig': None,

    'ShiftCreated': None,
    'ShiftExecuted': None,

    'GlvValueUpdated': None,
    'GlvDepositCreated': None,
    'GlvDepositExecuted': None,
    'GlvWithdrawalCreated': None,
    'GlvWithdrawalExecuted': None,
    'GlvShiftCreated': None,
    'GlvShiftExecuted': None,

    'AffiliateRewardUpdated': None,
    'AffiliateRewardClaimed': None,

    'SetMaxAllowedSubaccountActionCount': None,
    'IncrementSubaccountActionCount': None,
    'SetSubaccountAutoTopUpAmount': None,
    'SubaccountAutoTopUp': None,

}

async def handle_gmx_events(events: list[dict]):
    for event in events:
        data = parse_event_log_data(event)
        log.info(f'GMX Event {data}')
        event_name = data['event']
        try:
            func = event_handlers[event_name]
        except KeyError:
            log.debug(f'Unknown event {event_name}')
        else:
            if func:
                await maywait(func(event, data))


#
# Metadata update triggers
# todo These are here because they used to be blockchain event handlers and should be once again...
#

initialized = False


@periodic(timedelta(hours=1))
async def gmx_handle_metadata_update():
    global initialized
    # noinspection PyBroadException
    try:
        await gmx_update_metadata()
        initialized = True
    except:
        if not initialized:
            raise
        log.exception('Exception in gmx_handle_metadata_update()')


# @periodic(timedelta(seconds=1))
# async def gmx_handle_price_update():
#     updates = await fetch_price_updates()
#     # ticker updates have only one price per addr so we can parallelize setting prices
#     await asyncio.gather(*[update_pool_price(addr, time, price, 30) for addr, time, price in updates])


def create_backfill_handler(ohlcs: FinalOHLCRepository):

    @periodic(timedelta(seconds=1))
    async def gmx_handle_price_update_with_backfill():
        updates = await fetch_price_updates()
        backfill_addrs = [addr for addr, time, price in updates if not ohlcs.has_symbol(addr)]

        if backfill_addrs:
            log.info(f'Backfilling {len(backfill_addrs)} new GMX tokens')
            await asyncio.gather(*[backfill_token(ohlcs, a) for a in backfill_addrs])

        for addr, time, price in updates:
            ohlcs.update(addr, time, price)

    return gmx_handle_price_update_with_backfill


def push_candle(ohlcs, addr, period, candle):
    time, *prices = candle
    time = from_timestamp(time)
    prices = [dec(p) for p in prices]
    ohlcs.update_ohlc(addr, period, time, *prices)


GMX_OHLC_PERIODS = [
    timedelta(minutes=1),
    timedelta(minutes=5),
    timedelta(minutes=15),
    timedelta(hours=1),
    timedelta(hours=4),
    timedelta(days=1),
]


async def backfill_token(ohlcs: FinalOHLCRepository, addr: str):
    token = await get_token(addr)
    addr = token['address']
    for period in GMX_OHLC_PERIODS:
        # Polling a large window is the only history method GMX provides :( It's also how their web client works!
        symbol = token['symbol']
        interval = period_name(period).lower()
        response = gmx_api('prices/candles', tokenSymbol=symbol, period=interval, limit=10_000)
        if 'error' in response:
            if not response['error'].startswith('unsupported period'):
                log.warning(f'Could not query token backfill for {token["symbol"]}: {response["error"]}')
        else:
            for c in reversed(response['candles']):
                push_candle(ohlcs, addr, period, c)
    log.info(f'Backfilled new GMX token {token["symbol"]}')

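# Assumed candle shape for push_candle()/backfill_token() above (illustrative, not verified against
# the GMX API): each entry of response['candles'] is [timestamp, open, high, low, close], returned
# newest-first, which is why backfill_token() iterates the list in reversed() order, e.g.:
#
#   push_candle(ohlcs, addr, timedelta(minutes=1), [1749849300, '3816.1', '3817.3', '3815.9', '3816.4'])
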
@periodic(timedelta(seconds=1))
async def gmx_update_prices():
    for token, time, price in await fetch_price_updates():
        for market in gmx_markets_by_index_token.get(token, []):
            # the market metadata carries the index token's decimals (set in gmx_detect_markets)
            info: OldGMXDict = address_metadata[market]
            decimals = info['decimals']
            await update_pool_price(market, time, price*dec(10)**decimals, decimals)


async def fetch_price_updates():
    tokens = list(gmx_markets_by_index_token.keys())
    prices = await get_dexorder_contract().getGMXPrices(tokens)
    factor = dec(10)**-30
    return [
        (addr, from_timestamp(timestamp), (dec(bid) + dec(ask)) / 2 * factor)
        for addr, (timestamp, bid, ask) in zip(tokens, prices)
    ]


async def fetch_price_updates_using_gmx_api():
    updates = []
    # todo use on-chain oracle events
    for t in gmx_api('prices/tickers'):
        """
        {
            "tokenAddress": "0x3Eea56A1ccCdbfB70A26aD381C71Ee17E4c8A15F",
            "tokenSymbol": "BOME",
            "minPrice": "1621019778803375000000",
            "maxPrice": "1621534421901125000000",
            "updatedAt": 1749849326251,
            "timestamp": 1749849325
        },
        """
        addr = t['tokenAddress']
        if addr not in address_metadata:
            continue
        # GMX prices use 30 decimal places
        price = (dec(t['minPrice']) + dec(t['maxPrice'])) / 2 * dec(10) ** dec(-30)
        time = from_timestamp(t['timestamp'])
        updates.append((addr, time, price))
    return updates

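# Worked example of the 30-decimal scaling used above, based on the sample BOME ticker (illustrative):
#
#   mid = (dec('1621019778803375000000') + dec('1621534421901125000000')) / 2   # ~1.6213e21
#   mid * dec(10) ** -30                                                        # ~1.62e-9, the raw 30-decimal price
#
# gmx_update_prices() then multiplies by 10**decimals of the index token to recover the
# per-whole-token price before calling update_pool_price().
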
@@ -1,93 +0,0 @@
__all__ = ['gmx_update_metadata']

import asyncio
import logging
import re
from typing import Optional

from dexorder import ADDRESS_0
from dexorder.addrmeta import address_metadata
from dexorder.base import OldTokenDict, OldGMXDict
from dexorder.base.chain import current_chain
from dexorder.base.orderlib import Exchange
from dexorder.gmx._base import gmx_api, gmx_markets_by_index_token
from dexorder.gmx._contract import get_gmx_contract
from dexorder.tokens import get_token

log = logging.getLogger(__name__)


async def gmx_update_metadata():
    log.info('Updating GMX metadata')
    await gmx_detect_markets()


token_response: Optional[dict] = None


async def gmx_get_token(addr: str):
    # The GMX API appears to be the only way to obtain the index token metadata, since there is no corresponding ERC20
    # on-chain at the synthetic address.
    found = await get_token(addr, squelch=True)  # use our normal lookup first
    if found is not None:
        return found
    global token_response
    if token_response is None or addr not in token_response['tokens']:
        token_response = gmx_api('tokens')
    for info in token_response['tokens']:
        if info['address'] == addr:
            synthetic = info.get('synthetic', False)
            if not synthetic:
                log.warning('loading non-synthetic token via GMX API')
            name = f'GMX {info["symbol"]}'
            if synthetic:
                name += ' Synthetic'
            chain_id = current_chain.get().id
            approved = not re.search(r'deprecated', info['symbol'], re.IGNORECASE)
            token = OldTokenDict(type='Token', chain=chain_id, address=info['address'], name=name,
                                 symbol=info['symbol'], decimals=info['decimals'],
                                 approved=approved)
            address_metadata[info['address']] = token
            return token
    log.error(f'Could not find index token {addr} in GMX tokens API')
    return None


async def gmx_detect_markets():
    ds = get_gmx_contract('DataStore')
    reader = get_gmx_contract('Reader')
    market_info = await reader.getMarkets(ds.address, 0, 1000)
    markets = [
        OldGMXDict(type='GMX', chain=current_chain.get().id, exchange=Exchange.GMX.value, address=market_token,
                   index=index_token, long=long_token, short=short_token, decimals=0, leverage=0)
        for market_token, index_token, long_token, short_token in market_info
        # discard spot-only markets that do not have an index token
        # todo support single-asset markets
        if market_token != ADDRESS_0 and index_token != ADDRESS_0 and
           long_token != ADDRESS_0 and short_token != ADDRESS_0 and market_token not in address_metadata
    ]
    market_disabled = await asyncio.gather(*[ds.is_market_disabled(m['address']) for m in markets])
    new_markets = [m for m, d in zip(markets, market_disabled) if not d and m['address'] not in address_metadata]

    async def init_market(m: OldGMXDict):
        min_collateral_factor, token = await asyncio.gather(
            ds.min_collateral_factor(m['address']), gmx_get_token(m['index']))
        m['decimals'] = token['decimals']
        m['leverage'] = round(1 / min_collateral_factor)
        address_metadata[m['address']] = m
        cur = gmx_markets_by_index_token.get(m['index'])
        if cur is None:
            gmx_markets_by_index_token[m['index']] = [m['address']]
        else:
            if m['address'] not in cur:
                gmx_markets_by_index_token[m['index']] = cur + [m['address']]

    await asyncio.gather(*[init_market(m) for m in new_markets])
    token_addrs = set(t for m in new_markets for t in (m['address'], m['long'], m['short']))
    await asyncio.gather(*[get_token(t) for t in token_addrs])

    # Log the markets
    def t(addr):
        # noinspection PyTypedDict
        return address_metadata[addr]['symbol'] if addr in address_metadata and address_metadata[addr] else addr
    for m in new_markets:
        log.info(f'GMX:{m["address"]} {t(m["index"])}/USD [{t(m["long"])}-{t(m["short"])}] {m["leverage"]}x')

@@ -1,3 +1,4 @@
import itertools
import logging
from contextlib import asynccontextmanager
from contextvars import ContextVar

@@ -26,10 +26,10 @@ import sys
from typing import Union, Iterable, Optional

from dexorder import config, NARG
from dexorder.base import OldPoolDict, OldTokenDict
from dexorder.base.chain import current_chain
from dexorder.database.model import Token, Pool
from dexorder.database.model.pool import PoolDict
from dexorder.database.model.pool import OldPoolDict, PoolDict
from dexorder.database.model.token import OldTokenDict, TokenDict
from dexorder.util import json

log = logging.getLogger(__name__)
@@ -50,6 +50,7 @@ def dump_tokens(out, tokens, include_unapproved=False):
    approved_addrs = set()
    had_output = False
    for token in tokens:
        token: Token
        if isinstance(token, Token):
            token: Token
            a = token.address

@@ -342,17 +342,11 @@ class OHLCRepository:
    def add_symbol(symbol: str, period: timedelta = None):
        if period is not None:
            if (symbol, period) not in recent_ohlcs:
                recent_ohlcs[OHLCKey(symbol, period)] = []  # setting an empty value will initiate price capture
                recent_ohlcs[(symbol, period)] = []  # setting an empty value will initiate price capture
        else:
            for period in OHLC_PERIODS:
                if (symbol, period) not in recent_ohlcs:
                    recent_ohlcs[OHLCKey(symbol, period)] = []

    @staticmethod
    def has_symbol(symbol: str, period: timedelta):
        return OHLCKey(symbol, period) in recent_ohlcs

                    recent_ohlcs[(symbol, period)] = []

    async def update_all(self, symbol: str, time: datetime, price: dec, *, create: bool = True):
        """ the update_all() and update() methods generate bars for the recent_ohlcs BlockDict """
@@ -370,7 +364,7 @@ class OHLCRepository:
        # log.debug(f'Updating OHLC {logname} {minutely(time)} {price}')
        if price is not None:
            self.quotes[symbol] = timestamp(time), str(price)
        key = OHLCKey(symbol, period)
        key = symbol, period
        # recent_ohlcs holds a list of "recent" NativeOHLC's stored as blockdata. we try to keep the recent array long
        # enough to extend prior to the root block time
        historical: Optional[list[NativeOHLC]] = recent_ohlcs.get(key)
@@ -435,7 +429,7 @@ class OHLCRepository:
        return found

    def flush(self) -> None:
        # log.debug(f'flushing {len(self.dirty_chunks)} chunks')
        log.debug(f'flushing {len(self.dirty_chunks)} chunks')
        for chunk in self.dirty_chunks:
            chunk.save()
        self.dirty_chunks.clear()

@@ -10,13 +10,10 @@ from dexorder import db, metric, config
from dexorder.accounting import accounting_transaction_gas
from dexorder.base import TransactionReceiptDict, TransactionRequest, transaction_request_deserializers
from dexorder.base.order import TrancheKey, OrderKey
from dexorder.base.orderlib import PriceProof, Exchange
from dexorder.contract import ContractProxy
from dexorder.contract.contract_proxy import ContractTransaction
from dexorder.base.orderlib import PriceProof
from dexorder.contract.dexorder import get_dexorder_contract
from dexorder.database.model.accounting import AccountingSubcategory
from dexorder.database.model.transaction import TransactionJob
from dexorder.gmx import tk_gmx_in_flight
from dexorder.order.orderstate import Order
from dexorder.order.triggers import (OrderTriggers,
                                     TrancheState, active_tranches, order_error)
@@ -71,18 +68,10 @@ class TrancheExecutionHandler (TransactionHandler):
    def __init__(self):
        super().__init__('te')

    async def build_transaction(self, job_id: UUID, req: TrancheExecutionRequest) -> Optional[ContractTransaction]:
    async def build_transaction(self, job_id: UUID, req: TrancheExecutionRequest) -> dict:
        tk = req.tranche_key
        try:
            kwargs = {}
            if Order.of(tk).order.route.exchange == Exchange.GMX:
                if tk_gmx_in_flight.get(tk):
                    return None  # a GMX order is already in flight
                fee = await ContractProxy(req.vault, 'IVaultGMX').gmxExecutionFee(False)
                kwargs['value'] = round(fee * 1.1)  # extra 10% because gas prices can change quickly
            return await get_dexorder_contract().build.execute(
                job_id.bytes, (req.vault, req.order_index, req.tranche_index, req.price_proof),
                kwargs=kwargs)
            return await get_dexorder_contract().build.execute(job_id.bytes, (req.vault, req.order_index, req.tranche_index, req.price_proof))
        except ContractPanicError as x:
            exception = x
            errcode = ''
@@ -244,10 +233,6 @@ async def handle_dexorderexecutions(event: EventData):
    if job is None:
        log.warning(f'Job {exe_id} not found!')
        return
    # verify that the transaction hash of the event is the same as that of our request
    if job.tx_id != event['transactionHash']:
        log.warning(f'Ignoring rogue DexorderExecutions {exe_id} with wrong txid {job.tx_id} != {event["transactionHash"]}')
        return
    # noinspection PyTypeChecker
    req: TrancheExecutionRequest = job.request
    tk = TrancheKey(req.vault, req.order_index, req.tranche_index)

@@ -304,13 +304,6 @@ SwapOrder {self.key}
amount: {"input" if self.order.amountIsInput else "output"} {await adjust_decimals(amount_token, self.filled):f}/{await adjust_decimals(amount_token, self.amount):f}{" to owner" if self.order.outputDirectlyToOwner else ""}
minFill: {await adjust_decimals(amount_token, self.min_fill_amount):f}
inverted: {self.order.inverted}
'''
        if self.order.gmx:
            msg += f'''
gmx order: {"increase" if self.order.gmx.is_increase else "decrease"} {"long" if self.order.gmx.is_long else "short"}
collateral: {self.order.gmx.reserve_amount}
'''
        msg += '''
tranches:
'''
        for i in range(len(self.order.tranches)):

@@ -1,5 +1,6 @@
import asyncio
import logging
from abc import abstractmethod
from collections import defaultdict
from datetime import timedelta
from enum import Enum, auto
@@ -12,11 +13,11 @@ from dexorder.base.orderlib import SwapOrderState, PriceProof, DISTANT_FUTURE, D
    MIN_SLIPPAGE_EPSILON
from dexorder.blockstate import BlockDict
from .orderstate import Order
from .. import dec, order_log, timestamp, config
from ..base import OldPoolDict
from .. import dec, order_log, timestamp, from_timestamp, config
from ..base.chain import current_clock
from ..base.order import OrderKey, TrancheKey
from ..contract import ERC20
from ..database.model.pool import OldPoolDict
from ..pools import ensure_pool_price, pool_prices, get_pool
from ..routing import pool_address
from ..vault_blockdata import vault_balances, adjust_balance
@@ -37,7 +38,7 @@ execution should be attempted on the tranche.
"""


# tranches which have passed all constraints and should be executed. This set gets checked against already in-
# tranches which have passed all constraints and should be executed
active_tranches: BlockDict[TrancheKey, Optional[PriceProof]] = BlockDict('at')


@@ -110,10 +111,10 @@ async def update_balance_triggers(vault: str, token: str):
    await asyncio.gather(*updates)


def update_price_triggers(addr: str, price: dec, decimals: int):
    price = price * dec(10) ** dec(-decimals)  # adjust for pool decimals to get onchain price
async def update_price_triggers(pool: OldPoolDict, price: dec):
    price = price * dec(10) ** dec(-pool['decimals'])  # adjust for pool decimals to get onchain price
    price = float(price)  # since we use SIMD operations to evaluate lines, we must convert to float
    for pt in PriceLineTrigger.by_pool.get(addr, []):
    for pt in PriceLineTrigger.by_pool.get(pool['address'], []):
        pt.update(price)


@@ -177,7 +178,6 @@ class Trigger:
        Expiration = 2
        MinLine = 3
        MaxLine = 4
        GMXInFlight = 5

    def __init__(self, trigger_type: TriggerType, tk: TrancheKey, value: bool):
        """
@@ -211,7 +211,9 @@ class Trigger:

    def _value_changed(self): pass

    def remove(self): pass

    @abstractmethod
    def remove(self): ...


async def has_funds(tk: TrancheKey):
@@ -231,7 +233,6 @@ async def has_funds(tk: TrancheKey):


async def input_amount_is_sufficient(order, token_balance):
    # todo modify for GMX
    # log.debug(f'input is sufficient? {order.min_fill_amount}')
    if order.amount_is_input:
        # log.debug(f'amount is input: {token_balance} >= {order.min_fill_amount}')
@@ -592,7 +593,7 @@ class TrancheTrigger:


    def fill(self, _amount_in, _amount_out, _next_activation_time):
        if _next_activation_time != 0:
        if _next_activation_time != DISTANT_PAST:
            # rate limit
            if self.activation_trigger is None:
                self.activation_trigger = TimeTrigger(True, self.tk, _next_activation_time, timestamp())

@@ -1,36 +0,0 @@
from datetime import timedelta
import time
import asyncio
from functools import wraps


def periodic(period: timedelta|float):
    """
    Decorator to allow only one execution of a function or coroutine per period.
    Works for both sync and async functions.
    """
    def decorator(func):
        last_called = {'time': 0.}
        period_seconds = period.total_seconds() if isinstance(period, timedelta) else period

        @wraps(func)
        def sync_wrapper(*args, **kwargs):
            now = time.monotonic()
            if now - last_called['time'] >= period_seconds:
                last_called['time'] = now
                return func(*args, **kwargs)
            return None

        @wraps(func)
        async def async_wrapper(*args, **kwargs):
            now = time.monotonic()
            if now - last_called['time'] >= period_seconds:
                last_called['time'] = now
                return await func(*args, **kwargs)
            return None

        if asyncio.iscoroutinefunction(func):
            return async_wrapper
        else:
            return sync_wrapper

    return decorator
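# Sketch of how the decorator above is meant to be used (illustrative):
#
#   @periodic(timedelta(seconds=5))
#   async def poll():
#       ...
#
#   await poll()   # first call runs
#   await poll()   # later calls return None until 5 seconds have elapsed since the last run
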
@@ -4,18 +4,18 @@ from dataclasses import dataclass
from datetime import datetime
from typing import Optional

from web3.exceptions import ContractLogicError, BadFunctionCallOutput
from web3.exceptions import ContractLogicError
from web3.types import EventData

from dexorder import dec, ADDRESS_0, from_timestamp, db, config, NATIVE_TOKEN
from dexorder.addrmeta import address_metadata
from dexorder.base import OldPoolDict
from dexorder.base.chain import current_chain
from dexorder.base.orderlib import Exchange
from dexorder.blocks import get_block_timestamp
from dexorder.blockstate import BlockDict
from dexorder.blockstate.blockdata import K, V
from dexorder.database.model import Pool
from dexorder.database.model.pool import OldPoolDict
from dexorder.tokens import get_token, adjust_decimals as adj_dec
from dexorder.uniswap import UniswapV3Pool, uniswapV3_pool_address

@@ -64,7 +64,7 @@ async def load_pool(address: str, *, use_db=True) -> OldPoolDict:
        log.debug(f'new UniswapV3 pool {token0["symbol"]}/{token1["symbol"]} {fee/1_000_000:.2%} '
                  f'{("."+str(decimals)) if decimals >= 0 else (str(-decimals)+".")} {address}')
        add_mark_pool(address, t0, t1, fee)
    except (ContractLogicError, BadFunctionCallOutput):
    except ContractLogicError:
        pass
    except ValueError as v:
        try:
@@ -85,9 +85,8 @@ async def load_pool(address: str, *, use_db=True) -> OldPoolDict:

class PoolPrices (BlockDict[str, dec]):
    def __setitem__(self, item: K, value: V) -> None:
        old = self.setitem(item, value)
        if value != old:
            new_pool_prices[item] = value
        super().__setitem__(item, value)
        new_pool_prices[item] = value


def pub_pool_price(_s, k, v):
@@ -40,8 +40,8 @@ class BlockProgressor(metaclass=ABCMeta):
    def add_event_trigger(self,
                          # callback takes either a single event if multi=False, or if multi=True then a list of all events in the processing range
                          callback: Union[
                              Callable[[EventData|dict], Maywaitable[None]],
                              Callable[[list[EventData|dict]], Maywaitable[None]],
                              Callable[[EventData], Maywaitable[None]],
                              Callable[[list[EventData]], Maywaitable[None]],
                              Callable[[], Maywaitable[None]],
                          ],
                          event: ContractEvents = None,
@@ -316,7 +316,7 @@ class BlockStateRunner(BlockProgressor):
            else:
                lf = dict(log_filter)
                lf['blockHash'] = hexstr(block.hash)
                has_logs = 'topics' not in lf or any(bytes(hexbytes(topic)) in bloom for topic in lf['topics'])
                has_logs = any(bytes(hexbytes(topic)) in bloom for topic in lf['topics'])
                # log.debug(f'has {event.__class__.__name__}? {has_logs}')
                if not has_logs:
                    get_logs = None
@@ -1,4 +1,3 @@
import asyncio
import logging
from typing import Optional

@@ -7,11 +6,11 @@ from web3.exceptions import BadFunctionCallOutput

from dexorder import ADDRESS_0, db, NATIVE_TOKEN, dec, current_w3
from dexorder.addrmeta import address_metadata
from dexorder.base import OldTokenDict
from dexorder.base.chain import current_chain
from dexorder.blocks import current_block
from dexorder.contract import ERC20, ContractProxy, CONTRACT_ERRORS
from dexorder.database.model import Token
from dexorder.database.model.token import OldTokenDict
from dexorder.metadata import get_metadata

log = logging.getLogger(__name__)
@@ -46,19 +45,18 @@ async def get_native_balance(addr, *, adjust_decimals=True) -> dec:
    return value


async def get_token(address, *, squelch=False) -> Optional[OldTokenDict]:
async def get_token(address) -> Optional[OldTokenDict]:
    if address == ADDRESS_0:
        raise ValueError('No token at address 0')
    try:
        # noinspection PyTypeChecker
        return address_metadata[address]
    except KeyError:
        # noinspection PyTypeChecker
        result = address_metadata[address] = await load_token(address, squelch=squelch)
        result = address_metadata[address] = await load_token(address)
        return result


async def load_token(address: str, *, squelch=False) -> Optional[OldTokenDict]:
async def load_token(address: str) -> Optional[OldTokenDict]:
    contract = ERC20(address)
    chain_id = current_chain.get().id
    if db:
@@ -76,8 +74,7 @@ async def load_token(address: str, *, squelch=False) -> Optional[OldTokenDict]:
        try:
            rb: bytes = await ContractProxy(address, 'ERC20.sb').symbol()
        except CONTRACT_ERRORS:
            if not squelch:
                log.warning(f'token {address} has broken {func_name}()')
            log.warning(f'token {address} has broken {func_name}()')
            return None
        end = rb.find(b'\x00')
        if end == -1:
@@ -85,20 +82,22 @@ async def load_token(address: str, *, squelch=False) -> Optional[OldTokenDict]:
        try:
            return rb[:end].decode('utf8')
        except UnicodeDecodeError:
            if not squelch:
                log.warning(f'token {address} has an invalid {func_name}() {rb}')
            log.warning(f'token {address} has an invalid {func_name}() {rb}')
            return None

    dec_prom = contract.decimals()
    symbol_prom = get_string_or_bytes32('symbol')
    name_prom = get_string_or_bytes32('name')
    try:
        decimals = await dec_prom
    except CONTRACT_ERRORS:
        if not squelch:
            log.info(f'token {address} has no decimals()')
        log.info(f'token {address} has no decimals()')
        decimals = 0
        return None  # we do not support coins that don't specify decimals.
    approved = False  # never approve new coins
    chain_id = current_chain.get().id
    name, symbol = await asyncio.gather(get_string_or_bytes32('name'), get_string_or_bytes32('symbol'))
    symbol = await symbol_prom
    name = await name_prom
    td = OldTokenDict(type='Token', chain=chain_id, address=address,
                      name=name, symbol=symbol, decimals=decimals, approved=approved)
    md = get_metadata(address, chain_id=chain_id)

@@ -1,6 +1,6 @@
import asyncio
import logging
from abc import abstractmethod, ABC
from abc import abstractmethod
from typing import Optional
from uuid import uuid4

@@ -18,7 +18,7 @@ from dexorder.util import hexstr
log = logging.getLogger(__name__)


class TransactionHandler (ABC):
class TransactionHandler:
    instances: dict[str, 'TransactionHandler'] = {}

    @staticmethod

@@ -37,7 +37,7 @@ def dumps(obj):
    return dumpb(obj).decode('utf8')


def dumpb(obj):
    opts = orjson.OPT_PASSTHROUGH_SUBCLASS | orjson.OPT_SERIALIZE_DATACLASS
    opts = orjson.OPT_PASSTHROUGH_SUBCLASS
    return orjson.dumps(obj, default=_serialize, option=opts)