Compare commits

...

39 Commits

Author SHA1 Message Date
tim 8127a6c900 basic gmx impl running 2025-08-19 17:43:10 -04:00
tim 046903aab4 remove ohlc debug logs 2025-06-17 12:23:24 -04:00
tim a4a2f6e318 gmx metadata and backfill in finaldata 2025-06-17 00:37:52 -04:00
tim 97f98ba7cf gmx metadata and backfill in finaldata 2025-06-16 20:06:12 -04:00
tim eef803d3d6 gmx metadata and backfill in finaldata 2025-06-16 20:04:28 -04:00
tim 88057607d5 put app back on app.dexorder.com and corp site on dexorder.com with www redirecting to apex 2025-05-19 15:19:20 -04:00
tim 36d0a863c6 remove spammy debug logs 2025-05-07 16:02:37 -04:00
tim 89ce46793e dotcom 2025-05-06 13:56:05 -04:00
tim 2bcf5d043c redis pipeline overflow fix 2025-04-23 15:20:00 -04:00
tim 71942d5b8f memcache init doesn't use transaction 2025-04-23 14:13:58 -04:00
tim ef44973646 sharedata 2025-04-23 12:51:14 -04:00
tim ce55609297 examine open orders 2025-04-07 01:32:19 -04:00
tim a27300b5e4 info log for websocket connection drops 2025-04-03 18:15:16 -04:00
tim f3faaa3dd6 tranchestatus tostring touchup 2025-04-01 14:20:58 -04:00
tim 0bb670b356 redis initial state push fix 2025-04-01 13:52:49 -04:00
tim 52b406ba17 ohlc retained length fix 2025-04-01 13:52:39 -04:00
tim 3d0342d19d price line metrics fix 2025-04-01 13:52:29 -04:00
tim dbf960bae9 initial TrancheState fix 2025-04-01 13:52:21 -04:00
tim d49f142fe3 redis pipeline autoflush after 10000 entries 2025-04-01 10:54:25 -04:00
tim 34fa439b3c USD marks 2025-03-29 15:27:13 -04:00
tim 41a1e2d9fe MIN_SLIPPAGE epsilon leeway 2025-03-28 20:05:52 -04:00
tim 66229e67bb bugfix for 0 slippage market orders 2025-03-26 23:48:43 -04:00
tim 31b6ddd314 initial redis state load doesn't use pipeline now, because it overflowed. 2025-03-26 23:25:10 -04:00
tim 07c6423fd5 USDC/USDC.e naming update 2025-03-26 17:17:54 -04:00
tim 4740687167 account release bugfix 2025-03-19 21:05:19 -04:00
tim a06eeeb10d bugfix 2025-03-19 17:31:34 -04:00
tim 4492d23c47 better "addrmeta is None" fix 2025-03-16 21:17:19 -04:00
tim 1c0c2f0e63 "address_meta None" fix 2025-03-15 06:26:01 -04:00
tim f3bdfdf97b trigger fixes 2025-03-10 21:09:40 -04:00
tim be8c8bf019 order pprint touchup 2025-03-10 14:31:55 -04:00
tim ecf1d21d5f bin/examine.py; readonly state; debug logs for Underfunded 2025-03-10 14:18:40 -04:00
tim b7ed91d1c0 start of kraken accounting (unfinished) 2025-03-07 19:00:42 -04:00
tim 646449e456 underfunded state 2025-03-03 21:43:17 -04:00
tim 1bcf73de22 execute refactor for extraconf; accounting fixes 2025-02-28 01:04:12 -04:00
tim af0f35eba5 execute refactor for extraconf; accounting fixes 2025-02-28 01:02:36 -04:00
tim e868ea5a4b composable cli config 2025-02-27 17:51:07 -04:00
tim c132f40164 transfer accounting fix 2025-02-27 14:23:07 -04:00
tim eccf81c3c8 bugfixes; pagerduty client lib change; requirements bump 2025-02-26 16:58:57 -04:00
tim 61ab34a9f7 arb1 accounting config 2025-02-26 14:01:00 -04:00
77 changed files with 11816 additions and 409 deletions

View File

@@ -28,7 +28,7 @@ def upgrade() -> None:
     sa.Column('time', sa.DateTime(), nullable=False),
     sa.Column('account', sa.String(), nullable=False),
     sa.Column('category', sa.Enum('Transfer', 'Income', 'Expense', 'Trade', 'Special', name='accountingcategory'), nullable=False),
-    sa.Column('subcategory', sa.Enum('OrderFee', 'GasFee', 'FillFee', 'VaultCreation', 'Execution', 'FeeAdjustment', 'InitialBalance', name='accountingsubcategory'), nullable=True),
+    sa.Column('subcategory', sa.Enum('OrderFee', 'GasFee', 'FillFee', 'Admin', 'TransactionGas', 'VaultCreation', 'Execution', 'FeeAdjustment', 'InitialBalance', name='accountingsubcategory'), nullable=True),
     sa.Column('token', sa.String(), nullable=False),
     sa.Column('amount', dexorder.database.column_types.DecimalNumeric(), nullable=False),
     sa.Column('value', dexorder.database.column_types.DecimalNumeric(), nullable=True),

View File

@@ -0,0 +1,30 @@
"""sharedata

Revision ID: e47d1bca4b3d
Revises: 509010f13e8b
Create Date: 2025-04-23 11:23:10.809341
"""
from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = 'e47d1bca4b3d'
down_revision: Union[str, None] = '509010f13e8b'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.create_table('sharedata',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('data', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )


def downgrade() -> None:
    op.drop_table('sharedata')
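For orientation, a minimal sketch of writing a row to the new sharedata table once this revision is applied; the connection URL and payload here are placeholders, not part of the migration:

import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# placeholder URL; in practice use the deployment's db_url
engine = sa.create_engine('postgresql://dexorder@localhost:5431/dexorder')
sharedata = sa.table('sharedata', sa.column('id'), sa.column('data', postgresql.JSONB))
with engine.begin() as conn:
    # the JSONB column accepts any JSON-serializable dict
    conn.execute(sa.insert(sharedata).values(data={'example': True}))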

bin/examine (new executable file, 15 lines)
View File

@@ -0,0 +1,15 @@
#!/bin/bash
kubectl port-forward postgres-0 5431:5432 &
PF_PID=$!
shutdown () {
    kill $PF_PID
    wait
}
trap shutdown INT TERM
PYTHONPATH=src python -m dexorder.bin.examine rpc_url=arbitrum_dxod db_url=postgres://dexorder@localhost:5431/dexorder "$@"
shutdown
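The wrapper pins rpc_url and db_url and forwards everything else to dexorder.bin.examine, so with the subcommands examine.py defines later in this compare, typical invocations would look like this (the vault address is a placeholder):

bin/examine open --json                # all open orders, as JSON
bin/examine vault 0x1234...abcd        # one vault's balances and open orders
bin/examine vault 0x1234...abcd --all  # include closed orders too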

View File

@@ -17,4 +17,4 @@ quotecoins = [
     '0x82aF49447D8a07e3bd95BD0d56f35241523fBab1', # WETH
 ]
-nativecoin = ['0x82aF49447D8a07e3bd95BD0d56f35241523fBab1'] # WETH
+nativecoin = '0x82aF49447D8a07e3bd95BD0d56f35241523fBab1' # WETH

File diff suppressed because one or more lines are too long

View File

@@ -1,21 +1,24 @@
 aiohappyeyeballs==2.4.3
-aiohttp==3.11.12
+aiohttp==3.11.13
 aiosignal==1.3.1
-alembic==1.14.1
+alembic==1.15.1
 annotated-types==0.7.0
 antlr4-python3-runtime==4.9.3
 asn1crypto==1.5.1
 async-lru==2.0.4
 attrs==23.2.0
 bip-utils==2.9.3
-bitarray==3.0.0
+bitarray==3.1.1
-cachetools==5.5.1
+cachetools==5.5.2
+cattrs==24.1.2
 cbor2==5.6.4
 certifi==2024.2.2
 cffi==1.16.0
 charset-normalizer==3.4.1
 ckzg==1.0.2
+click==8.1.8
 coincurve==20.0.0
+coremltools==8.2
 crcmod==1.7
 cytoolz==0.12.3
 defaultlist==1.0.0
@@ -31,39 +34,73 @@ eth-rlp==1.0.1
 eth-typing==4.4.0
 eth-utils==4.1.1
 eth_abi==5.2.0
+filelock==3.17.0
 frozenlist==1.4.1
+fsspec==2025.2.0
 google-auth==2.35.0
 greenlet==3.0.3
 hexbytes==0.3.1
 hiredis==3.0.0
 idna==3.7
+imageio==2.37.0
+importlib_resources==6.5.2
+Jinja2==3.1.6
+joblib==1.4.2
 jsonschema==4.21.1
 jsonschema-specifications==2023.12.1
+kraken==5.3.0
 kubernetes==31.0.0
+lazy_loader==0.4
+lightning==2.4.0
+lightning-utilities==0.14.0
 lru-dict==1.2.0
+lxml==5.3.1
 Mako==1.3.3
+markdown-it-py==3.0.0
 MarkupSafe==2.1.5
+mdurl==0.1.2
+mpmath==1.3.0
 msgpack-python==0.5.6
 multidict==6.0.5
-numpy==2.2.2
+networkx==3.4.2
+numpy==2.0.2
+nvidia-cublas-cu12==12.1.3.1
+nvidia-cuda-cupti-cu12==12.1.105
+nvidia-cuda-nvrtc-cu12==12.1.105
+nvidia-cuda-runtime-cu12==12.1.105
+nvidia-cudnn-cu12==9.1.0.70
+nvidia-cufft-cu12==11.0.2.54
+nvidia-curand-cu12==10.3.2.106
+nvidia-cusolver-cu12==11.4.5.107
+nvidia-cusparse-cu12==12.1.0.106
+nvidia-nccl-cu12==2.20.5
+nvidia-nvjitlink-cu12==12.8.93
+nvidia-nvtx-cu12==12.1.105
 oauthlib==3.2.2
 omegaconf==2.3.0
 orjson==3.10.15
+packaging==24.2
+pagerduty==1.0.0
 parsimonious==0.10.0
-pdpyras==5.4.0
+pillow==11.1.0
 prometheus_client==0.21.1
 propcache==0.2.0
 protobuf==5.26.1
 psycopg2-binary==2.9.10
 py-sr25519-bindings==0.2.0
+pyaml==25.1.0
+pyarrow==19.0.1
 pyasn1==0.6.1
 pyasn1_modules==0.4.1
 pycparser==2.22
 pycryptodome==3.20.0
 pydantic==2.9.2
 pydantic_core==2.23.4
+Pygments==2.19.1
 PyNaCl==1.5.0
+python-bidi==0.6.6
 python-dateutil==2.9.0.post0
+pytorch-lightning==2.5.0.post0
 pytz==2025.1
 pyunormalize==15.1.0
 PyYAML==6.0.1
@@ -72,18 +109,32 @@ referencing==0.35.0
 regex==2024.4.28
 requests==2.32.3
 requests-oauthlib==2.0.0
+rich==13.9.4
 rlp==4.0.1
 rpds-py==0.18.0
 rsa==4.9
+scikit-image==0.24.0
+scikit-learn==1.5.2
+scipy==1.13.1
+setuptools==75.8.2
+shapely==2.0.7
 six==1.16.0
 socket.io-emitter==0.1.5.1
 sortedcontainers==2.4.0
 SQLAlchemy==2.0.38
+sympy==1.13.3
+threadpoolctl==3.5.0
+tifffile==2025.2.18
 toolz==0.12.1
+torch==2.4.1
+torchmetrics==1.6.2
+torchvision==0.19.1
+tqdm==4.67.1
+triton==3.0.0
 types-requests==2.32.0.20240914
 typing_extensions==4.12.2
 urllib3==2.2.1
-web3==6.20.3
+web3==6.20.4
 websocket-client==1.8.0
-websockets==14.2
+websockets==13.1
 yarl==1.17.2

View File

@@ -21,7 +21,7 @@ eth-keys
 eth-account
 eth-utils
 eth-typing
-pdpyras # pagerduty
+pagerduty
 numpy
 bitarray
 typing_extensions
@@ -30,3 +30,4 @@ aiohttp
 charset-normalizer
 pytz
 prometheus_client
+krakenex

File diff suppressed because one or more lines are too long

View File

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -3,7 +3,7 @@ import logging
 from contextvars import ContextVar
 from datetime import datetime, timezone
 from decimal import Decimal
-from typing import Callable, Any, Union, Optional
+from typing import Callable, Any

 from web3 import AsyncWeb3
@@ -35,14 +35,16 @@ class _Token:
     def __repr__(self): return self.__token_name
     def __str__(self): return self.__token_name

-class _FalseToken (_Token):
+class _FalseyToken (_Token):
     def __bool__(self): return False

-NARG = _FalseToken('NARG')
-DELETE = _FalseToken('DELETE')  # used as a value token to indicate removal of the key
+NARG = _FalseyToken('NARG')
+DELETE = _FalseyToken('DELETE')  # used as a value token to indicate removal of the key

 ADDRESS_0 = '0x0000000000000000000000000000000000000000'
 NATIVE_TOKEN = '0x0000000000000000000000000000000000000001'  # We use 0x01 to indicate the use of native ETH wherever a token address is normally required
+USD_FIAT = '0x0000000000000000000000000000000000000055'  # We use 0x55 (ASCII 'U') to indicate the use of fiat USD
+CHAIN_ID_OFFCHAIN = -1

 WEI = 1
 GWEI = 1_000_000_000
 ETH = 1_000_000_000_000_000_000
@@ -57,7 +59,7 @@ _cwd()  # do this first so that config has the right current working directory
 # ordering here is important!
 from .base.chain import Blockchain  # the singletons are loaded into the dexorder.blockchain.* namespace
-from .util import async_yield
+from .util import async_yield, json
 from .base.fixed import Fixed2, FixedDecimals, Dec18
 from .configuration import config
 from .base.account import Account
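The _FalseyToken rename is cosmetic, but the pattern it names is worth a note: a sentinel object that is distinct from None yet tests false. A self-contained sketch of the same idea (the greet function is made up for illustration):

class _Token:
    def __init__(self, name): self.__name = name
    def __repr__(self): return self.__name

class _FalseyToken(_Token):
    def __bool__(self): return False  # tests false, but is not None

NARG = _FalseyToken('NARG')

def greet(greeting=NARG):
    # distinguishes "argument omitted" from an explicit None
    if greeting is NARG:
        greeting = 'hello'
    print(greeting)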

View File

@@ -0,0 +1 @@
from .accounting import *

View File

@@ -2,12 +2,12 @@ import asyncio
 import logging
 from typing import Union

-from sqlalchemy import select, func, text
+from sqlalchemy import select, func
 from typing_extensions import Optional
 from web3.exceptions import ContractLogicError
 from web3.types import EventData

-from dexorder import db, dec, NATIVE_TOKEN, from_timestamp, config, ADDRESS_0, now, Account, metric
+from dexorder import db, dec, NATIVE_TOKEN, from_timestamp, config, ADDRESS_0, now, Account
 from dexorder.base import TransactionReceiptDict
 from dexorder.base.chain import current_chain
 from dexorder.blocks import get_block_timestamp, get_block, current_block
@@ -34,19 +34,19 @@ class ReconciliationException(Exception):
     pass

-def accounting_lock():
-    """
-    This must be called before accounting_*() calls are made.
-    """
-    db.session.execute(text("LOCK TABLE account, accounting, reconciliation IN EXCLUSIVE MODE"))
+def initialize_accounting():
+    global accounting_initialized
+    if not accounting_initialized:
+        load_accounts_cache()
+        accounting_initialized = True

-async def initialize_accounting():
+async def initialize_accounting_runner():
     global accounting_initialized
     if not accounting_initialized:
-        accounting_lock()
         await _initialize_mark_to_market()  # set up mark-to-market first, so accounts can value their initial balances
         await _initialize_accounts()
+        load_accounts_cache()
         accounting_initialized = True
     log.info(f'accounting initialized\n\tstablecoins: {config.stablecoins}\n\tquotecoins: {config.quotecoins}\n\tnativecoin: {config.nativecoin}')
@@ -64,17 +64,23 @@
 async def _initialize_accounts_2():
     fm = await FeeManager.get()
-    of_account = _ensure_account(fm.order_fee_account_addr, AccountKind.OrderFee)
-    gf_account = _ensure_account(fm.gas_fee_account_addr, AccountKind.GasFee)
-    ff_account = _ensure_account(fm.fill_fee_account_addr, AccountKind.FillFee)
-    exe_accounts = [_ensure_account(account.address, AccountKind.Execution) for account in Account.all()]
+    of_account = ensure_account(fm.order_fee_account_addr, AccountKind.OrderFee)
+    gf_account = ensure_account(fm.gas_fee_account_addr, AccountKind.GasFee)
+    ff_account = ensure_account(fm.fill_fee_account_addr, AccountKind.FillFee)
+    exe_accounts = [ensure_account(account.address, AccountKind.Execution) for account in Account.all()]
     if current_chain.get().id in [1337, 31337]:
         log.debug('adjusting debug account balances')
         await asyncio.gather(
             *map(adjust_balance, (of_account, gf_account, ff_account, *exe_accounts))
         )
-    for db_account in db.session.execute(select(DbAccount)).scalars():
+
+def load_accounts_cache(*, chain=None):
+    if chain is None:
+        chain = current_chain.get()
+    for db_account in db.session.execute(select(DbAccount).where(DbAccount.chain==chain)).scalars():
         _tracked_addrs.add(db_account.address)
+        log.info(f'tracking account {db_account.chain.id} {db_account.address}')

 async def _initialize_mark_to_market():
@@ -124,13 +130,14 @@
         add_mark_pool(addr, pool['base'], pool['quote'], pool['fee'])

-def _ensure_account(addr: str, kind: AccountKind) -> DbAccount:
-    chain = current_chain.get()
+def ensure_account(addr: str, kind: AccountKind, *, chain=None) -> DbAccount:
+    if chain is None:
+        chain = current_chain.get()
     found = db.session.get(DbAccount, (chain, addr))
     if found:
         if found.kind != kind:
             log.warning(f'Account {addr} has wrong kind {found.kind} != {kind}')
-            found.kind = kind
+            # found.kind = kind
         db.session.add(found)
         _tracked_addrs.add(found.address)
     else:
@@ -160,20 +167,21 @@
     block_hash = hexstr(receipt['blockHash'])
     tx_id = hexstr(receipt['transactionHash'])
     await asyncio.gather(
-        add_accounting_row( sender, block_hash, tx_id, AccountingCategory.Transfer, None,
-                            token, -amount, receiver, adjust_decimals=adjust_decimals),
-        add_accounting_row( receiver, block_hash, tx_id, AccountingCategory.Transfer, None,
-                            token, amount, sender, adjust_decimals=adjust_decimals),
+        accounting_transaction_gas(receipt),
+        add_accounting_entry_m2m(sender, block_hash, tx_id, AccountingCategory.Transfer, None,
+                                 token, -amount, receiver, adjust_decimals=adjust_decimals),
+        add_accounting_entry_m2m(receiver, block_hash, tx_id, AccountingCategory.Transfer, None,
+                                 token, amount, sender, adjust_decimals=adjust_decimals),
     )

-async def accounting_transaction_gas(receipt: TransactionReceiptDict, subcategory: AccountingSubcategory):
+async def accounting_transaction_gas(receipt: TransactionReceiptDict, subcategory: AccountingSubcategory = AccountingSubcategory.TransactionGas):
     """ Accounts for the gas spent on the given transaction """
     amount = dec(receipt['gasUsed']) * dec(receipt['effectiveGasPrice'])
-    await add_accounting_row( receipt['from'],
+    await add_accounting_entry_m2m(receipt['from'],
         hexstr(receipt['blockHash']), hexstr(receipt['transactionHash']),
         AccountingCategory.Expense, subcategory, NATIVE_TOKEN, -amount
     )

 async def accounting_placement(order_placed: EventData):
@@ -186,10 +194,10 @@
         log.warning(f'Rogue DexorderPlacedEvent in tx {hexstr(tx_id)}')
         return
     fm = await FeeManager.get()
-    await add_accounting_row( fm.order_fee_account_addr, block_hash, tx_id, AccountingCategory.Income,
-                              AccountingSubcategory.OrderFee, NATIVE_TOKEN, order_fee)
-    await add_accounting_row( fm.gas_fee_account_addr, block_hash, tx_id, AccountingCategory.Income,
-                              AccountingSubcategory.GasFee, NATIVE_TOKEN, gas_fee)
+    await add_accounting_entry_m2m(fm.order_fee_account_addr, block_hash, tx_id, AccountingCategory.Income,
+                                   AccountingSubcategory.OrderFee, NATIVE_TOKEN, order_fee)
+    await add_accounting_entry_m2m(fm.gas_fee_account_addr, block_hash, tx_id, AccountingCategory.Income,
+                                   AccountingSubcategory.GasFee, NATIVE_TOKEN, gas_fee)

 async def accounting_fill(fill: EventData, out_token: str) -> dec:
@@ -200,14 +208,14 @@
     tx_id = hexstr(fill['transactionHash'])
     fee = int(fill['args']['fillFee'])
     fm = await FeeManager.get()
-    return await add_accounting_row(fm.fill_fee_account_addr, block_hash, tx_id, AccountingCategory.Income,
-                                    AccountingSubcategory.FillFee, out_token, fee)
+    return await add_accounting_entry_m2m(fm.fill_fee_account_addr, block_hash, tx_id, AccountingCategory.Income,
+                                          AccountingSubcategory.FillFee, out_token, fee)

-async def add_accounting_row(account: str, block_hash: Optional[str], tx_id: Optional[str], category, subcategory, token, amount, note=None,
-                             *, adjust_decimals=True) -> dec:
+async def add_accounting_entry_m2m(account: str, block_hash: Optional[str], tx_id: Optional[str], category, subcategory, token, amount, note=None,
+                                   *, adjust_decimals=True) -> dec:
     """
-    Returns the mark-to-market USD value of the transaction.
+    Returns the mark-to-market USD value of the entry.
     """
     if amount == 0:
         return dec(0)
@@ -221,6 +229,13 @@
     value = mark_to_market(token, amount)
     log.debug(f'accounting row {time} {account} {category} {subcategory} {token} {amount} ${value}')
     chain_id = current_chain.get().id
+    add_accounting_entry(chain_id, account, time, category, subcategory, token, amount, value, tx_id, note)
+    return value
+
+def add_accounting_entry(chain_id, account, time, category, subcategory, token, amount, value=None, tx_id=None, note=None):
+    if not is_tracked_address(account):
+        return
     db.session.add(Accounting(account=account,
         time=time, category=category, subcategory=subcategory,
         token=token, amount=amount, value=value, note=note,
@@ -229,15 +244,17 @@
     account_db = db.session.get(DbAccount, (current_chain.get(), account))
     new_amount = account_db.balances.get(token, dec(0)) + amount
     if new_amount < 0:
-        log.error(f'negative balance for account {account} when applying accounting row {time} {category} {subcategory} {token} {amount} ${value}')
+        log.error(
+            f'negative balance for account {account} when applying accounting row {time} {category} {subcategory} {token} {amount} ${value}')
     account_db.balances[token] = new_amount
     db.session.add(account_db)  # deep changes would not be detected by the ORM
-    return value
+    db.session.flush()

 async def adjust_balance(account: DbAccount, token=NATIVE_TOKEN, subcategory=AccountingSubcategory.InitialBalance, note=None):
     true_balance = await get_balance(account.address, token)
     amount = true_balance - account.balances.get(token, dec(0))
-    await add_accounting_row(account.address, None, None, AccountingCategory.Special, subcategory, NATIVE_TOKEN, amount, note, adjust_decimals=False)
+    await add_accounting_entry_m2m(account.address, None, None, AccountingCategory.Special, subcategory, NATIVE_TOKEN, amount, note, adjust_decimals=False)

 async def accounting_reconcile(account: DbAccount, block_id: Optional[str] = None, last_accounting_row_id: Optional[int] = None):
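The refactor splits mark-to-market valuation (add_accounting_entry_m2m) from raw row insertion (add_accounting_entry), which opens the door to rows that have no on-chain transaction. A hedged sketch of how an off-chain ledger row might be recorded under that split, using the new CHAIN_ID_OFFCHAIN constant; the enum import path, the account name, and this intended use are assumptions, and the row is only stored if the account is tracked:

from datetime import datetime, timezone
from dexorder import CHAIN_ID_OFFCHAIN
from dexorder.accounting import add_accounting_entry          # path per this diff
from dexorder.database.model import AccountingCategory        # import path assumed

add_accounting_entry(
    CHAIN_ID_OFFCHAIN,               # -1, added in dexorder/__init__ above
    'kraken:main',                   # hypothetical account identifier
    datetime.now(timezone.utc),
    AccountingCategory.Transfer,     # category; subcategory left empty
    None,
    'USDC', 100, value=100,          # token, amount, mark-to-market value
    note='exchange ledger import',
)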

View File

@@ -0,0 +1,65 @@
import logging
import tempfile
from dataclasses import dataclass
from typing import Optional

import krakenex

from dexorder import timestamp
from dexorder.bin.executable import execute

log = logging.getLogger(__name__)

kraken_api_key = r'HqPHnGsAHunFtaP8YZTFsyh+LauVrcgFHi/US+RseR/4DiT+NG/JpONV'
kraken_api_secret = r'4hvdMdaN5TlNlyk2PShdRCsOE/T4sFzeBrR7ZjC+LUGuAXhBehY8vvWDZSUSyna2OFeOJ9GntPvyXOhrpx70Bg=='

kraken = krakenex.API()

# start and end should be timestamps or datetimes. inclusiveness is [start,end) as usual
def kraken_get_ledger(start=None, end=None):
    entries = []
    offset = 1  # 1-based ffs
    if start:
        start = timestamp(start) - 1  # kraken start is EXCLUSIVE for some reason
    if end:
        end = timestamp(end) - 1  # kraken end is INCLUSIVE. :/
    while True:
        kl = kraken.query_private('Ledgers', {'start': start, 'end': end, 'ofs': offset})
        print(repr(kl))
        break
        if kl.empty:
            break
        for t in kl.itertuples():
            print(t)
        # noinspection PyShadowingBuiltins
        offset += len(kl)
    return entries

@dataclass
class KrakenConfig:
    kraken_api_key: Optional[str] = None
    kraken_api_secret: Optional[str] = None
    kraken_start: Optional[str] = None  # timestamp or date
    kraken_end: Optional[str] = None  # timestamp or date

async def main(kconfig: KrakenConfig):
    load_kraken_key(kconfig)
    kraken_get_ledger()

def load_kraken_key(kconfig):
    temp = tempfile.NamedTemporaryFile()
    if not kconfig.kraken_api_key or not kconfig.kraken_api_secret:
        log.error("Must set kraken_api_key= and kraken_api_secret= on the command line")
        exit(1)
    temp.write(kconfig.kraken_api_key.encode())
    temp.write(b'\n')
    temp.write(kconfig.kraken_api_secret.encode())
    temp.write(b'\n')
    kraken.load_key(temp.name)

if __name__ == '__main__':
    execute(main, parse_args=KrakenConfig)
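As the commit message says, this is unfinished: the loop prints one page and breaks, and it treats the krakenex response like a DataFrame (.empty, .itertuples()) although query_private returns a plain dict of the form {'error': [...], 'result': {'ledger': {...}, 'count': N}}. A hedged sketch of what the completed pagination might look like under that documented response shape, reusing the module's kraken and timestamp names:

def kraken_get_ledger_paged(start=None, end=None):
    entries = {}
    offset = 1  # 1-based, per the comment in the committed version
    params = {}
    if start:
        params['start'] = timestamp(start) - 1  # kraken start is exclusive
    if end:
        params['end'] = timestamp(end) - 1      # kraken end is inclusive
    while True:
        resp = kraken.query_private('Ledgers', dict(params, ofs=offset))
        if resp.get('error'):
            raise RuntimeError(f'Kraken error: {resp["error"]}')
        page = resp['result'].get('ledger', {})
        if not page:
            break
        entries.update(page)   # ledger id -> entry dict
        offset += len(page)
    return entries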

View File

@@ -2,16 +2,15 @@ import logging
 from typing import TypedDict

 from dexorder import db
+from dexorder.base import OldPoolDict, OldGMXDict, OldTokenDict
 from dexorder.base.chain import current_chain
 from dexorder.blockstate import BlockDict
-from dexorder.database.model import Pool
-from dexorder.database.model.pool import OldPoolDict
-from dexorder.database.model.token import Token, OldTokenDict
+from dexorder.database.model import Pool, Token

 log = logging.getLogger(__name__)

 # address_metadata is a polymorphic BlockDict which maps address keys to a dict of metadata describing the address
-# used for Tokens and Pools
+# used for Tokens and Pools and GMX Markets

 class AddressMetadata (TypedDict):
@@ -19,7 +18,9 @@ class AddressMetadata (TypedDict):
 def save_addrmeta(address: str, meta: AddressMetadata):
-    if meta['type'] == 'Token':
+    if meta is None:
+        pass
+    elif meta['type'] == 'Token':
         meta: OldTokenDict
         updated = Token.load(meta)
         token = db.session.get(Token, (current_chain.get().id, address))
@@ -43,8 +44,10 @@ def save_addrmeta(address: str, meta: AddressMetadata):
         pool.quote = updated.quote
         pool.fee = updated.fee
         pool.decimals = updated.decimals
+    elif meta['type'] == 'GMX':
+        pass
     else:
         log.warning(f'Address {address} had unknown metadata type {meta["type"]}')

-address_metadata: BlockDict[str,AddressMetadata] = BlockDict('a', redis=True, db=True, finalize_cb=save_addrmeta)
+address_metadata: BlockDict[str,OldPoolDict|OldTokenDict|OldGMXDict] = BlockDict('a', redis=True, db=True, finalize_cb=save_addrmeta)

View File

@@ -1,30 +1,33 @@
 import logging
 import socket

-import pdpyras
+import pagerduty

 from dexorder import NARG, config

 log = logging.getLogger(__name__)

-def alert(title, message, dedup_key=NARG, log_level=logging.ERROR, do_log=True):
+def alert(title, message, dedup_key=NARG, log_level=logging.ERROR, do_log=True, severity='critical'):
     if dedup_key is NARG:
         dedup_key = str(hash(title))
     if do_log:
         msg = f'{title}: {message}'
         log.log(log_level, msg)  # if log_level=CRITICAL for example, make sure this does not re-alert!
-    alert_pagerduty(title, message, dedup_key, log_level)
+    alert_pagerduty(title, message, dedup_key, severity)

 def warningAlert(title, message, dedup_key=NARG, log_level=logging.WARNING):
-    return alert(title, message, dedup_key, log_level)
+    return alert(title, message, dedup_key, log_level, severity='warning')
+
+def infoAlert(title, message, dedup_key=NARG, log_level=logging.INFO):
+    return alert(title, message, dedup_key, log_level, severity='info')

 pagerduty_session = None
 hostname = None

-def alert_pagerduty(title, message, dedup_key, log_level):
+def alert_pagerduty(title, message, dedup_key, severity):
     if not config.pagerduty:
         return
     # noinspection PyBroadException
@@ -32,10 +35,9 @@ def alert_pagerduty(title, message, dedup_key, severity):
         global pagerduty_session
         global hostname
         if pagerduty_session is None:
-            pagerduty_session = pdpyras.EventsAPISession(config.pagerduty)
+            pagerduty_session = pagerduty.EventsApiV2Client(config.pagerduty)
             hostname = socket.gethostname()
-        sev = 'critical' if log_level >= logging.ERROR else 'info'
-        pagerduty_session.trigger(title, hostname, severity=sev, custom_details={'message': message}, dedup_key=dedup_key)
+        pagerduty_session.trigger(title, hostname, severity=severity, custom_details={'message': message}, dedup_key=dedup_key, payload=dict(severity=severity))
     except Exception:
         log.warning('Could not notify PagerDuty!', exc_info=True)
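With severity now threaded through explicitly instead of being derived from the log level, callers pick the PagerDuty severity by helper. A small sketch of the resulting call surface (the alert titles are made up):

from dexorder.alert import alert, warningAlert, infoAlert

alert('DB unreachable', 'primary postgres is down')       # severity='critical' by default
warningAlert('Lagging', 'block processing 30s behind')    # severity='warning'
infoAlert('Started', 'backend has started')               # severity='info', as bin/main now uses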

View File

@@ -1,6 +1,6 @@
-from abc import abstractmethod
 from dataclasses import dataclass
-from typing import TypedDict, Union, Type, Any, Callable
+from typing import TypedDict, Union, Any, Callable
+from dexorder.base.metadecl import OldTokenDict, OldPoolDict, OldGMXDict

 Address = str
 Quantity = Union[str,int]

View File

@@ -0,0 +1,65 @@
import logging
from typing import TypedDict, NotRequired

log = logging.getLogger(__name__)

class TokenDict (TypedDict):
    """
    Token metadata dictionary

    Fields:
        a: The address of the token.
        n: The name of the token.
        s: The symbol of the token.
        d: Number of decimals.
        l: Indicates if approved ("listed").
        g: gmx synthetic flag
        x: Optional extra data.
    """
    a: str
    n: str
    s: str
    d: int
    l: NotRequired[bool]
    g: NotRequired[bool]
    x: NotRequired[dict]

# OldTokenDict is the primary dict we use in-memory, with basic JSON-able types
class OldTokenDict (TypedDict):
    type: str
    chain: int
    address: str
    name: str
    symbol: str
    decimals: int
    approved: bool  # whether this token is in the whitelist or not
    x: NotRequired[dict]  # extra data

class OldPoolDict (TypedDict):
    type: str
    chain: int
    address: str
    exchange: int
    base: str
    quote: str
    fee: int
    decimals: int

class OldGMXDict (TypedDict):
    type: str
    chain: int
    address: str
    exchange: int
    index: str
    long: str
    short: str
    leverage: int
    decimals: int
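These TypedDicts are discriminated by their 'type' field, which is how save_addrmeta (earlier in this compare) dispatches. A small illustration with placeholder values (the WETH address is the one from the quotecoins config above):

meta: OldTokenDict = {
    'type': 'Token', 'chain': 42161,
    'address': '0x82aF49447D8a07e3bd95BD0d56f35241523fBab1',
    'name': 'Wrapped Ether', 'symbol': 'WETH',
    'decimals': 18, 'approved': True,
}
if meta['type'] == 'Token':
    print(f"{meta['symbol']} has {meta['decimals']} decimals")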

View File

@@ -4,7 +4,7 @@ from dataclasses import dataclass
 from enum import Enum
 from typing import Optional

-from dexorder import timestamp
+from dexorder import timestamp, from_timestamp
 from dexorder.util import hexbytes
 from dexorder.util.convert import decode_IEEE754
@@ -37,9 +37,10 @@ class SwapOrderState (Enum):
 class Exchange (Enum):
     Unknown = -1
-    UniswapV2 = 0
+    OTC = 0
     UniswapV3 = 1
+    GMX = 2

 @dataclass
 class Route:
@@ -75,6 +76,20 @@ class Line:
         return self.intercept, self.slope

+@dataclass
+class GMXOrder:
+    reserve_amount: int  # todo
+    is_long: bool
+    is_increase: bool
+
+    @staticmethod
+    def load(obj: Optional[tuple[int,bool,bool]]):
+        return GMXOrder(*obj) if obj is not None else None
+
+    def dump(self):
+        return self.reserve_amount, self.is_long, self.is_increase
+
 @dataclass
 class SwapOrder:
     tokenIn: str
@@ -87,6 +102,7 @@ class SwapOrder:
     inverted: bool
     conditionalOrder: int
     tranches: list['Tranche']
+    gmx: Optional[GMXOrder] = None

     @property
     def min_input_amount(self):
@@ -95,7 +111,7 @@ class SwapOrder:
     @staticmethod
     def load(obj):
         return SwapOrder(obj[0], obj[1], Route.load(obj[2]), int(obj[3]), int(obj[4]), obj[5], obj[6], obj[7], obj[8],
-                         [Tranche.load(t) for t in obj[9]])
+                         [Tranche.load(t) for t in obj[9]], GMXOrder.load(obj[10]) if len(obj) > 10 else None)

     @staticmethod
     def load_from_chain(obj):
@@ -106,7 +122,8 @@ class SwapOrder:
         return (self.tokenIn, self.tokenOut, self.route.dump(),
                 str(self.amount), str(self.minFillAmount), self.amountIsInput,
                 self.outputDirectlyToOwner, self.inverted, self.conditionalOrder,
-                [t.dump() for t in self.tranches])
+                [t.dump() for t in self.tranches],
+                self.gmx.dump() if self.gmx is not None else None)

     def __str__(self):
         msg = f'''
@@ -250,6 +267,26 @@ class ElaboratedSwapOrderStatus:
     def copy(self):
         return copy.deepcopy(self)

+    def __str__(self):
+        msg = f'''
+SwapOrder
+    status: {self.state.name}
+    in: {self.order.tokenIn}
+    out: {self.order.tokenOut}
+    exchange: {self.order.route.exchange.name, self.order.route.fee}
+    amount: {"input" if self.order.amountIsInput else "output"} {self.filledIn if self.order.amountIsInput else self.filledOut}/{self.order.amount}{" to owner" if self.order.outputDirectlyToOwner else ""}
+    minFill: {self.order.minFillAmount}
+    inverted: {self.order.inverted}
+    tranches:
+'''
+        for i in range(len(self.trancheStatus)):
+            tranche = self.order.tranches[i]
+            ts = self.trancheStatus[i]
+            msg += f'        {tranche}\n'
+            for fill in ts.fills:
+                msg += f'            {fill}\n'
+        return msg

 NO_OCO = 18446744073709551615  # max uint64
@@ -263,6 +300,9 @@ DISTANT_FUTURE = 4294967295  # max uint32
 MAX_FRACTION = 65535  # max uint16

+MIN_SLIPPAGE = 0.0001  # one bip
+MIN_SLIPPAGE_EPSILON = 0.000000000003

 @dataclass
 class Tranche:
@@ -344,7 +384,7 @@ class Tranche:
     def __str__(self):
-        msg = f'{self.fraction/MAX_FRACTION:.1%} {"start+" if self.startTimeIsRelative else ""}{self.startTime} to {"start+" if self.startTimeIsRelative else ""}{self.endTime}'
+        msg = f'{self.fraction/MAX_FRACTION:.1%} {"start+" if self.startTimeIsRelative else ""}{from_timestamp(self.startTime)} to {"start+" if self.startTimeIsRelative else ""}{from_timestamp(self.endTime)}'
         if self.marketOrder:
             # for marketOrders, minLine.intercept is the slippage
             msg += f' market order slippage {self.minLine.intercept:.2%}'
@@ -352,11 +392,11 @@ class Tranche:
         if self.minLine.intercept or self.minLine.slope:
             msg += f' >{self.minLine.intercept:.5g}'
             if self.minLine.slope:
-                msg += f'{self.minLine.slope:+.5g}/s({self.minLine.value():5g})'
+                msg += f'{self.minLine.slope:+.5g}/s={self.minLine.value():5g}'
         if self.maxLine.intercept or self.maxLine.slope:
             msg += f' <{self.maxLine.intercept:.5g}'
             if self.maxLine.slope:
-                msg += f'{self.maxLine.slope:+.5g}/s({self.maxLine.value():5g})'
+                msg += f'{self.maxLine.slope:+.5g}/s={self.maxLine.value():5g}'
         if self.rateLimitPeriod:
             msg += f' {self.rateLimitFraction/MAX_FRACTION:.1%} every {self.rateLimitPeriod/60:.0} minutes'
         return msg
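GMXOrder's load/dump mirror the tuple wire format, and SwapOrder.load only reads obj[10] when it is present, so serialized orders from before the GMX field remain loadable. A quick round-trip sketch:

gmx = GMXOrder(reserve_amount=1000, is_long=True, is_increase=False)
assert GMXOrder.load(gmx.dump()) == gmx   # dataclass equality
assert GMXOrder.load(None) is None        # pre-GMX orders carry no payload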

View File

@@ -14,7 +14,7 @@ from dexorder.blockstate.fork import Fork
 from dexorder.configuration import parse_args
 from dexorder.contract import get_contract_event
 from dexorder.database import db
-from dexorder.event_handler import check_ohlc_rollover, handle_uniswap_swaps
+from dexorder.event_handler import handle_uniswap_swaps
 from dexorder.memcache import memcache
 from dexorder.memcache.memcache_state import RedisState, publish_all
 from dexorder.ohlc import recent_ohlcs, ohlc_save, ohlcs
@@ -58,7 +58,7 @@ async def main():
     runner = BlockStateRunner(state, publish_all=publish_all if redis_state else None, timer_period=0)
     runner.add_event_trigger(handle_uniswap_swaps, get_contract_event('IUniswapV3PoolEvents', 'Swap'), multi=True)
-    runner.add_callback(check_ohlc_rollover)
+    # runner.add_callback(check_ohlc_rollover)
     runner.on_promotion.append(finalize_callback)
     if db:
         # noinspection PyUnboundLocalVariable
@@ -74,4 +74,4 @@ async def main():
 if __name__ == '__main__':
-    execute(main())
+    execute(main)

View File

@@ -37,4 +37,4 @@ if __name__ == '__main__':
     time = parse_date(sys.argv[1], ignoretz=True).replace(tzinfo=timezone.utc)
     seconds_per_block = float(sys.argv[2])
     sys.argv = [sys.argv[0], *sys.argv[3:]]
-    execute(main())
+    execute(main)

View File

@@ -0,0 +1,95 @@
import argparse
import logging

from dexorder import db, blockchain
from dexorder.base.order import OrderKey
from dexorder.blocks import current_block, get_block
from dexorder.blockstate import current_blockstate
from dexorder.blockstate.blockdata import BlockData
from dexorder.blockstate.db_state import DbState
from dexorder.blockstate.fork import current_fork
from dexorder.contract.dexorder import VaultContract
from dexorder.order.orderstate import Order
from dexorder.tokens import adjust_decimals
from dexorder.util import json
from dexorder.vault_blockdata import vault_balances, pretty_balances
from dexorder.bin.executable import execute

log = logging.getLogger(__name__)

async def dump_orders(orders, args):
    if args.json:
        print(json.dumps([order.status.dump() for order in orders]))
    else:
        first = True
        for order in orders:
            if first:
                first = False
            else:
                print()
            print(await order.pprint())

def command_vault_argparse(subparsers):
    parser = subparsers.add_parser('vault', help='show the vault\'s balances and orders')
    parser.add_argument('address', help='address of the vault')
    parser.add_argument('--all', help='show all orders including closed ones', action='store_true')
    parser.add_argument('--json', help='output in JSON format', action='store_true')

async def command_vault(args):
    balances = vault_balances.get(args.address, {})
    print(f'Vault {args.address} v{await VaultContract(args.address).version()}')
    print(f'Balances:')
    print(pretty_balances({k: (await adjust_decimals(k, v)) for k, v in balances.items()}))
    print(f'Orders:')
    i = 0
    orders = []
    while True:
        key = OrderKey(args.address, i)
        try:
            order = Order.of(key)
        except KeyError:
            break
        if args.all or order.is_open:
            orders.append(order)
        i += 1
    await dump_orders(orders, args)

def command_open_argparse(subparsers):
    parser = subparsers.add_parser('open', help='show all open orders')
    parser.add_argument('--json', help='output in JSON format', action='store_true')

async def command_open(args):
    await dump_orders([Order.of(key) for key in Order.open_orders], args)

async def main(args: list):
    parser = argparse.ArgumentParser()
    parser.add_argument('--chain-id', default=None)
    subparsers = parser.add_subparsers(dest='command')
    for name in globals():
        if name.startswith('command_') and name.endswith('_argparse'):
            globals()[name](subparsers)
    parsed = parser.parse_args(args)
    print(parsed)
    try:
        subcommand = globals()[f'command_{parsed.command}']
    except KeyError:
        parser.print_help()
        exit(1)
    await blockchain.connect()
    db.connect()
    db_state = DbState(BlockData.by_opt('db'))
    with db.transaction():
        state = await db_state.load()
        # state.readonly = True
        current_blockstate.set(state)
        block = await get_block(state.root_hash)
        current_block.set(block)
        current_fork.set(state.root_fork)
        await subcommand(parsed)

if __name__ == '__main__':
    execute(main, parse_args=True)
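main() discovers subcommands by scanning globals for the command_*_argparse naming pair, so a new command needs no explicit registration. A hypothetical addition following the convention (the 'count' command does not exist in this compare):

def command_count_argparse(subparsers):
    # hypothetical subcommand: registered automatically by the globals() scan
    subparsers.add_parser('count', help='print the number of open orders')

async def command_count(args):
    print(len(list(Order.open_orders)))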

View File

@@ -7,10 +7,14 @@ import tomllib
 from asyncio import CancelledError
 from signal import Signals
 from traceback import print_exception
-from typing import Coroutine
+from typing import Coroutine, Callable, Union, Any
+
+from omegaconf import OmegaConf

 from dexorder import configuration, config
 from dexorder.alert import init_alerts
+from dexorder.configuration.load import config_file
+from dexorder.configuration.schema import Config
 from dexorder.metric.metric_startup import start_metrics_server

 if __name__ == '__main__':
@@ -25,7 +29,27 @@ async def _shutdown_coro(_sig, _loop):
         if task is not this_task:
             task.cancel()

-def execute(main:Coroutine, shutdown=None, *, parse_logging=True, parse_args=True):
+def split_args():
+    omegaconf_args = []
+    regular_args = []
+    for arg in sys.argv[1:]:
+        if '=' in arg and not arg.startswith('--'):
+            key, value = arg.split('=', 1)
+            if hasattr(Config, key):
+                omegaconf_args.append(arg)
+                continue
+        regular_args.append(arg)
+    return omegaconf_args, regular_args
+
+def execute(main: Callable[..., Coroutine[Any, Any, Any]], shutdown=None, *, parse_logging=True,
+            parse_args: Union[Callable[[list[str]], Any], type, bool] = True):
+    """
+    if parse_args is a function, then the command-line arguments are given to OmegaConf first, and any args parsed by
+    OmegaConf are stripped from the args list. The remaining args are then passed to parse_args(args)
+    if parse_args is a type, then the type is used to parse the extra command-line arguments using OmegaConf.
+    """
     # config
     configured = False
     if parse_logging:
@@ -42,10 +66,24 @@
         logging.basicConfig(level=logging.INFO, stream=sys.stdout)
         log.setLevel(logging.DEBUG)
         log.info('Logging configured to default')
+    log.info(f'Loaded main config from {config_file}')

+    xconf = None
     if parse_args:
         # NOTE: there is special command-line argument handling in config/load.py to get a config filename.
         # The -c/--config flag MUST BE FIRST if present.
-        configuration.parse_args()
+        # The rest of the arguments are split by format into key=value for omegaconf and anything else is "regular args"
+        omegaconf_args, regular_args = split_args()
+        configuration.parse_args(omegaconf_args)
+        # must check for `type` before `callable`, because types are also callables
+        if isinstance(parse_args, type):
+            # noinspection PyUnboundLocalVariable
+            xconf = OmegaConf.merge(OmegaConf.structured(parse_args), OmegaConf.from_cli(regular_args))
+        elif callable(parse_args):
+            # noinspection PyUnboundLocalVariable
+            xconf = parse_args(regular_args)
+        else:
+            # just pass the regular args to main
+            xconf = regular_args

     init_alerts()
@@ -59,7 +97,14 @@
         loop.add_signal_handler(s, lambda sig=s: asyncio.create_task(_shutdown_coro(sig, loop), name=f'{s.name} handler'))

     # main
-    task = loop.create_task(main, name='main')
+    num_args = len(inspect.signature(main).parameters)
+    if num_args == 0:
+        coro = main()
+    elif num_args == 1:
+        coro = main(xconf)
+    else:
+        raise Exception(f'main() must accept 0 or 1 arguments, not {num_args}')
+    task = loop.create_task(coro, name='main')
     try:
         loop.run_until_complete(task)
     except CancelledError:
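Under this refactor, execute takes the main callable itself rather than an already-created coroutine, and parse_args may be a bool, a function, or a dataclass type parsed via OmegaConf. A sketch of the call shapes, with a made-up JobConfig standing in for configs like RefillConfig and KrakenConfig elsewhere in this compare:

from dataclasses import dataclass
from dexorder.bin.executable import execute

async def main_noargs():             # zero-arg main
    ...

@dataclass
class JobConfig:                     # hypothetical extra config
    batch_size: int = 100

async def main_with_config(cfg: JobConfig):
    print(cfg.batch_size)            # populated from key=value CLI args

# execute(main_noargs)                             # parse_args=True: regular args passed through
# execute(main_with_config, parse_args=JobConfig)  # 'batch_size=50' on the CLI lands in cfg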

View File

@@ -11,8 +11,9 @@ from dexorder.bin.executable import execute
 from dexorder.blocks import get_block_timestamp, get_block
 from dexorder.blockstate.fork import current_fork
 from dexorder.configuration import parse_args
-from dexorder.contract import get_contract_event
+from dexorder.event_handler import wire_dexorder_debug
 from dexorder.final_ohlc import FinalOHLCRepository
+from dexorder.gmx import gmx_wire_runner_late, gmx_wire_runner_early
 from dexorder.pools import get_uniswap_data
 from dexorder.util import hexstr
 from dexorder.util.shutdown import fatal
@@ -56,10 +57,15 @@ async def main():
     ohlcs = FinalOHLCRepository()
     await blockchain.connect()
     walker = BlockWalker(flush_callback, timedelta(seconds=config.walker_flush_interval))
-    walker.add_event_trigger(handle_backfill_uniswap_swaps,
-                             get_contract_event('IUniswapV3PoolEvents', 'Swap'), multi=True)
+    # gmx_wire_runner_early(walker, backfill=ohlcs)
+    gmx_wire_runner_early(walker)  # todo re-enable backfill
+    wire_dexorder_debug(walker)
+    # todo re-enable uniswap
+    # walker.add_event_trigger(handle_backfill_uniswap_swaps,
+    #                          get_contract_event('IUniswapV3PoolEvents', 'Swap'), multi=True)
+    gmx_wire_runner_late(walker)
     await walker.run()

 if __name__ == '__main__':
-    execute(main())
+    execute(main)

View File

@@ -2,8 +2,8 @@ import logging
 from asyncio import CancelledError

 from dexorder import db, blockchain
-from dexorder.accounting import initialize_accounting
-from dexorder.alert import warningAlert
+from dexorder.accounting import initialize_accounting_runner
+from dexorder.alert import infoAlert
 from dexorder.base.chain import current_chain
 from dexorder.bin.executable import execute
 from dexorder.blockstate import current_blockstate
@@ -14,7 +14,11 @@ from dexorder.contract import get_contract_event
 from dexorder.contract.dexorder import get_dexorder_contract
 from dexorder.event_handler import (init, dump_log, handle_vault_created, handle_order_placed,
     handle_transfer, handle_swap_filled, handle_order_canceled, handle_order_cancel_all,
-    handle_uniswap_swaps, handle_vault_impl_changed, update_metrics)
+    handle_uniswap_swaps, handle_vault_impl_changed, update_metrics,
+    activate_new_price_triggers)
+from dexorder.gmx import gmx_wire_runner_early, gmx_wire_runner_late
+from dexorder.gmx._handle import gmx_wire_runner_init
+from dexorder.marks import publish_marks
 from dexorder.memcache import memcache
 from dexorder.memcache.memcache_state import RedisState, publish_all
 from dexorder.order.executionhandler import handle_dexorderexecutions, execute_tranches
@@ -60,35 +64,39 @@ def setup_logevent_triggers(runner):
     runner.add_callback(check_activate_orders)
     runner.add_callback(init)
+    gmx_wire_runner_init(runner)
     runner.add_event_trigger(handle_transaction_receipts)
     runner.add_event_trigger(handle_vault_created, get_contract_event('Vault', 'VaultCreated'))
     runner.add_event_trigger(handle_vault_impl_changed, get_contract_event('Vault', 'VaultImplChanged'))
     runner.add_event_trigger(handle_order_placed, get_contract_event('VaultImpl', 'DexorderSwapPlaced'))
+    gmx_wire_runner_early(runner)  # must come after DexorderSwapPlaced so the GMXOrder event can add data to the existing order
     runner.add_event_trigger(handle_transfer, get_contract_event('ERC20', 'Transfer'))
     runner.add_event_trigger(handle_uniswap_swaps, get_contract_event('IUniswapV3PoolEvents', 'Swap'), multi=True)
     runner.add_event_trigger(handle_swap_filled, get_contract_event('VaultImpl', 'DexorderSwapFilled'))
     runner.add_event_trigger(handle_order_canceled, get_contract_event('VaultImpl', 'DexorderSwapCanceled'))
     runner.add_event_trigger(handle_order_cancel_all, get_contract_event('VaultImpl', 'DexorderCancelAll'))
+    gmx_wire_runner_late(runner)
     runner.add_event_trigger(handle_dexorderexecutions, executions)
     runner.add_event_trigger(handle_vault_creation_requests)
+    runner.add_event_trigger(activate_new_price_triggers)
     runner.add_callback(end_trigger_updates)
     runner.add_callback(execute_tranches)
-    runner.add_callback(cleanup_jobs)
     # fee adjustments are handled offline by batch jobs
     # runner.add_event_trigger(handle_fee_limits_changed, get_contract_event('IFeeManager', 'FeeLimitsChanged'))
     # runner.add_event_trigger(handle_fees_changed, get_contract_event('IFeeManager', 'FeesChanged'))
     # runner.add_callback(adjust_gas)
+    runner.add_callback(cleanup_jobs)
+    runner.add_callback(publish_marks)
     runner.add_callback(update_metrics)

 # noinspection DuplicatedCode
 async def main():
-    warningAlert('Started', 'backend has started', log_level=logging.INFO)
+    infoAlert('Started', 'backend has started', log_level=logging.INFO)
     await blockchain.connect(autosign=False)  # the transaction manager checks out accounts and releases them.
     redis_state = None
     state = None
@@ -115,9 +123,9 @@
     if redis_state:
         # load initial state
         log.info('initializing redis with root state')
-        await redis_state.save(state.root_fork, state.diffs_by_branch[state.root_branch.id])
+        await redis_state.init(state, state.root_fork)

-    await initialize_accounting()
+    await initialize_accounting_runner()
     runner = BlockStateRunner(state, publish_all=publish_all if redis_state else None)
     setup_logevent_triggers(runner)
@@ -138,4 +146,4 @@
 if __name__ == '__main__':
-    execute(main())
+    execute(main)

View File

@@ -95,7 +95,7 @@ async def write_metadata( pools, mirror_pools ):
     pool_dicts = [get_pool(addr) for (addr,_inverted) in mirror_pools]
     pool_dicts = await asyncio.gather(*pool_dicts)
     for data, addr, (_,inverted) in zip(pool_dicts, pools, mirror_pools):
-        data['x'] = dict(data=dict(uri=f'https://app.dexorder.trade/ohlc/', chain=42161, symbol=addr, inverted=inverted))
+        data['x'] = dict(data=dict(uri=f'https://app.dexorder.com/ohlc/', chain=42161, symbol=addr, inverted=inverted))
     tokens = set(p['base'] for p in pool_dicts)
     tokens.update(p['quote'] for p in pool_dicts)
     tokens = await asyncio.gather(*[get_token(t) for t in tokens])
@@ -190,6 +190,7 @@ async def main():
     while True:
         wake_up = now() + delay
         # log.debug(f'querying {pool}')
+        tx = None
         try:
             price = await get_pool_price(pool)
             if price != last_prices.get(pool):
@@ -200,7 +201,10 @@
             addr, inverted = mirror_pools[pool]
             log.debug(f'Mirrored {addr} {price}')
         except Exception as x:
-            log.debug(f'Could not update {pool}: {x}')
+            log.debug(f'Could not update {pool}: {x} {tx}')
+            if tx is not None:
+                tx.account.reset_nonce()
+                tx.account.release()
             continue
         try:
             pool = next(pool_iter)
@@ -216,4 +220,4 @@
 if __name__ == '__main__':
-    execute(main())
+    execute(main)

View File

@@ -1,9 +1,9 @@
import logging
-from sqlalchemy import select
+from sqlalchemy import select, text
from dexorder import db, blockchain
-from dexorder.accounting import accounting_reconcile, accounting_lock
+from dexorder.accounting import accounting_reconcile
from dexorder.bin.executable import execute
from dexorder.blocks import fetch_latest_block, current_block
from dexorder.database.model import DbAccount
@@ -15,7 +15,7 @@ async def main():
db.connect()
block = await fetch_latest_block()
current_block.set(block)
-accounting_lock()
+db.session.execute(text("LOCK TABLE account, accounting, reconciliation IN EXCLUSIVE MODE"))
try:
accounts = db.session.execute(select(DbAccount)).scalars().all()
for account in accounts:
@@ -28,5 +28,4 @@ async def main():
if __name__ == '__main__':
-execute(main())
+execute(main)
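
The reconciliation script now takes its table locks inline rather than through the removed accounting_lock() helper. A minimal sketch of the same pattern, assuming a SQLAlchemy session bound to Postgres (the connection URL is illustrative):

    from sqlalchemy import create_engine, text
    from sqlalchemy.orm import Session

    engine = create_engine('postgresql://dexorder@localhost/dexorder')  # illustrative URL
    with Session(engine) as session, session.begin():
        # EXCLUSIVE MODE blocks all concurrent writes (but not plain SELECTs)
        # until this transaction commits or rolls back.
        session.execute(text('LOCK TABLE account, accounting, reconciliation IN EXCLUSIVE MODE'))
        # ... reconcile each account while the tables cannot change underneath us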

View File

@@ -1,14 +1,23 @@
import logging
+from dataclasses import dataclass
-from dexorder import blockchain, db
+from dexorder import dec
from dexorder.bin.executable import execute
log = logging.getLogger(__name__)
-async def main():
-await blockchain.connect()
-db.connect()
+@dataclass
+class RefillConfig:
+refill_level: dec
+refill_accounts: list[str]
+async def main(refill_config: RefillConfig):
+# await blockchain.connect()
+# db.connect()
+log.info(f'Refilling to {refill_config.refill_level:.18f} ETH')
+log.info(f'Refilling accounts: {refill_config.refill_accounts}')
if __name__ == '__main__':
-execute(main())
+execute(main, parse_args=RefillConfig)

View File

@@ -22,6 +22,11 @@ from dexorder.util import hexbytes
log = logging.getLogger(__name__)
+def blocktime():
+""" timestamp of the most recent block seen in real-time, NOT the current block being worked on """
+return latest_block[current_chain.get().id].timestamp
async def get_block_timestamp(block_id: Union[bytes,int]) -> int:
block = await get_block(block_id)
if block is None:

View File

@@ -52,7 +52,7 @@ class BlockData (Generic[T]):
def setitem(self, item, value: T, overwrite=True):
state = current_blockstate.get()
fork = current_fork.get()
-state.set(fork, self.series, item, value, overwrite)
+return state.set(fork, self.series, item, value, overwrite)
def getitem(self, item, default=NARG) -> T:
state = current_blockstate.get()
@@ -63,9 +63,11 @@ class BlockData (Generic[T]):
result = default
if self.lazy_getitem:
lazy = self.lazy_getitem(self, item)
-if lazy is not NARG:
+if lazy is not NARG and lazy is not DELETE:
-state.set(state.root_fork, self.series, item, lazy)
+state.set(state.root_fork, self.series, item, lazy, readonly_override=True)
result = lazy
+if result is DELETE:
+result = default
if result is NARG:
raise KeyError
return result
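
getitem distinguishes "no value present" (NARG) from "value explicitly deleted on this fork" (DELETE) using identity-compared sentinel objects, and lazy loads now skip DELETE tombstones instead of caching them as real values. A minimal sketch of the sentinel pattern in isolation:

    NARG = object()    # caller supplied no default
    DELETE = object()  # tombstone: the key was explicitly deleted

    def lookup(store: dict, key, default=NARG):
        result = store.get(key, NARG)
        if result is DELETE:   # a tombstone hides any older value
            result = default
        if result is NARG:     # nothing found and no default given
            raise KeyError(key)
        return result

    store = {'a': 1, 'b': DELETE}
    assert lookup(store, 'a') == 1
    assert lookup(store, 'b', default=None) is None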
@@ -142,7 +144,7 @@ class BlockSet(Generic[T], Iterable[T], BlockData[T]):
return self.contains(item)
def __iter__(self) -> Iterator[T]:
-yield from (k for k,v in self.iter_items(self.series))
+return self.iter_keys(self.series)
class BlockDict(Generic[K,V], BlockData[V]):
@@ -162,6 +164,9 @@ class BlockDict(Generic[K,V], BlockData[V]):
def __contains__(self, item: K) -> bool:
return self.contains(item)
+def __iter__(self) -> Iterator[K]:
+return self.iter_keys(self.series)
def items(self) -> Iterable[tuple[K,V]]:
return self.iter_items(self.series)

View File

@@ -53,7 +53,10 @@ class BlockState:
with a diff height of the root branch or older is always part of the finalized blockchain.
"""
+class ReadOnlyError(Exception): ...
def __init__(self):
+self.readonly = False
self._root_branch: Optional[Branch] = None
self._root_fork: Optional[Fork] = None
self.height: int = 0  # highest branch seen
@@ -80,6 +83,8 @@ class BlockState:
@root_branch.setter
def root_branch(self, value: Branch):
+if self.readonly:
+raise self.ReadOnlyError()
self._root_branch = value
self._root_fork = Fork([value])
@@ -92,6 +97,8 @@ class BlockState:
return self._root_branch.head
def init_root_block(self, root_block: Block) -> Fork:
+if self.readonly:
+raise self.ReadOnlyError()
assert self.root_branch is None
return self.add_branch(Branch.from_block(root_block))
@@ -113,6 +120,8 @@ class BlockState:
should only be set to False when it is assured that the branch may be joined by height alone, because
the branch join is known to be at a live-blockchain-finalized height.
"""
+if self.readonly:
+raise self.ReadOnlyError()
assert branch.id not in self.branches_by_id
if self.root_branch is None:
@@ -155,6 +164,8 @@ class BlockState:
def remove_branch(self, branch: Branch, *, remove_series_diffs=True):
+if self.readonly:
+raise self.ReadOnlyError()
if branch.height == self.height and len(self.branches_by_height[branch.height]) == 1:
# this is the only branch at this height: compute the new lower height
other_heights = [b.height for b in self.branches_by_id.values() if b is not branch]
@@ -210,7 +221,9 @@ class BlockState:
return DELETE
-def set(self, fork: Fork, series, key, value, overwrite=True):
+def set(self, fork: Fork, series, key, value, overwrite=True, *, readonly_override=False):
+if not readonly_override and self.readonly:
+raise self.ReadOnlyError()
# first look for an existing value
branch = fork.branch
diffs = self.diffs_by_series.get(series,{}).get(key)
@@ -219,8 +232,9 @@ class BlockState:
for diff in diffs:
if diff.branch_id == branch.id:
# if there's an existing value for this branch, we replace it
+old_value = diff.value
diff.value = value
-return
+return old_value
elif self._fork_has_diff(fork, diff):
# if there's an existing value on this fork, remember it
old_value = diff.value
@@ -236,6 +250,8 @@ class BlockState:
return old_value
def unload(self, fork: Optional[Fork], series, key):
+if self.readonly:
+raise self.ReadOnlyError()
self.unloads[fork.branch_id].append((series, key))
def iteritems(self, fork: Optional[Fork], series):
@@ -285,6 +301,8 @@ class BlockState:
Returns the set of diffs for the promoted fork.
"""
+if self.readonly:
+raise self.ReadOnlyError()
found_root = False
promotion_branches = []
for branch in reversed(fork.branches):
@@ -350,6 +368,7 @@ class FinalizedBlockState:
""" """
def __init__(self): def __init__(self):
self.readonly = False
self.data = {} self.data = {}
self.by_hash = {} self.by_hash = {}
@@ -361,6 +380,8 @@ class FinalizedBlockState:
def set(self, _fork: Optional[Fork], series, key, value, overwrite=True):
assert overwrite
+if self.readonly:
+raise BlockState.ReadOnlyError()
self.data.setdefault(series, {})[key] = value
def iteritems(self, _fork: Optional[Fork], series):
@@ -373,6 +394,8 @@ class FinalizedBlockState:
return self.data.get(series,{}).values()
def delete_series(self, _fork: Optional[Fork], series: str):
+if self.readonly:
+raise BlockState.ReadOnlyError()
del self.data[series]
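
Both state classes now honor the same readonly contract: every mutator raises ReadOnlyError, while readonly_override leaves a narrow path for cache-fill writes such as lazily loaded finalized values. A minimal sketch of the guard, assuming only the names above:

    class ReadOnlyError(Exception): ...

    class State:
        def __init__(self):
            self.readonly = False
            self.data = {}

        def set(self, key, value, *, readonly_override=False):
            # readonly_override admits internal cache fills without
            # letting callers mutate a read-only snapshot
            if self.readonly and not readonly_override:
                raise ReadOnlyError()
            self.data[key] = value

    state = State()
    state.readonly = True                      # e.g. an inspection tool like bin/examine.py
    state.set('k', 1, readonly_override=True)  # lazy cache fill: allowed
    try:
        state.set('k', 2)                      # ordinary write: rejected
    except ReadOnlyError:
        pass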

View File

@@ -8,9 +8,9 @@ from omegaconf.errors import OmegaConfBaseException
from .schema import Config
-schema = OmegaConf.structured(Config())
+schema = OmegaConf.structured(Config(), flags={'struct': False})
-_config_file = 'dexorder.toml'
+config_file = 'dexorder.toml'
class ConfigException (Exception):
pass
@@ -21,7 +21,7 @@ def load_config():
result:ConfigDict = OmegaConf.merge(
schema,
from_toml('.secret.toml'),
-from_toml(_config_file),
+from_toml(config_file),
from_toml('config.toml'),
from_env()
)
@@ -73,7 +73,7 @@ if len(sys.argv) > 1 and (sys.argv[1] == '-c' or sys.argv[1] == '--config'):
if len(sys.argv) < 3:
raise ConfigException('Missing config file argument')
else:
-_config_file = sys.argv[2]
+config_file = sys.argv[2]
sys.argv = [sys.argv[0], *sys.argv[3:]]
config = load_config()
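
Merge order matters here: OmegaConf.merge gives later sources precedence, so values flow schema defaults -> .secret.toml -> config_file (possibly overridden by -c) -> config.toml -> environment. A small sketch of that precedence, assuming omegaconf is installed:

    from dataclasses import dataclass
    from omegaconf import OmegaConf

    @dataclass
    class Schema:
        rpc_url: str = 'http://localhost:8545'
        dump_sql: bool = False

    schema = OmegaConf.structured(Schema(), flags={'struct': False})
    file_cfg = OmegaConf.create({'rpc_url': 'http://node:8545'})  # stand-in for a TOML file
    env_cfg = OmegaConf.create({'dump_sql': True})                # stand-in for from_env()

    merged = OmegaConf.merge(schema, file_cfg, env_cfg)  # rightmost source wins per key
    assert merged.rpc_url == 'http://node:8545'
    assert merged.dump_sql is True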

View File

@@ -1,3 +1,3 @@
from .load import config

View File

@@ -9,6 +9,7 @@ from typing import Optional
@dataclass
class Config:
+contract_version: Optional[str] = None  # version tag of the contract deployment to use. if None then the local ../contract tree is used
confirms: Optional[int] = None  # number of blocks before data is considered finalized. if None then the chain's default setting is used
batch_size: Optional[int] = None  # max number of blocks to query in a single backfill rpc request
rpc_url: str = 'http://localhost:8545'  # may be a comma-separated list. may include names of entries in rpc_urls.
@@ -16,6 +17,7 @@ class Config:
ws_url: Optional[str] = 'ws://localhost:8545'
rpc_urls: Optional[dict[str,str]] = field(default_factory=dict)
db_url: Optional[str] = 'postgresql://dexorder:redroxed@localhost/dexorder'
+db_readonly: bool = False
dump_sql: bool = False
redis_url: Optional[str] = 'redis://localhost:6379'
@@ -41,12 +43,17 @@ class Config:
fee_leeway = 0.1  # do not adjust fees if they are within this proportion
min_gas: str = '0'
+mark_publish_seconds: float = 60  # publish mark prices every this number of seconds
# Order slashing
slash_kill_count: int = 5
slash_delay_base: float = 60  # one minute
slash_delay_mul: float = 2  # double the delay each time
slash_delay_max: int = 15 * 60
+# Tranches are paused for this long after they trigger a slippage control
+slippage_control_delay: float = 10  # matches the 10-second TWAP used by our uniswap router
walker_name: str = 'default'
walker_flush_interval: float = 300
walker_stop: Optional[int] = None  # block number of the last block the walker should process
@@ -60,6 +67,3 @@ class Config:
stablecoins: list[str] = field(default_factory=list)  # primary stablecoins which are marked to $1
quotecoins: list[str] = field(default_factory=list)  # quote tokens like WETH that have stablecoin markets
nativecoin: Optional[str] = None  # used for accounting of native values. e.g. address of WETH
-# account: target_balance
-refill: dict[str,str] = field(default_factory=dict)

View File

@@ -1,5 +1,6 @@
import glob
import json
+import logging
import os
from eth_abi.exceptions import InsufficientDataBytes
@@ -9,7 +10,7 @@ from web3.exceptions import BadFunctionCallOutput, ContractLogicError
from .abi import abis
from .contract_proxy import ContractProxy
-from .. import current_w3
+from .. import current_w3, config
from ..base.chain import current_chain
CONTRACT_ERRORS = (InsufficientDataBytes, ContractLogicError, BadFunctionCallOutput)
@@ -18,10 +19,28 @@ CONTRACT_ERRORS = (InsufficientDataBytes, ContractLogicError, BadFunctionCallOut
# set initially to the string filename, then loaded on demand and set to the parsed JSON result
_contract_data: dict[str,Union[str,dict]] = {}
-# finds all .sol files and sets _contract_data with their pathname
-for _file in glob.glob('../contract/out/**/*.sol/*.json', recursive=True):
-if os.path.isfile(_file):
-_contract_data[os.path.basename(_file)[:-5]] = _file
+initialized = False
+_contract_path = ''
+def get_contract_path():
+init_contract_data()
+return _contract_path
+log = logging.getLogger(__name__)
+def init_contract_data():
+global initialized, _contract_path
+if initialized:
+return
+subpath = '' if config.contract_version is None else f'/deployment/{config.contract_version}'
+_contract_path = f'../contract{subpath}'
+# finds all .json files in the out path and sets _contract_data with their pathname
+for _file in glob.glob(f'{_contract_path}/out/**/*.sol/*.json', recursive=True):
+if os.path.isfile(_file):
+_contract_data[os.path.basename(_file)[:-5]] = _file
+initialized = True
+log.info(f'Configured contracts from {_contract_path}')
def get_abi(name):
@@ -29,6 +48,7 @@ def get_abi(name):
def get_contract_data(name):
+init_contract_data()
try:
return {'abi':abis[name]}
except KeyError:
@@ -43,9 +63,10 @@ def get_contract_data(name):
def get_deployment_address(deployment_name, contract_name, *, chain_id=None):
+init_contract_data()
if chain_id is None:
chain_id = current_chain.get().id
-with open(f'../contract/broadcast/{deployment_name}.sol/{chain_id}/run-latest.json', 'rt') as file:
+with open(f'{_contract_path}/broadcast/{deployment_name}.sol/{chain_id}/run-latest.json', 'rt') as file:
data = json.load(file)
for tx in data.get('transactions',[]):
if tx.get('contractName') == contract_name:

View File

@@ -1,5 +1,5 @@
abis = {
-# ERC20 where symbol() returns a bytes32 instead of a string
+# Special ERC20 definition where symbol() returns a bytes32 instead of a string
'ERC20.sb': '''[{"type":"function","name":"symbol","inputs":[],"outputs":[{"name":"","type":"bytes32","internalType":"bytes32"}],"stateMutability":"view"},{"type":"function","name":"name","inputs":[],"outputs":[{"name":"","type":"bytes32","internalType":"bytes32"}],"stateMutability":"view"}]'''
-# 'WMATIC': '''[{"constant":true,"inputs":[],"name":"name","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"guy","type":"address"},{"name":"wad","type":"uint256"}],"name":"approve","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"src","type":"address"},{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transferFrom","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"wad","type":"uint256"}],"name":"withdraw","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint8"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"","type":"address"}],"name":"balanceOf","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"symbol","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transfer","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[],"name":"deposit","outputs":[],"payable":true,"stateMutability":"payable","type":"function"},{"constant":true,"inputs":[{"name":"","type":"address"},{"name":"","type":"address"}],"name":"allowance","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"payable":true,"stateMutability":"payable","type":"fallback"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":true,"name":"guy","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":true,"name":"dst","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"dst","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Deposit","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Withdrawal","type":"event"}]''',
+# 'WMATIC':
+'''[{"constant":true,"inputs":[],"name":"name","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"guy","type":"address"},{"name":"wad","type":"uint256"}],"name":"approve","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"src","type":"address"},{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transferFrom","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"wad","type":"uint256"}],"name":"withdraw","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint8"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"","type":"address"}],"name":"balanceOf","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"symbol","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transfer","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[],"name":"deposit","outputs":[],"payable":true,"stateMutability":"payable","type":"function"},{"constant":true,"inputs":[{"name":"","type":"address"},{"name":"","type":"address"}],"name":"allowance","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"payable":true,"stateMutability":"payable","type":"fallback"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":true,"name":"guy","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":true,"name":"dst","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"dst","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Deposit","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Withdrawal","type":"event"}]''',
}

View File

@@ -60,14 +60,14 @@ class DeployTransaction (ContractTransaction):
def call_wrapper(addr, name, func):
-async def f(*args, block_identifier=None, **kwargs):
+async def f(*args, block_identifier=None, kwargs=None):
if block_identifier is None:
try:
block_identifier = current_block.get().height
except (LookupError, AttributeError):
block_identifier = 'latest'
try:
-return await func(*args).call(block_identifier=block_identifier, **kwargs)
+return await func(*args).call(block_identifier=block_identifier, **(kwargs or {}))
except Web3Exception as e:
e.args += addr, name
raise e
@@ -75,26 +75,29 @@ def call_wrapper(addr, name, func):
def transact_wrapper(addr, name, func):
-async def f(*args, **kwargs):
+async def f(*args, kwargs=None):
-tx = await func(*args).build_transaction(kwargs)
+tx = await func(*args).build_transaction(kwargs or {})
ct = ContractTransaction(tx)
account = await Account.acquire()
if account is None:
raise ValueError(f'No account to sign transaction {addr}.{name}()')
-await ct.sign(account)
-try:
-tx_id = await current_w3.get().eth.send_raw_transaction(ct.data)
-assert tx_id == ct.id_bytes
-return ct
-except Web3Exception as e:
-e.args += addr, name
-raise e
+try:
+await ct.sign(account)
+try:
+tx_id = await current_w3.get().eth.send_raw_transaction(ct.data)
+assert tx_id == ct.id_bytes
+return ct
+except Web3Exception as e:
+e.args += addr, name
+raise e
+finally:
+account.release()
return f
def build_wrapper(_addr, _name, func):
-async def f(*args, **kwargs):
+async def f(*args, kwargs=None):
-tx = await func(*args).build_transaction(kwargs)
+tx = await func(*args).build_transaction(kwargs or {})
return ContractTransaction(tx)
return f
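
All three wrappers switch from **kwargs to an explicit kwargs dict, so misspelled options can no longer be silently absorbed by the dynamic proxies, and transact_wrapper's widened try/finally guarantees the signing account is released even when sign() or the send fails. A hedged sketch of the resulting calling convention (contract and option names are illustrative):

    # before: transaction options were spread into the proxy call
    #   await dexorder.execute(order_key, value=0)
    # after: options travel in one explicit dict, defaulting to empty
    #   await dexorder.execute(order_key, kwargs={'value': 0})

    def normalize(kwargs=None):
        # the `kwargs or {}` idiom maps both None and {} to a fresh dict
        return dict(kwargs or {})

    assert normalize() == {}
    assert normalize({'value': 0}) == {'value': 0}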

View File

@@ -1,24 +0,0 @@
import logging
from dexorder import db
from dexorder.contract import ERC20, CONTRACT_ERRORS
log = logging.getLogger(__name__)
async def token_decimals(addr):
key = f'td|{addr}'
try:
return db.kv[key]
except KeyError:
# noinspection PyBroadException
try:
decimals = await ERC20(addr).decimals()
except CONTRACT_ERRORS:
log.warning(f'token {addr} has no decimals()')
decimals = 0
except Exception:
log.debug(f'could not get token decimals for {addr}')
return None
db.kv[key] = decimals
return decimals

View File

@@ -6,26 +6,37 @@ from eth_utils import keccak, to_bytes, to_checksum_address
from typing_extensions import Optional
from dexorder.base.chain import current_chain
-from dexorder.contract import ContractProxy
+from dexorder.contract import ContractProxy, get_contract_path
log = logging.getLogger(__name__)
+version = None
+chain_info = None
_factory = {}
_dexorder = {}
_vault_init_code_hash = {}
+_initialized = False
+def _ensure_init():
+global version, chain_info
+with open(f'{get_contract_path()}/version.json') as version_file:
+version = json.load(version_file)
+log.info(f'Version: {version}')
+chain_info = version['chainInfo']
+for _chain_id, info in chain_info.items():
+_chain_id = int(_chain_id)
+_factory[_chain_id] = ContractProxy(info['factory'], 'VaultFactory')
+_dexorder[_chain_id] = ContractProxy(info['dexorder'], 'DexorderGMX')
+_vault_init_code_hash[_chain_id] = to_bytes(hexstr=info['vaultInitCodeHash'])
+def __getattr__(name):
+global _initialized
+if not _initialized:
+_ensure_init()
+_initialized = True
+raise AttributeError()
-with open('../contract/version.json') as version_file:
-version = json.load(version_file)
-log.info(f'Version: {version}')
-chain_info = version['chainInfo']
-for _chain_id, info in chain_info.items():
-_chain_id = int(_chain_id)
-_factory[_chain_id] = ContractProxy(info['factory'], 'VaultFactory')
-_dexorder[_chain_id] = ContractProxy(info['dexorder'], 'Dexorder')
-_vault_init_code_hash[_chain_id] = to_bytes(hexstr=info['vaultInitCodeHash'])
def get_by_chain(d):
return d[current_chain.get().id]
@@ -40,11 +51,12 @@ def get_vault_init_code_hash() -> bytes:
return get_by_chain(_vault_init_code_hash)
def get_mockenv() -> Optional[ContractProxy]:
-addr = chain_info.get(str(current_chain.get().id),{}).get('mockenv')
+addr = globals()['chain_info'].get(str(current_chain.get().id), {}).get('mockenv')
return ContractProxy(addr, 'MockEnv') if addr is not None else None
def get_mirrorenv() -> Optional[ContractProxy]:
-addr = chain_info.get(str(current_chain.get().id),{}).get('mirrorenv')
+addr = globals()['chain_info'].get(str(current_chain.get().id), {}).get('mirrorenv')
return ContractProxy(addr, 'MirrorEnv') if addr is not None else None
def vault_address(owner, num):
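
The eager module-load of version.json becomes lazy via a module-level __getattr__ (PEP 562), so merely importing dexorder.contract.dexorder no longer requires the contract tree to exist. A minimal sketch of the general pattern, independent of this module:

    # lazy_mod.py -- initialize expensive globals on first failed attribute lookup
    _initialized = False

    def _ensure_init():
        globals()['version'] = {'tag': 'v1'}  # stand-in for reading version.json

    def __getattr__(name):  # PEP 562: called only when normal module lookup fails
        global _initialized
        if not _initialized:
            _initialized = True
            _ensure_init()
            if name in globals():
                return globals()[name]  # retry now that init has run
        raise AttributeError(name)

    # elsewhere: import lazy_mod; lazy_mod.version triggers _ensure_init()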

View File

@@ -3,7 +3,7 @@ import logging
from contextvars import ContextVar
import sqlalchemy
-from sqlalchemy import Engine
+from sqlalchemy import Engine, event
from sqlalchemy.orm import Session, SessionTransaction
from .migrate import migrate_database
@@ -99,7 +99,7 @@ class Db:
_session.set(None)
# noinspection PyShadowingNames
-def connect(self, url=None, migrate=True, reconnect=False, dump_sql=None):
+def connect(self, url=None, migrate=True, reconnect=False, dump_sql=None, readonly:bool=None):
if _engine.get() is not None and not reconnect:
return None
if url is None:
@@ -114,6 +114,19 @@ class Db:
if dump_sql is None:
dump_sql = config.dump_sql
engine = sqlalchemy.create_engine(url, echo=dump_sql, json_serializer=json.dumps, json_deserializer=json.loads)
+if readonly is None:
+readonly = config.db_readonly
+if readonly:
+@event.listens_for(engine, "connect")
+def set_readonly(dbapi_connection, _connection_record):
+cursor = dbapi_connection.cursor()
+try:
+cursor.execute("SET default_transaction_read_only = on;")
+log.info('database connection set to READ ONLY')
+finally:
+cursor.close()
if migrate:
migrate_database(url)
with engine.connect() as connection:
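
With db_readonly set, the connect-time listener flips every transaction on the pooled connections to read-only, so Postgres itself rejects stray writes from inspection tooling. A standalone sketch of the same listener, assuming SQLAlchemy 2.x and a Postgres DSN:

    import sqlalchemy
    from sqlalchemy import event, text

    engine = sqlalchemy.create_engine('postgresql://dexorder@localhost/dexorder')  # illustrative DSN

    @event.listens_for(engine, 'connect')
    def set_readonly(dbapi_connection, _connection_record):
        cursor = dbapi_connection.cursor()
        try:
            # every transaction on this connection now defaults to read-only;
            # writes fail with "cannot execute ... in a read-only transaction"
            cursor.execute('SET default_transaction_read_only = on;')
        finally:
            cursor.close()

    with engine.connect() as conn:
        conn.execute(text('SELECT 1'))  # reads still work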

View File

@@ -10,3 +10,4 @@ from .ofac import OFAC, OFACAlerts
from .accounting import Accounting, DbAccount
from .vaultcreationrequest import VaultCreationRequest
from .tos import TOSAcceptance
+from .sharedata import ShareData

View File

@@ -1,7 +1,7 @@
import logging
from datetime import datetime
from decimal import Decimal as dec
-from enum import Enum
+from enum import Enum, auto
from sqlalchemy import ForeignKeyConstraint
from sqlalchemy.ext.mutable import MutableDict
@@ -17,35 +17,37 @@ log = logging.getLogger(__name__)
class AccountingCategory (Enum):
-Transfer = 0
-Income = 1
-Expense = 2
-Trade = 3
-Special = 4
+Transfer = auto()
+Income = auto()
+Expense = auto()
+Trade = auto()
+Special = auto()
class AccountingSubcategory (Enum):
# Income
-OrderFee = 0
-GasFee = 1
-FillFee = 2
+OrderFee = auto()
+GasFee = auto()
+FillFee = auto()
# Expense
-VaultCreation = 3
-Execution = 4
-FeeAdjustment = 5 # includes adjusting fee limits
+Admin = auto()  # contract deployments and upgrades, changing adjuster address, etc.
+TransactionGas = auto()
+VaultCreation = auto()
+Execution = auto()
+FeeAdjustment = auto()  # includes adjusting fee limits
# Transfer
# Transfers have no subcategories, but the note field will be the address of the other account. Both a debit and a
# credit entry will be created, one for each account participating in the transfer.
# Special Codes
-InitialBalance = 5
+InitialBalance = auto()
class Accounting (Base):
id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
time: Mapped[datetime] = mapped_column(default=now(), index=True)
-chain_id: Mapped[int] = mapped_column(index=True)
+chain_id: Mapped[int] = mapped_column(index=True)  # chain_id
account: Mapped[str] = mapped_column(index=True)
category: Mapped[AccountingCategory] = mapped_column(index=True)
subcategory: Mapped[Optional[AccountingSubcategory]] = mapped_column(index=True)
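
Moving the members to auto() renumbers them (auto() starts at 1), which is safe on the assumption that SQLAlchemy's Enum column persists member names rather than integer values, so only the genuinely new names (Admin, TransactionGas) need a schema migration. A quick illustration:

    from enum import Enum, auto

    class Subcategory(Enum):
        OrderFee = auto()
        GasFee = auto()

    # auto() assigns 1, 2, ... but an Enum-typed column stores the *name*
    assert Subcategory.OrderFee.value == 1
    assert Subcategory['GasFee'] is Subcategory.GasFee  # round-trips by name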

View File

@@ -3,6 +3,7 @@ from typing import TypedDict, Optional
from sqlalchemy.orm import Mapped, mapped_column
+from dexorder.base import OldPoolDict
from dexorder.base.orderlib import Exchange
from dexorder.database.column import Address, Blockchain
from dexorder.database.model import Base
@@ -20,17 +21,6 @@ class PoolDict (TypedDict):
x: Optional[dict]
-class OldPoolDict (TypedDict):
-type: str
-chain: int
-address: str
-exchange: int
-base: str
-quote: str
-fee: int
-decimals: int
class Pool (Base):
__tablename__ = 'pool'

View File

@@ -0,0 +1,12 @@
import logging
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm import Mapped, mapped_column
from dexorder.database.model import Base
log = logging.getLogger(__name__)
class ShareData (Base):
id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
data: Mapped[dict] = mapped_column(JSONB)

View File

@@ -1,37 +1,15 @@
import logging
-from typing import TypedDict, Optional, NotRequired
from sqlalchemy import Index
from sqlalchemy.orm import Mapped, mapped_column
+from dexorder.base import OldTokenDict
from dexorder.database.column import Address, Blockchain, Uint8
from dexorder.database.model import Base
log = logging.getLogger(__name__)
-class TokenDict (TypedDict):
-a: str
-n: str
-s: str
-d: int
-w: Optional[bool] # approved ("w"hitelisted)
-x: NotRequired[dict] # extra data
-# OldTokenDict is the primary dict we use in-memory, with basic JSON-able types
-class OldTokenDict (TypedDict):
-type: str
-chain: int
-address: str
-name: str
-symbol: str
-decimals: int
-approved: bool # whether this token is in the whitelist or not
-x: NotRequired[dict] # extra data
# the database object is primarily write-only so we are able to index queries for pools-by-token from the nodejs server
class Token (Base):

View File

@@ -1,14 +1,14 @@
import asyncio
import logging
+from eth_utils import keccak
from web3.types import EventData
from dexorder import db, metric, current_w3, timestamp
-from dexorder.accounting import accounting_fill, accounting_placement, accounting_transfer, is_tracked_address, \
-accounting_lock
+from dexorder.accounting import accounting_fill, accounting_placement
from dexorder.base.chain import current_chain
from dexorder.base.order import TrancheKey, OrderKey
-from dexorder.base.orderlib import SwapOrderState
+from dexorder.base.orderlib import SwapOrderState, Exchange, GMXOrder
from dexorder.blocks import get_block_timestamp
from dexorder.blockstate import current_blockstate
from dexorder.contract.dexorder import VaultContract, get_factory_contract
@@ -18,7 +18,8 @@ from dexorder.ohlc import ohlcs
from dexorder.order.orderstate import Order
from dexorder.order.triggers import (OrderTriggers, activate_order, update_balance_triggers, start_trigger_updates,
update_price_triggers, TimeTrigger, PriceLineTrigger)
-from dexorder.pools import new_pool_prices, pool_prices, get_uniswap_data
+from dexorder.pools import new_pool_prices, pool_prices, get_uniswap_data, get_pool
+from dexorder.progressor import BlockProgressor
from dexorder.util import hexstr
from dexorder.vault_blockdata import vault_owners, adjust_balance, verify_vault, publish_vaults
@@ -32,7 +33,14 @@ def dump_log(eventlog):
def init():
new_pool_prices.clear()
start_trigger_updates()
-accounting_lock()
+def wire_dexorder_debug(runner: BlockProgressor):
+runner.add_event_trigger(handle_dexorderdebug, None, {"topics":[keccak(text='DexorderDebug(string)')]})
+def handle_dexorderdebug(events: list):
+for event in events:
+print(f'DexorderDebug {event}')
async def handle_order_placed(event: EventData):
@@ -59,8 +67,11 @@ async def handle_order_placed(event: EventData):
obj = await contract.swapOrderStatus(index)
log.debug(f'raw order status {obj}')
order = Order.create(addr, index, event['transactionHash'], obj)
+if order.order.route.exchange == Exchange.GMX:
+gmxStatus = await contract.gmxOrderStatus(index)
+order.order.gmx = GMXOrder.load(gmxStatus[0])
await activate_order(order)
-log.debug(f'new order {order.key}{order}')
+log.debug(f'new order {order.key} {await order.pprint()}')
async def handle_swap_filled(event: EventData):
@@ -83,9 +94,10 @@ async def handle_swap_filled(event: EventData):
except KeyError:
log.warning(f'DexorderSwapFilled IGNORED due to missing order {vault} {order_index}')
return
-value = await accounting_fill(event, order.order.tokenOut)
-if value is not None:
-metric.volume.inc(float(value))
+usd_value = await accounting_fill(event, order.order.tokenOut)
+# from here down is almost the same as a section of handle_gmxorderexecuted()
+if usd_value is not None:
+metric.volume.inc(float(usd_value))
order.status.trancheStatus[tranche_index].activationTime = next_execution_time  # update rate limit
try:
triggers = OrderTriggers.instances[order.key]
@@ -139,10 +151,11 @@ async def handle_transfer(transfer: EventData):
vault = None
if vault is not None:
await adjust_balance(vault, token_address, amount)
-await update_balance_triggers(vault, token_address, amount)
+await update_balance_triggers(vault, token_address)
+# This would double-count fill fees. Instead, we book the transfer when sending money to the account as part of a refill.
-if is_tracked_address(to_address):
-# noinspection PyTypeChecker
-await accounting_transfer(transfer, token_address, from_address, to_address, amount, adjust_decimals=True)
+# if is_tracked_address(to_address):
+#     # noinspection PyTypeChecker
+#     await accounting_transfer(transfer, token_address, from_address, to_address, amount, adjust_decimals=True)
async def handle_uniswap_swaps(swaps: list[EventData]):
# asynchronously prefetch the block timestamps we'll need
@@ -159,12 +172,26 @@ async def handle_uniswap_swap(swap: EventData):
return
pool, time, price = data
addr = pool['address']
-pool_prices[addr] = price
-await ohlcs.update_all(addr, time, price)
-await update_price_triggers(pool, price)
+await update_pool_price(addr, time, price, pool['decimals'])
# log.debug(f'pool {addr} {minutely(time)} {price}')
+async def update_pool_price(addr, time, price, decimals):
+"""
+Price should be an adjusted price with decimals, not the raw price from the pool. The decimals are used to
+convert the price back to blockchain format for the triggers.
+"""
+pool_prices[addr] = price  # this will update new_pool_prices if necessary
+await ohlcs.update_all(addr, time, price)
+update_price_triggers(addr, price, decimals)
+async def activate_new_price_triggers():
+for addr, price in new_pool_prices.items():
+pool = await get_pool(addr)
+update_price_triggers(addr, price, pool['decimals'])
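
update_pool_price expects the human-readable price with the base/quote decimal difference already applied; decimals is kept alongside so the trigger layer can convert back to raw on-chain units. A hedged sketch of that round-trip (the power-of-ten scaling convention is an assumption for illustration):

    from decimal import Decimal as dec

    def to_adjusted(raw_price: dec, decimals: int) -> dec:
        # e.g. a USDC(6)/WETH(18) pool: scale the raw ratio by the decimal difference
        return raw_price * dec(10) ** decimals

    def to_raw(adjusted_price: dec, decimals: int) -> dec:
        return adjusted_price / dec(10) ** decimals

    raw = dec('3.1e-9')  # hypothetical raw pool ratio
    assert to_raw(to_adjusted(raw, 12), 12) == raw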
async def handle_vault_created(created: EventData):
try:
owner = created['args']['owner']
@@ -221,7 +248,7 @@ async def update_metrics():
metric.vaults.set(vault_owners.upper_len())
metric.open_orders.set(Order.open_orders.upper_len())
metric.triggers_time.set(len(TimeTrigger.all))
-metric.triggers_line.set(len(PriceLineTrigger.triggers_set))
+metric.triggers_line.set(sum(len(s) for s in PriceLineTrigger.by_pool.values()))
# slow updates
global slow_metric_update

View File

@@ -2,7 +2,7 @@ import asyncio
import logging
from dexorder.contract import ContractProxy
-from dexorder.contract.dexorder import get_factory_contract, get_fee_manager_contract
+from dexorder.contract.dexorder import get_fee_manager_contract
log = logging.getLogger(__name__)

View File

@@ -242,6 +242,10 @@ class OHLCFileSeries:
self.dirty_files = set()
self.quote: Optional[tuple[datetime,dec]] = None
+@property
+def exists(self) -> bool:
+return self.quote_file is not None or os.path.exists(self.quote_filename)
@property
def quote_filename(self):
@@ -276,6 +280,16 @@ class OHLCFileSeries:
self.dirty_files.add(file)
+# noinspection PyShadowingBuiltins
+def update_ohlc(self, period: timedelta, time: datetime, open: dec, high: dec, low: dec, close: dec):
+file = OHLCFile.get(self.base_dir, OHLCFilePath(self.symbol, period, time))
+file.update(time, open)
+file.update(time, high)
+file.update(time, low)
+file.update(time, close)
+self.dirty_files.add(file)
def _load(self, time):
#
# load quote file
@@ -359,14 +373,25 @@ class FinalOHLCRepository:
""" """
def __init__(self): def __init__(self):
assert config.ohlc_dir assert config.ohlc_dir
self.dirty_series = set() self.dirty_series: set[OHLCFileSeries] = set()
def update(self, symbol: str, time: datetime, price: Optional[dec]): def update(self, symbol: str, time: datetime, price: Optional[dec]):
series = self.get_series(symbol)
series.update(time, price)
self.dirty_series.add(series)
# noinspection PyShadowingBuiltins
def update_ohlc(self, symbol: str, period: timedelta, time: datetime, open: dec, high: dec, low: dec, close: dec):
series = self.get_series(symbol)
series.update_ohlc(period, time, open, high, low, close)
self.dirty_series.add(series)
@staticmethod
def get_series(symbol):
chain_id = current_chain.get().id chain_id = current_chain.get().id
base_dir = os.path.join(config.ohlc_dir, str(chain_id)) base_dir = os.path.join(config.ohlc_dir, str(chain_id))
series = OHLCFileSeries.get(base_dir, symbol) series = OHLCFileSeries.get(base_dir, symbol)
series.update(time, price) return series
self.dirty_series.add(series)
def flush(self) -> None: def flush(self) -> None:
for series in self.dirty_series: for series in self.dirty_series:
@@ -378,3 +403,6 @@ class FinalOHLCRepository:
closing.file.close()
# noinspection PyProtectedMember
OHLCFile._closing.clear()
+def has_symbol(self, symbol: str):
+return self.get_series(symbol).exists
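
update_ohlc folds a whole pre-aggregated bar into the stored series by replaying open, high, low and close through the same single-price update path, which necessarily reproduces the bar's extremes. The idea in isolation:

    from decimal import Decimal as dec

    def fold(bar, price):
        # replaying O, H, L, C through a single-price update keeps the true extremes
        if bar is None:
            return {'o': price, 'h': price, 'l': price, 'c': price}
        bar['h'] = max(bar['h'], price)
        bar['l'] = min(bar['l'], price)
        bar['c'] = price
        return bar

    bar = None
    for price in (dec('1.00'), dec('1.05'), dec('0.98'), dec('1.02')):  # open, high, low, close
        bar = fold(bar, price)
    assert bar == {'o': dec('1.00'), 'h': dec('1.05'), 'l': dec('0.98'), 'c': dec('1.02')}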

View File

@@ -0,0 +1,5 @@
from ._base import gmx_prices, gmx_tk_in_flight, tk_gmx_in_flight
from ._chaininfo import gmx_chain_info
from ._handle import gmx_wire_runner_early, gmx_wire_runner_late
from ._metadata import *

src/dexorder/gmx/_abi.py Normal file
View File

@@ -0,0 +1,51 @@
import logging
import re
from eth_utils import keccak
from dexorder.util import hexbytes, hexstr
from dexorder.util.abiencode import abi_decoder
log = logging.getLogger(__name__)
def no_ws(s):
return re.sub(r"\s+", "", s)
EventLogDataType = '''
(((string,address)[],(string,address[])[]),
((string,uint256)[],(string,uint256[])[]),
((string,int256)[], (string,int256[])[] ),
((string,bool)[], (string,bool[])[] ),
((string,bytes32)[],(string,bytes32[])[]),
((string,bytes)[], (string,bytes[])[] ),
((string,string)[], (string,string[])[] )
)'''
EventLogType = f'EventLog( address, string, string, {EventLogDataType} )'
EventLog1Type = f'EventLog1( address, string, string, bytes32, {EventLogDataType} )'
EventLog2Type = f'EventLog2( address, string, string, bytes32, bytes32, {EventLogDataType} )'
EventLogTopic = hexstr(keccak(text=no_ws(EventLogType)))
EventLog1Topic = hexstr(keccak(text=no_ws(EventLog1Type)))
EventLog2Topic = hexstr(keccak(text=no_ws(EventLog2Type)))
def topic_hash(signature):
return hexstr(keccak(text=no_ws(signature)))
def parse_event_log_data(event_log):
event_log_data = event_log['data']
if type(event_log_data) is str:
event_log_data = hexbytes(event_log_data)
sender, event_name, event_log_data = abi_decoder.decode(('address', 'string', no_ws(EventLogDataType),), event_log_data)
result = {'sender': sender, 'event': event_name, 'tx': hexstr(event_log['transactionHash'])}
for items, array_items in event_log_data:
for k, v in items:
result[k] = v
for k, v in array_items:
result[k] = v
return result
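
GMX V2 routes every protocol event through generic EventLog/EventLog1/EventLog2 events whose payload is a nested tuple of typed (key, value) lists; parse_event_log_data flattens that into a single dict keyed by field name. A hedged sketch of consuming it (the filter construction is illustrative; field names depend on the specific GMX event):

    # topics are plain keccak hashes of the canonical signature, so a log filter
    # needs no full ABI:
    #
    #   filter_params = {
    #       'address': gmx_chain_info[42161]['EventEmitter'],
    #       'topics': [EventLog1Topic],
    #   }
    #   for event_log in await current_w3.get().eth.get_logs(filter_params):
    #       fields = parse_event_log_data(event_log)
    #       log.info(f"{fields['event']} from {fields['sender']} in tx {fields['tx']}")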

src/dexorder/gmx/_base.py Normal file
View File

@@ -0,0 +1,97 @@
import logging
from dataclasses import dataclass
from enum import Enum
from typing import NamedTuple
import requests
from eth_utils import to_checksum_address
from ._chaininfo import GMX_API_BASE_URLS
from .. import dec
from ..base.chain import current_chain
from ..base.order import TrancheKey
from ..blockstate import BlockDict
from ..util import json
log = logging.getLogger(__name__)
@dataclass
class GMXPosition:
# compound key fields
market_token: str
collateral_token: str
is_long: bool
# non-key attrs
size: dec = dec(0)
class Key (NamedTuple):
market_token: str
collateral_token: str
is_long: bool
def __str__(self):
return f'{self.market_token}|{self.collateral_token}|{"L" if self.is_long else "S"}'
@staticmethod
def str2key(keystring: str):
market_token, collateral_token, is_long = keystring.split('|')
return GMXPosition.Key(market_token.lower(), collateral_token.lower(), is_long == 'L')
@property
def key(self):
return GMXPosition.Key(self.market_token, self.collateral_token, self.is_long)
@staticmethod
def load(d: dict):
return GMXPosition(to_checksum_address(d['m']), to_checksum_address(d['c']), d['l'], dec(d['s']))
def dump(self):
return {
'm': self.market_token,
'c': self.collateral_token,
'l': self.is_long,
's': str(self.size),
}
def __hash__(self):
return hash(self.key)
def __eq__(self, other):
return self.key == other.key
class GMXOrderType (Enum):
MarketSwap = 0
LimitSwap = 1
MarketIncrease = 2
LimitIncrease = 3
MarketDecrease = 4
LimitDecrease = 5
StopLossDecrease = 6
Liquidation = 7
StopIncrease = 8
GMX_API_BASE_URL = None
def gmx_api(method, **params):
global GMX_API_BASE_URL
if GMX_API_BASE_URL is None:
GMX_API_BASE_URL = GMX_API_BASE_URLS[current_chain.get().id]
return requests.get(GMX_API_BASE_URL+method, params=params, timeout=5).json()
gmx_markets_by_index_token: BlockDict[str, list[str]] = BlockDict('gmx_t_m', redis=True, db=True, value2str=lambda mks: json.dumps(mks), str2value=lambda s: json.loads(s))
gmx_prices: BlockDict[str, dec] = BlockDict('gmx_p', redis=True, str2value=dec)
# open positions by vault
gmx_positions: BlockDict[str, list[GMXPosition]] = BlockDict('gmx_pos', redis=True, db=True,
value2str=lambda positions: json.dumps([p.dump() for p in positions]),
str2value=lambda positions: [GMXPosition.load(p) for p in json.loads(positions)] )
# dual mappings of our TrancheKey to a GMX Order key exist only when a GMX order has been placed but not yet handled
gmx_tk_in_flight: BlockDict[str, TrancheKey] = BlockDict('gmx_tif', db=True, str2value=TrancheKey.str2key)
tk_gmx_in_flight: BlockDict[TrancheKey, str] = BlockDict('tk2gmx', db=True, str2key=TrancheKey.str2key)
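
GMXPosition identifies a position by (market, collateral, direction) and round-trips a compact string form through Key.str2key for the Redis/DB-backed BlockDicts. A small standalone mirror of that key scheme (names and addresses illustrative):

    from typing import NamedTuple

    class Key(NamedTuple):
        market_token: str
        collateral_token: str
        is_long: bool

        def __str__(self):
            return f'{self.market_token}|{self.collateral_token}|{"L" if self.is_long else "S"}'

        @staticmethod
        def str2key(keystring: str):
            market, collateral, side = keystring.split('|')
            return Key(market.lower(), collateral.lower(), side == 'L')

    k = Key('0xmarket', '0xusdc', True)   # hypothetical lowercase addresses
    assert Key.str2key(str(k)) == k       # the string form round-trips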

View File

@@ -0,0 +1,16 @@
import logging
log = logging.getLogger(__name__)
gmx_chain_info = {
42161: {
'EventEmitter': '0xC8ee91A54287DB53897056e12D9819156D3822Fb',
'DataStore': '0xFD70de6b91282D8017aA4E741e9Ae325CAb992d8',
'Reader': '0x0537C767cDAC0726c76Bb89e92904fe28fd02fE1',
}
}
GMX_API_BASE_URLS={
31337: 'https://arbitrum-api.gmxinfra.io/',
42161: 'https://arbitrum-api.gmxinfra.io/',
}

View File

@@ -0,0 +1,24 @@
import logging
from functools import cache
from dexorder.contract import ContractProxy
from dexorder.gmx._datastore import DataStore
from dexorder.util import json
log = logging.getLogger(__name__)
def get_gmx_contract_info(name: str):
with open(f'./resource/abi/42161/gmx/{name}.json') as file:
info = json.load(file)
return info
@cache
def get_gmx_contract(name: str):
info = get_gmx_contract_info(name)
if name == 'DataStore':
clazz = DataStore
else:
clazz = ContractProxy
return clazz(info['address'], abi=info['abi'])

View File

@@ -0,0 +1,28 @@
import logging
from eth_utils import keccak
from dexorder import dec
from dexorder.contract import ContractProxy
from dexorder.util.abiencode import abi_encoder
log = logging.getLogger(__name__)
def combo_key(key_str, arg, arg_type='address'):
key_bytes = keccak(abi_encoder.encode(['string'], [key_str]))
return keccak(abi_encoder.encode(['bytes32', arg_type], [key_bytes, arg]))
IS_MARKET_DISABLED_KEY = 'IS_MARKET_DISABLED'
MIN_COLLATERAL_FACTOR_KEY = 'MIN_COLLATERAL_FACTOR'
class DataStore (ContractProxy):
async def is_market_disabled(self, market_addr: str):
return await self.getBool(combo_key(IS_MARKET_DISABLED_KEY, market_addr))
async def min_collateral_factor(self, market_addr: str):
result = await self.getUint(combo_key(MIN_COLLATERAL_FACTOR_KEY, market_addr))
if result == 0:
log.warning(f'no min collateral factor for market {market_addr}')
return 2 * dec(result) / dec(1e30)
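
GMX's DataStore is a generic key/value contract: every typed getter takes a bytes32 slot derived by hashing the ABI-encoded constant name together with its argument, which is exactly what combo_key computes. A worked sketch with eth-abi:

    from eth_abi import encode
    from eth_utils import keccak

    def combo_key(key_str: str, arg: str) -> bytes:
        key_bytes = keccak(encode(['string'], [key_str]))
        return keccak(encode(['bytes32', 'address'], [key_bytes, arg]))

    market = '0x0000000000000000000000000000000000000001'  # placeholder address
    slot = combo_key('IS_MARKET_DISABLED', market)
    assert len(slot) == 32  # the bytes32 key passed to DataStore.getBool(slot)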

src/dexorder/gmx/_error.py Normal file
View File

@@ -0,0 +1,292 @@
import logging
from dexorder.util.abiencode import abi_decoder
log = logging.getLogger(__name__)
gmx_error_map = {
'b244a107': 'ActionAlreadySignalled()',
'94fdaea2': 'ActionNotSignalled()',
'3285dc57': 'AdlNotEnabled()',
'd06ed8be': 'AdlNotRequired(int256,uint256)',
'70657e04': 'ArrayOutOfBoundsBytes(bytes[],uint256,string)',
'9d18e63b': 'ArrayOutOfBoundsUint256(uint256[],uint256,string)',
'60c5e472': 'AvailableFeeAmountIsZero(address,address,uint256)',
'11aeaf6b': 'BlockNumbersNotSorted(uint256,uint256)',
'ec775484': 'BuybackAndFeeTokenAreEqual(address,address)',
'd6b52b60': 'ChainlinkPriceFeedNotUpdated(address,uint256,uint256)',
'ec6d89c8': 'CollateralAlreadyClaimed(uint256,uint256)',
'bdec9c0d': 'CompactedArrayOutOfBounds(uint256[],uint256,uint256,string)',
'5ebb87c9': 'ConfigValueExceedsAllowedRange(bytes32,uint256)',
'413f9a54': 'DataStreamIdAlreadyExistsForToken(address)',
'83f2ba20': 'DeadlinePassed(uint256,uint256)',
'43e30ca8': 'DepositNotFound(bytes32)',
'dd70e0c9': 'DisabledFeature(bytes32)',
'09f8c937': 'DisabledMarket(address)',
'd4064737': 'DuplicatedIndex(uint256,string)',
'91c78b78': 'DuplicatedMarketInSwapPath(address)',
'dd7016a2': 'EmptyAccount()',
'e474a425': 'EmptyAddressInMarketTokenBalanceValidation(address,address)',
'52dfddfd': 'EmptyChainlinkPaymentToken()',
'8db88ccf': 'EmptyChainlinkPriceFeed(address)',
'b86fffef': 'EmptyChainlinkPriceFeedMultiplier(address)',
'616daf1f': 'EmptyClaimFeesMarket()',
'62e402cc': 'EmptyDataStreamFeedId(address)',
'088405c6': 'EmptyDataStreamMultiplier(address)',
'95b66fe9': 'EmptyDeposit()',
'01af8c24': 'EmptyDepositAmounts()',
'd1c3d5bd': 'EmptyDepositAmountsAfterSwap()',
'a14e1b3d': 'EmptyGlv(address)',
'bd192971': 'EmptyGlvDeposit()',
'03251ce6': 'EmptyGlvDepositAmounts()',
'94409f52': 'EmptyGlvMarketAmount()',
'93856b1a': 'EmptyGlvTokenSupply()',
'0e5be78f': 'EmptyGlvWithdrawal()',
'402a866f': 'EmptyGlvWithdrawalAmount()',
'e9b78bd4': 'EmptyHoldingAddress()',
'05fbc1ae': 'EmptyMarket()',
'eb1947dd': 'EmptyMarketPrice(address)',
'2ee3d69c': 'EmptyMarketTokenSupply()',
'16307797': 'EmptyOrder()',
'4dfbbff3': 'EmptyPosition()',
'cd64a025': 'EmptyPrimaryPrice(address)',
'd551823d': 'EmptyReceiver()',
'6af5e96f': 'EmptyShift()',
'60d5e84a': 'EmptyShiftAmount()',
'3df42531': 'EmptySizeDeltaInTokens()',
'9fc297fa': 'EmptyTokenTranferGasLimit(address)',
'9231be69': 'EmptyValidatedPrices()',
'6d4bb5e9': 'EmptyWithdrawal()',
'01d6f7b1': 'EmptyWithdrawalAmount()',
'4e48dcda': 'EndOfOracleSimulation()',
'59afd6c6': 'ExternalCallFailed(bytes)',
'2df6dc23': 'FeeBatchNotFound(bytes32)',
'e44992d0': 'GlvAlreadyExists(bytes32,address)',
'057058b6': 'GlvDepositNotFound(bytes32)',
'30b8a225': 'GlvDisabledMarket(address,address)',
'8da31161': 'GlvEnabledMarket(address,address)',
'c8b70b2c': 'GlvInsufficientMarketTokenBalance(address,address,uint256,uint256)',
'80ad6831': 'GlvInvalidLongToken(address,address,address)',
'9673a10b': 'GlvInvalidShortToken(address,address,address)',
'3aa9fc91': 'GlvMarketAlreadyExists(address,address)',
'af7d3787': 'GlvMaxMarketCountExceeded(address,uint256)',
'd859f947': 'GlvMaxMarketTokenBalanceAmountExceeded(address,address,uint256,uint256)',
'66560e7d': 'GlvMaxMarketTokenBalanceUsdExceeded(address,address,uint256,uint256)',
'155712e1': 'GlvNameTooLong()',
'2e3780e5': 'GlvNegativeMarketPoolValue(address,address)',
'3afc5e65': 'GlvNonZeroMarketBalance(address,address)',
'6c00ed8a': 'GlvNotFound(address)',
'232d7165': 'GlvShiftIntervalNotYetPassed(uint256,uint256,uint256)',
'c906a05a': 'GlvShiftMaxPriceImpactExceeded(uint256,uint256)',
'de45e162': 'GlvShiftNotFound(bytes32)',
'9cb4f5c5': 'GlvSymbolTooLong()',
'07e9c4d5': 'GlvUnsupportedMarket(address,address)',
'20dcb068': 'GlvWithdrawalNotFound(bytes32)',
'd90abe06': 'GmEmptySigner(uint256)',
'ee6e8ecf': 'GmInvalidBlockNumber(uint256,uint256)',
'b8aaa455': 'GmInvalidMinMaxBlockNumber(uint256,uint256)',
'c7b44b28': 'GmMaxOracleSigners(uint256,uint256)',
'0f885e52': 'GmMaxPricesNotSorted(address,uint256,uint256)',
'5b1250e7': 'GmMaxSignerIndex(uint256,uint256)',
'dc2a99e7': 'GmMinOracleSigners(uint256,uint256)',
'cc7bbd5b': 'GmMinPricesNotSorted(address,uint256,uint256)',
'a581f648': 'InsufficientBuybackOutputAmount(address,address,uint256,uint256)',
'74cc815b': 'InsufficientCollateralAmount(uint256,int256)',
'2159b161': 'InsufficientCollateralUsd(int256)',
'5dac504d': 'InsufficientExecutionFee(uint256,uint256)',
'bb416f93': 'InsufficientExecutionGas(uint256,uint256,uint256)',
'79293964': 'InsufficientExecutionGasForErrorHandling(uint256,uint256)',
'19d50093': 'InsufficientFundsToPayForCosts(uint256,string)',
'd3dacaac': 'InsufficientGasForCancellation(uint256,uint256)',
'79a2abad': 'InsufficientGasLeftForCallback(uint256,uint256)',
'3083b9e5': 'InsufficientHandleExecutionErrorGas(uint256,uint256)',
'82c8828a': 'InsufficientMarketTokens(uint256,uint256)',
'd28d3eb5': 'InsufficientOutputAmount(uint256,uint256)',
'23090a31': 'InsufficientPoolAmount(uint256,uint256)',
'9cd76295': 'InsufficientRelayFee(uint256,uint256)',
'315276c9': 'InsufficientReserve(uint256,uint256)',
'b98c6179': 'InsufficientReserveForOpenInterest(uint256,uint256)',
'a7aebadc': 'InsufficientSwapOutputAmount(uint256,uint256)',
'041b3483': 'InsufficientWntAmount(uint256,uint256)',
'3a78cd7e': 'InsufficientWntAmountForExecutionFee(uint256,uint256)',
'1d4fc3c0': 'InvalidAdl(int256,int256)',
'8ac146e6': 'InvalidAmountInForFeeBatch(uint256,uint256)',
'eb19d3f5': 'InvalidBaseKey(bytes32)',
'25e5dc07': 'InvalidBlockRangeSet(uint256,uint256)',
'752fdb63': 'InvalidBuybackToken(address)',
'89736584': 'InvalidCancellationReceiverForSubaccountOrder(address,address)',
'5b3043dd': 'InvalidClaimAffiliateRewardsInput(uint256,uint256)',
'42c0d1f2': 'InvalidClaimCollateralInput(uint256,uint256,uint256)',
'7363cfa5': 'InvalidClaimFundingFeesInput(uint256,uint256)',
'74cee48d': 'InvalidClaimUiFeesInput(uint256,uint256)',
'6c2738d3': 'InvalidClaimableFactor(uint256)',
'839c693e': 'InvalidCollateralTokenForMarket(address,address)',
'4a591309': 'InvalidContributorToken(address)',
'8d56bea1': 'InvalidDataStreamBidAsk(address,int192,int192)',
'a4949e25': 'InvalidDataStreamFeedId(address,bytes32,bytes32)',
'2a74194d': 'InvalidDataStreamPrices(address,int192,int192)',
'6e0c29ed': 'InvalidDataStreamSpreadReductionFactor(address,uint256)',
'9fbe2cbc': 'InvalidDecreaseOrderSize(uint256,uint256)',
'751951f9': 'InvalidDecreasePositionSwapType(uint256)',
'9b867f31': 'InvalidExecutionFee(uint256,uint256,uint256)',
'99e26b44': 'InvalidExecutionFeeForMigration(uint256,uint256)',
'831e9f11': 'InvalidExternalCallInput(uint256,uint256)',
'be55c895': 'InvalidExternalCallTarget(address)',
'e15f2701': 'InvalidExternalReceiversInput(uint256,uint256)',
'fa804399': 'InvalidFeeBatchTokenIndex(uint256,uint256)',
'cb9339d5': 'InvalidFeeReceiver(address)',
'be6514b6': 'InvalidFeedPrice(address,int256)',
'fc90fcc3': 'InvalidGlpAmount(uint256,uint256)',
'bf16cb0a': 'InvalidGlvDepositInitialLongToken(address)',
'df0f9a23': 'InvalidGlvDepositInitialShortToken(address)',
'055ab8b9': 'InvalidGlvDepositSwapPath(uint256,uint256)',
'993417d5': 'InvalidGmMedianMinMaxPrice(uint256,uint256)',
'a54d4339': 'InvalidGmOraclePrice(address)',
'8d648a7f': 'InvalidGmSignature(address,address)',
'b21c863e': 'InvalidGmSignerMinMaxPrice(uint256,uint256)',
'e5feddc0': 'InvalidKeeperForFrozenOrder(address)',
'33a1ea6b': 'InvalidMarketTokenBalance(address,address,uint256,uint256)',
'9dd026db': 'InvalidMarketTokenBalanceForClaimableFunding(address,address,uint256,uint256)',
'808c464f': 'InvalidMarketTokenBalanceForCollateralAmount(address,address,uint256,uint256)',
'c08bb8a0': 'InvalidMinGlvTokensForFirstGlvDeposit(uint256,uint256)',
'3f9c06ab': 'InvalidMinMarketTokensForFirstDeposit(uint256,uint256)',
'1608d41a': 'InvalidMinMaxForPrice(address,uint256,uint256)',
'e71a51be': 'InvalidNativeTokenSender(address)',
'05d102a2': 'InvalidOracleProvider(address)',
'68b49e6c': 'InvalidOracleProviderForToken(address,address)',
'f9996e9f': 'InvalidOracleSetPricesDataParam(uint256,uint256)',
'dd51dc73': 'InvalidOracleSetPricesProvidersParam(uint256,uint256)',
'c1b14c91': 'InvalidOracleSigner(address)',
'0481a15a': 'InvalidOrderPrices(uint256,uint256,uint256,uint256)',
'253c8c02': 'InvalidOutputToken(address,address)',
'3c0ac199': 'InvalidPermitSpender(address,address)',
'adaa688d': 'InvalidPoolValueForDeposit(int256)',
'90a6af3b': 'InvalidPoolValueForWithdrawal(int256)',
'182e30e3': 'InvalidPositionMarket(address)',
'bff65b3f': 'InvalidPositionSizeValues(uint256,uint256)',
'663de023': 'InvalidPrimaryPricesForSimulation(uint256,uint256)',
'9cfea583': 'InvalidReceiver(address)',
'77e8e698': 'InvalidReceiverForFirstDeposit(address,address)',
'6eedac2f': 'InvalidReceiverForFirstGlvDeposit(address,address)',
'4baab816': 'InvalidReceiverForSubaccountOrder(address,address)',
'370abac2': 'InvalidRelayParams()',
'530b2590': 'InvalidSetContributorPaymentInput(uint256,uint256)',
'29a93dc4': 'InvalidSetMaxTotalContributorTokenAmountInput(uint256,uint256)',
'2a34f7fe': 'InvalidSignature(string)',
'720bb461': 'InvalidSizeDeltaForAdl(uint256,uint256)',
'3044992f': 'InvalidSubaccountApprovalNonce(uint256,uint256)',
'545e8f2b': 'InvalidSubaccountApprovalSubaccount()',
'cb9bd134': 'InvalidSwapMarket(address)',
'6ba3dd8b': 'InvalidSwapOutputToken(address,address)',
'672e4fba': 'InvalidSwapPathForV1(address[],address)',
'e6b0ddb6': 'InvalidTimelockDelay(uint256)',
'53f81711': 'InvalidTokenIn(address,address)',
'81468139': 'InvalidUiFeeFactor(uint256,uint256)',
'f3d06236': 'InvalidUserNonce(uint256,uint256)',
'1de2bca4': 'InvalidVersion(uint256)',
'bc121108': 'LiquidatablePosition(string,int256,int256,int256)',
'a38dfb2a': 'LongTokensAreNotEqual(address,address)',
'25e34fa1': 'MarketAlreadyExists(bytes32,address)',
'6918f9bf': 'MarketNotFound(address)',
'143e2156': 'MaskIndexOutOfBounds(uint256,string)',
'f0794a60': 'MaxAutoCancelOrdersExceeded(uint256,uint256)',
'4e3f62a8': 'MaxBuybackPriceAgeExceeded(uint256,uint256,uint256)',
'10aeb692': 'MaxCallbackGasLimitExceeded(uint256,uint256)',
'4f82a998': 'MaxFundingFactorPerSecondLimitExceeded(uint256,uint256)',
'2bf127cf': 'MaxOpenInterestExceeded(uint256,uint256)',
'dd9c6b9a': 'MaxOracleTimestampRangeExceeded(uint256,uint256)',
'6429ff3f': 'MaxPoolAmountExceeded(uint256,uint256)',
'46169f04': 'MaxPoolUsdForDepositExceeded(uint256,uint256)',
'2b6e7c3f': 'MaxPriceAgeExceeded(uint256,uint256)',
'3d1986f7': 'MaxRefPriceDeviationExceeded(address,uint256,uint256,uint256)',
'519ba753': 'MaxSubaccountActionCountExceeded(address,address,uint256,uint256)',
'9da36043': 'MaxSwapPathLengthExceeded(uint256,uint256)',
'faf66f0c': 'MaxTimelockDelayExceeded(uint256)',
'c10ceac7': 'MaxTotalCallbackGasLimitForAutoCancelOrdersExceeded(uint256,uint256)',
'043038f0': 'MaxTotalContributorTokenAmountExceeded(address,uint256,uint256)',
'961b4025': 'MinContributorPaymentIntervalBelowAllowedRange(uint256)',
'b9dc7b9d': 'MinContributorPaymentIntervalNotYetPassed(uint256)',
'966fea10': 'MinGlvTokens(uint256,uint256)',
'f442c0bc': 'MinLongTokens(uint256,uint256)',
'6ce23460': 'MinMarketTokens(uint256,uint256)',
'85efb31a': 'MinPositionSize(uint256,uint256)',
'b4a196af': 'MinShortTokens(uint256,uint256)',
'cc32db99': 'NegativeExecutionPrice(int256,uint256,uint256,int256,uint256)',
'53410c43': 'NonAtomicOracleProvider(address)',
'28f773e9': 'NonEmptyExternalCallsForSubaccountOrder()',
'ef2df9b5': 'NonEmptyTokensWithPrices(uint256)',
'730293fd': 'OpenInterestCannotBeUpdatedForSwapOnlyMarket(address)',
'8cf95e58': 'OracleProviderAlreadyExistsForToken(address)',
'd84b8ee8': 'OracleTimestampsAreLargerThanRequestExpirationTime(uint256,uint256,uint256)',
'7d677abf': 'OracleTimestampsAreSmallerThanRequired(uint256,uint256)',
'730d44b1': 'OrderAlreadyFrozen()',
'59485ed9': 'OrderNotFound(bytes32)',
'e09ad0e9': 'OrderNotFulfillableAtAcceptablePrice(uint256,uint256)',
'9aba92cb': 'OrderNotUpdatable(uint256)',
'8a4bd513': 'OrderTypeCannotBeCreated(uint256)',
'cf9319d6': 'OrderValidFromTimeNotReached(uint256,uint256)',
'b92fb250': 'PnlFactorExceededForLongs(int256,uint256)',
'b0010694': 'PnlFactorExceededForShorts(int256,uint256)',
'9f0bc7de': 'PnlOvercorrected(int256,uint256)',
'426cfff0': 'PositionNotFound(bytes32)',
'ee919dd9': 'PositionShouldNotBeLiquidated(string,int256,int256,int256)',
'ded099de': 'PriceAlreadySet(address,uint256,uint256)',
'd4141298': 'PriceFeedAlreadyExistsForToken(address)',
'f0641c92': 'PriceImpactLargerThanOrderSize(int256,uint256)',
'e8266438': 'RequestNotYetCancellable(uint256,uint256,string)',
'e70f9152': 'SelfTransferNotSupported(address)',
'032b3d00': 'SequencerDown()',
'113cfc03': 'SequencerGraceDurationNotYetPassed(uint256,uint256)',
'950227bb': 'ShiftFromAndToMarketAreEqual(address)',
'b611f297': 'ShiftNotFound(bytes32)',
'f54d8776': 'ShortTokensAreNotEqual(address,address)',
'20b23584': 'SignalTimeNotYetPassed(uint256)',
'26025b4e': 'SubaccountApprovalDeadlinePassed(uint256,uint256)',
'9b539f07': 'SubaccountApprovalExpired(address,address,uint256,uint256)',
'9be0a43c': 'SubaccountNotAuthorized(address,address)',
'75885d69': 'SwapPriceImpactExceedsAmountIn(uint256,int256)',
'd2e229e6': 'SwapsNotAllowedForAtomicWithdrawal(uint256,uint256)',
'7bf8d2b3': 'SyncConfigInvalidInputLengths(uint256,uint256)',
'624b5b13': 'SyncConfigInvalidMarketFromData(address,address)',
'8b3d4655': 'SyncConfigUpdatesDisabledForMarket(address)',
'0798d283': 'SyncConfigUpdatesDisabledForMarketParameter(address,string)',
'8ea7eb18': 'SyncConfigUpdatesDisabledForParameter(string)',
'b783c88a': 'ThereMustBeAtLeastOneRoleAdmin()',
'282b5b70': 'ThereMustBeAtLeastOneTimelockMultiSig()',
'979dc780': 'TokenTransferError(address,address,uint256)',
'0e92b837': 'Uint256AsBytesLengthExceeds32Bytes(uint256)',
'6afad778': 'UnableToGetBorrowingFactorEmptyPoolUsd()',
'be4729a2': 'UnableToGetCachedTokenPrice(address,address)',
'11423d95': 'UnableToGetFundingFactorEmptyOpenInterest()',
'7a0ca681': 'UnableToGetOppositeToken(address,address)',
'3a61a4a9': 'UnableToWithdrawCollateral(int256)',
'a35b150b': 'Unauthorized(address,string)',
'99b2d582': 'UnexpectedBorrowingFactor(uint256,uint256)',
'cc3459ff': 'UnexpectedMarket()',
'3b42e952': 'UnexpectedPoolValue(int256)',
'814991c3': 'UnexpectedPositionState()',
'e949114e': 'UnexpectedRelayFeeToken(address,address)',
'a9721241': 'UnexpectedRelayFeeTokenAfterSwap(address,address)',
'785ee469': 'UnexpectedTokenForVirtualInventory(address,address)',
'3af14617': 'UnexpectedValidFromTime(uint256)',
'3784f834': 'UnsupportedOrderType(uint256)',
'0d0fcc0b': 'UnsupportedRelayFeeToken(address,address)',
'eadaf93a': 'UsdDeltaExceedsLongOpenInterest(int256,uint256)',
'2e949409': 'UsdDeltaExceedsPoolValue(int256,uint256)',
'8af0d140': 'UsdDeltaExceedsShortOpenInterest(int256,uint256)',
'60737bc0': 'WithdrawalNotFound(bytes32)',
}
gmx_error_map = {bytes.fromhex(k):v for k,v in gmx_error_map.items()}
def gmx_parse_reason_bytes(e: bytes) -> str:
sig_bytes = e[:4]
sig = gmx_error_map.get(sig_bytes)
if sig is None:
return f'Unknown GMX error {e.hex()}'
name, types = sig.split('(',1)
types = types[:-1]
if len(e) > 4:
data = e[4:]
values = abi_decoder.decode(types.split(','), data)
return f'{name}({",".join(map(str, values))})'
return name
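The 4-byte keys above are standard Solidity custom-error selectors: the first four bytes of the keccak-256 hash of the error signature. A sanity-check sketch of that relationship (illustrative, not part of the module):
from eth_utils import keccak

def error_selector(signature: str) -> bytes:
    # Solidity custom errors are identified by keccak256(signature)[:4]
    return keccak(text=signature)[:4]

# rebuilding the table from its signatures should reproduce the byte keys,
# and a bare selector with no argument data decodes to just the error name
rebuilt = {error_selector(sig): sig for sig in gmx_error_map.values()}
assert rebuilt == gmx_error_map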

446
src/dexorder/gmx/_handle.py Normal file

@@ -0,0 +1,446 @@
import asyncio
import logging
from copy import copy
from datetime import timedelta
from eth_utils import to_checksum_address
from web3.types import EventData
from ._abi import parse_event_log_data
from ._base import GMXPosition, gmx_positions, GMXOrderType, gmx_tk_in_flight, tk_gmx_in_flight, gmx_api, \
gmx_markets_by_index_token
from ._chaininfo import gmx_chain_info
from ._error import gmx_parse_reason_bytes
from ._metadata import gmx_update_metadata
from .. import dec, from_timestamp
from ..addrmeta import address_metadata
from ..base import OldTokenDict, OldGMXDict
from ..base.chain import current_chain
from ..base.order import TrancheKey
from ..contract import get_contract_event
from ..contract.dexorder import get_dexorder_contract
from ..event_handler import update_pool_price
from ..final_ohlc import FinalOHLCRepository
from ..ohlc import period_name
from ..periodic import periodic
from ..progressor import BlockProgressor
from ..tokens import get_token
from ..util import hexstr
from ..util.async_util import maywait
log = logging.getLogger(__name__)
def gmx_wire_runner_init(runner: BlockProgressor):
pass
def gmx_wire_runner_early(runner: BlockProgressor, backfill: FinalOHLCRepository = None):
runner.add_event_trigger(handle_gmxcallbackerror_event, get_contract_event('GMXCallbackHandler', 'GMXCallbackError'))
runner.add_callback(gmx_handle_metadata_update)
if backfill is not None:
runner.add_callback(create_backfill_handler(backfill) if backfill else gmx_update_prices)
runner.add_event_trigger(handle_gmx_events, log_filter={'address':gmx_chain_info[current_chain.get().id]['EventEmitter'], })
runner.add_event_trigger(handle_gmxorderplaced, get_contract_event('GMX', 'GMXOrderPlaced'))
def gmx_wire_runner_late(runner: BlockProgressor):
pass
def handle_gmxcallbackerror_event(event: EventData):
log.error(f'GMX callback error {event["args"]["reason"]}')
# GMX orders wait on-chain a few blocks before the GMX Handlers execute or cancel them. Also, liquidation orders can
# occur without any associated vault order. Therefore, we take the following approach:
#
# When orders are placed, a GMXOrderPlaced event is emitted alongside the DexorderSwapPlaced event, providing a mapping
# between vault tranche keys and GMX order keys, as well as an in-flight locking mechanism in both the vault and
# backend. In a few blocks' time, the GMX Handlers will deal with the order and emit an OrderCreated or OrderCancelled
# event in addition to invoking the corresponding callback method on the vault, which unlocks the tranche, adjusts
# rate limits, and emits the regular DexorderSwapFilled event, using amountOut as the USD amount filled and amountIn
# as the "price," a virtual amount calculated to make the execution price equal amountOut/amountIn, matching the format
# for non-inverted swaps.
#
# Therefore, the regular backend triggers and fill records act normally on GMX orders without modification.
#
# The backend's in-flight lock and the tranche-key-to-GMX-order-key mapping are maintained in gmx_in_flight, opened by
# a vault event and closed by a GMX order event.
#
# The Position object is maintained by watching GMX PositionIncrease and PositionDecrease events, which capture
# liquidations as well as vault-initiated orders to accurately maintain the Position state.
def invalid_vault(vault):
# return vault not in vault_owners
return False # todo debug
#
# GMXOrderPlaced along with OrderCancelled and OrderExecuted maintain the gmx_in_flight lock and mapping to a tranche key
#
def handle_gmxorderplaced(event: EventData):
# This is emitted alongside the DexorderSwapPlaced event in order to provide additional information for GMX.
# event GMXOrderPlaced(uint64 orderIndex, uint8 trancheIndex, bytes32 gmxOrderKey);
log.info(f'GMXOrderPlaced {event}')
vault = event['address']
if invalid_vault(vault):
return
order_index = event['args']['orderIndex']
tranche_index = event['args']['trancheIndex']
gmx_order_key = event['args']['gmxOrderKey']
# register the gmx order key as in-flight
keystr = hexstr(gmx_order_key)
tk = TrancheKey(vault, order_index, tranche_index)
# start gmx in flight. see end_gmx_in_flight()
gmx_tk_in_flight[keystr] = tk
tk_gmx_in_flight[tk] = keystr
def handle_ordercancelled_event(event: dict, data: dict):
log.info(f'GMX order cancelled {data}')
vault = data['account']
if invalid_vault(vault):
return
reason = gmx_parse_reason_bytes(data['reasonBytes'])
gmx_order_key = data['key']
if hexstr(gmx_order_key) not in gmx_tk_in_flight:
log.warning(f'GMX order cancelled but not in flight: {gmx_order_key}')
return
end_gmx_in_flight(gmx_order_key)
log.info(f'GMX order cancelled due to {reason} in tx {data["tx"]}')
def handle_orderexecuted_event(event: dict, data: dict):
log.info(f'GMX order executed {data}')
vault = data['account']
if invalid_vault(vault):
return
gmx_order_key = data['key']
if hexstr(gmx_order_key) not in gmx_tk_in_flight:
# todo handle liquidation either here or with PositionDecrease events
log.warning(f'GMX order executed but not in flight: {gmx_order_key}')
return
end_gmx_in_flight(gmx_order_key)
def end_gmx_in_flight(gmx_order_key):
gmx_order_key = hexstr(gmx_order_key)
tk = gmx_tk_in_flight[gmx_order_key]
del gmx_tk_in_flight[gmx_order_key]
del tk_gmx_in_flight[tk]
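gmx_tk_in_flight and tk_gmx_in_flight form a bidirectional index between GMX order keys and tranche keys, so both sides must be opened and closed together. An illustrative invariant (not part of the handlers) that the pair of maps should always satisfy:
assert all(tk_gmx_in_flight[tk] == key for key, tk in gmx_tk_in_flight.items())
assert all(gmx_tk_in_flight[key] == tk for tk, key in tk_gmx_in_flight.items())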
#
# GMXPositionIncrease and GMXPositionDecrease events maintain our Position records
#
def handle_position_event(event: dict, data: dict, is_increase: bool):
log.info(f'GMX position {"increase" if is_increase else "decrease"} {event}')
# {'account': '0xdfc16a4247677d723d897aa4fe865a02f5d78746',
# 'borrowingFactor': 250545812647447573795593810338,
# 'collateralAmount': 1019200,
# 'collateralDeltaAmount': 1019200,
# 'collateralToken': '0xaf88d065e77c8cc2239327c5edb3a432268e5831',
# 'collateralTokenPrice.max': 999856563986601850000000,
# 'collateralTokenPrice.min': 999856563986601850000000,
# 'event': 'PositionIncrease',
# 'executionPrice': 3816407734365198,
# 'fundingFeeAmountPerSize': 430546959972637644839,
# 'increasedAtTime': 1753748680,
# 'indexTokenPrice.max': 3817347116613155,
# 'indexTokenPrice.min': 3817347116613155,
# 'isLong': True,
# 'longTokenClaimableFundingAmountPerSize': 4117446384759965489999004204,
# 'market': '0x70d95587d40a2caf56bd97485ab3eec10bee6336',
# 'orderKey': b'2\xe6\x8a\x07\xe9x\x839\x8f\xdd\xd5j\x16\x88\x80\xff[HY\xadk\x0f\xb4n3\xfe\xa2.\xd6\x97\x90\x9b',
# 'orderType': 2,
# 'positionKey': b"\xa8r\xc6\xcf^\x89\xf8k\xfa='\xe9\x19\x12\x11\xb8|;k3Df8\xee^\x9a\x9f)\xef8\x8c\x86",
# 'priceImpactAmount': 128960267235,
# 'priceImpactUsd': 492286104290598018742093888,
# 'sender': '0xe68caaacdf6439628dfd2fe624847602991a31eb',
# 'shortTokenClaimableFundingAmountPerSize': 7250294981528901831,
# 'sizeDeltaInTokens': 524053020328728,
# 'sizeDeltaUsd': 2000000000000000000000000000000,
# 'sizeInTokens': 524053020328728,
# 'sizeInUsd': 2000000000000000000000000000000,
# 'tx': '0x74e3aee1e4a92d3fe4e05d8050197c080c51dc0170ac12e8e90dbbe9fb3cc4b5'}
vault = to_checksum_address(data['account'])
if invalid_vault(vault):
return
order_type = GMXOrderType(data['orderType'])
gmx_order_key = data['orderKey']
is_long = data['isLong']
size_delta = data['sizeDeltaUsd']
size = data['sizeInUsd']
market = data['market']
collateral_token = data['collateralToken']
collateral_amount = data['collateralAmount']
collateral_delta = data['collateralDeltaAmount']
price = data['executionPrice']
key = GMXPosition.Key(market, collateral_token, is_long)
positions = gmx_positions.get(vault)
pos = GMXPosition(key.market_token, key.collateral_token, key.is_long)
if positions is None:
positions = [pos]
else:
positions = list(positions)
if pos in positions:
old = [p for p in positions if p==pos][0]
positions.remove(old)
pos = copy(old)
positions.append(pos)
buy = is_long == is_increase
if buy:
if -size_delta < pos.size < 0:
log.error(f'GMX short position becoming positive: {pos} + {size_delta}')
pos.size += size_delta
else:
if 0 < pos.size < size_delta:
log.error(f'GMX long position becoming negative: {pos} - {size_delta}')
pos.size -= size_delta
if abs(pos.size) != size:
log.error(f'GMX position size mismatch: {pos} != {size}')
if not pos.size:
positions.remove(pos)
if not positions:
del gmx_positions[vault]
else:
gmx_positions[vault] = positions
# todo DANNY: if a position is liquidated, should I cancel pending orders in that market?
def handle_positionincrease_event(event: dict, data: dict):
handle_position_event(event, data, True)
def handle_positiondecrease_event(event: dict, data: dict):
handle_position_event(event, data, False)
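handle_position_event() keeps long sizes positive and short sizes negative, so buy = (is_long == is_increase) picks the sign of the applied delta. A worked example with hypothetical values in GMX's 30-decimal USD fixed point:
# hypothetical: a $2 increase to an existing $3 long (PositionIncrease, isLong=True)
pos_size = 3 * 10**30
size_delta = 2 * 10**30
buy = True                # is_long == is_increase
pos_size += size_delta    # -> 5 * 10**30, which should equal the event's sizeInUsd
# hypothetical: decreasing a $1 short by $1.5 also yields buy == True, and
# -size_delta < pos_size < 0 trips the "short position becoming positive" branch above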
# def handle_depositcreated_event(event: dict, data: dict):
# log.info(f'GMX deposit created {event}')
#
# def handle_depositexecuted_event(event: dict, data: dict):
# log.info(f'GMX deposit executed {event}')
#
# def handle_withdrawalcreated_event(event: dict, data: dict):
# log.info(f'GMX withdrawal created {event}')
#
# def handle_withdrawalexecuted_event(event: dict, data: dict):
# log.info(f'GMX withdrawal executed {event}')
event_handlers = {
'OraclePriceUpdate': None,
'MarketPoolValueInfo': None,
'MarketPoolValueUpdated': None,
'DepositCreated': None,
'DepositExecuted': None,
'WithdrawalCreated': None,
'WithdrawalExecuted': None,
'OrderCreated': None,
'OrderUpdated': None,
'OrderCancelled': handle_ordercancelled_event,
'OrderExecuted': handle_orderexecuted_event,
'OrderSizeDeltaAutoUpdated': None, # ADL?
'OrderCollateralDeltaAmountAutoUpdated': None,
'PositionIncrease': handle_positionincrease_event,
'PositionDecrease': handle_positiondecrease_event,
'PositionFeesCollected': None,
'PositionImpactPoolAmountUpdated': None,
'PositionImpactPoolDistributed': None,
'VirtualPositionInventoryUpdated': None,
'ClaimableFeeAmountUpdated': None,
'ClaimableFundingUpdated': None,
'ClaimableFundingAmountPerSizeUpdated': None,
'FundingFeeAmountPerSizeUpdated': None,
'FundingFeesClaimed': None,
'CollateralSumUpdated': None,
'CollateralClaimed': None,
'OpenInterestInTokensUpdated': None,
'OpenInterestUpdated': None,
'SetAvailableFeeAmount': None,
'BuybackFees': None,
'FeesClaimed': None,
'ExecutionFeeRefundCallback': None,
'PoolAmountUpdated': None,
'SwapInfo': None,
'SwapFeesCollected': None,
'SwapImpactPoolAmountUpdated': None,
'VirtualSwapInventoryUpdated': None,
'CumulativeBorrowingFactorUpdated': None,
'KeeperExecutionFee': None,
'ExecutionFeeRefund': None,
'SetUint': None,
# SetBytes32 presumably and others...
'SyncConfig': None,
'ShiftCreated': None,
'ShiftExecuted': None,
'GlvValueUpdated': None,
'GlvDepositCreated': None,
'GlvDepositExecuted': None,
'GlvWithdrawalCreated': None,
'GlvWithdrawalExecuted': None,
'GlvShiftCreated': None,
'GlvShiftExecuted': None,
'AffiliateRewardUpdated': None,
'AffiliateRewardClaimed': None,
'SetMaxAllowedSubaccountActionCount': None,
'IncrementSubaccountActionCount': None,
'SetSubaccountAutoTopUpAmount': None,
'SubaccountAutoTopUp': None,
}
async def handle_gmx_events(events: list[dict]):
for event in events:
data = parse_event_log_data(event)
log.info(f'GMX Event {data}')
event_name = data['event']
try:
func = event_handlers[event_name]
except KeyError:
log.debug(f'Unknown event {event_name}')
else:
if func:
await maywait(func(event, data))
#
# Metadata update triggers
# todo These are here because they used to be blockchain event handlers and should be once again...
#
initialized = False
@periodic(timedelta(hours=1))
async def gmx_handle_metadata_update():
global initialized
# noinspection PyBroadException
try:
await gmx_update_metadata()
initialized = True
except:
if not initialized:
raise
log.exception('Exception in gmx_handle_metadata_update()')
# @periodic(timedelta(seconds=1))
# async def gmx_handle_price_update():
# updates = await fetch_price_updates()
# # ticker updates have only one price per addr so we can parallelize setting prices
# await asyncio.gather(*[update_pool_price(addr, time, price, 30) for addr, time, price in updates])
def create_backfill_handler(ohlcs: FinalOHLCRepository):
@periodic(timedelta(seconds=1))
async def gmx_handle_price_update_with_backfill():
updates = await fetch_price_updates()
backfill_addrs = [addr for addr, time, price in updates if not ohlcs.has_symbol(addr)]
if backfill_addrs:
log.info(f'Backfilling {len(backfill_addrs)} new GMX tokens')
await asyncio.gather(*[backfill_token(ohlcs, a) for a in backfill_addrs])
for addr, time, price in updates:
ohlcs.update(addr, time, price)
return gmx_handle_price_update_with_backfill
def push_candle(ohlcs, addr, period, candle):
time, *prices = candle
time = from_timestamp(time)
prices = [dec(p) for p in prices]
ohlcs.update_ohlc(addr, period, time, *prices)
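push_candle() assumes each candle is a flat [timestamp, open, high, low, close] row (see response['candles'] in backfill_token below). A hypothetical sample, with illustrative values:
candle = [1753748640, '3816.4', '3820.1', '3810.0', '3818.9']
# from_timestamp(1753748640) becomes the bar time, each price goes through dec(),
# and the results are forwarded to ohlcs.update_ohlc(addr, period, time, o, h, l, c)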
GMX_OHLC_PERIODS = [
timedelta(minutes=1),
timedelta(minutes=5),
timedelta(minutes=15),
timedelta(hours=1),
timedelta(hours=4),
timedelta(days=1),
]
async def backfill_token(ohlcs: FinalOHLCRepository, addr: str):
token = await get_token(addr)
addr = token['address']
for period in GMX_OHLC_PERIODS:
# Polling a large window is the only history method GMX provides :( It's also how their web client works!
symbol = token['symbol']
interval = period_name(period).lower()
response = gmx_api('prices/candles', tokenSymbol=symbol, period=interval, limit=10_000)
if 'error' in response:
if not response['error'].startswith('unsupported period'):
log.warning(f'Could not query token backfill for {token["symbol"]}: {response["error"]}')
else:
for c in reversed(response['candles']):
push_candle(ohlcs, addr, period, c)
log.info(f'Backfilled new GMX token {token["symbol"]}')
@periodic(timedelta(seconds=1))
async def gmx_update_prices():
for token, time, price in await fetch_price_updates():
for market in gmx_markets_by_index_token.get(token, []):
info: OldGMXDict = address_metadata[market]
decimals = info['decimals']
await update_pool_price(market, time, price*dec(10)**decimals, decimals)
async def fetch_price_updates():
tokens = list(gmx_markets_by_index_token.keys())
prices = await get_dexorder_contract().getGMXPrices(tokens)
factor = dec(10)**-30
return [
(addr, from_timestamp(timestamp), (dec(bid) + dec(ask)) / 2 * factor)
for addr, (timestamp, bid, ask) in zip(tokens, prices)
]
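getGMXPrices() returns a (timestamp, bid, ask) tuple per token in GMX's 30-decimal fixed point, where the raw value prices the token's smallest unit; gmx_update_prices() above then rescales by 10**decimals to get a whole-token price. A worked example for a hypothetical 18-decimal index token:
bid = ask = 3817347116613155                      # hypothetical raw oracle price
mid = (dec(bid) + dec(ask)) / 2 * dec(10)**-30    # USD per smallest token unit
usd_per_token = mid * dec(10)**18                 # -> Decimal('3817.347116613155')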
async def fetch_price_updates_using_gmx_api():
updates = []
# todo use on-chain oracle events
for t in gmx_api('prices/tickers'):
"""
{
"tokenAddress": "0x3Eea56A1ccCdbfB70A26aD381C71Ee17E4c8A15F",
"tokenSymbol": "BOME",
"minPrice": "1621019778803375000000",
"maxPrice": "1621534421901125000000",
"updatedAt": 1749849326251,
"timestamp": 1749849325
},
"""
addr = t['tokenAddress']
if addr not in address_metadata:
continue
# GMX prices use 30 decimal places
price = (dec(t['minPrice']) + dec(t['maxPrice'])) / 2 * dec(10) ** dec(-30)
time = from_timestamp(t['timestamp'])
updates.append((addr, time, price))
return updates

93
src/dexorder/gmx/_metadata.py Normal file

@@ -0,0 +1,93 @@
__all__ = ['gmx_update_metadata']
import asyncio
import logging
import re
from typing import Optional
from dexorder import ADDRESS_0
from dexorder.addrmeta import address_metadata
from dexorder.base import OldTokenDict, OldGMXDict
from dexorder.base.chain import current_chain
from dexorder.base.orderlib import Exchange
from dexorder.gmx._base import gmx_api, gmx_markets_by_index_token
from dexorder.gmx._contract import get_gmx_contract
from dexorder.tokens import get_token
log = logging.getLogger(__name__)
async def gmx_update_metadata():
log.info('Updating GMX metadata')
await gmx_detect_markets()
token_response: Optional[dict] = None
async def gmx_get_token(addr: str):
# The GMX API appears to be the only way to obtain the index token metadata, since there is no corresponding ERC20
# on-chain at the synthetic address.
found = await get_token(addr, squelch=True) # use our normal lookup first
if found is not None:
return found
global token_response
if token_response is None or not any(t['address'] == addr for t in token_response['tokens']):
token_response = gmx_api('tokens')
for info in token_response['tokens']:
if info['address'] == addr:
synthetic = info.get('synthetic',False)
if not synthetic:
log.warning('loading non-synthetic token via GMX API')
name = f'GMX {info["symbol"]}'
if synthetic:
name += ' Synthetic'
chain_id = current_chain.get().id
approved = not re.search(r'deprecated', info['symbol'], re.IGNORECASE)
token = OldTokenDict(type='Token', chain=chain_id, address=info['address'], name=name,
symbol=info['symbol'], decimals=info['decimals'],
approved=approved)
address_metadata[info['address']] = token
return token
log.error(f'Could not find index token {addr} in GMX tokens API')
return None
async def gmx_detect_markets():
ds = get_gmx_contract('DataStore')
reader = get_gmx_contract('Reader')
market_info = await reader.getMarkets(ds.address, 0, 1000)
markets = [
OldGMXDict(type='GMX', chain=current_chain.get().id, exchange=Exchange.GMX.value, address=market_token,
index=index_token, long=long_token, short=short_token, decimals=0, leverage=0)
for market_token, index_token, long_token, short_token in market_info
# discard spot-only markets that do not have an index token
# todo support single-asset markets
if market_token != ADDRESS_0 and index_token != ADDRESS_0 and
long_token != ADDRESS_0 and short_token != ADDRESS_0 and market_token not in address_metadata
]
market_disabled = await asyncio.gather(*[ds.is_market_disabled(m['address']) for m in markets])
new_markets = [m for m,d in zip(markets, market_disabled) if not d and m['address'] not in address_metadata]
async def init_market(m: OldGMXDict):
min_collateral_factor, token = await asyncio.gather(
ds.min_collateral_factor(m['address']), gmx_get_token(m['index']))
m['decimals'] = token['decimals']
m['leverage'] = round(1 / min_collateral_factor)
address_metadata[m['address']] = m
cur = gmx_markets_by_index_token.get(m['index'])
if cur is None:
gmx_markets_by_index_token[m['index']] = [m['address']]
else:
if m['address'] not in cur:
gmx_markets_by_index_token[m['index']] = cur + [m['address']]
await asyncio.gather(*[init_market(m) for m in new_markets])
token_addrs = set(t for m in new_markets for t in (m['address'], m['long'], m['short']))
await asyncio.gather(*[get_token(t) for t in token_addrs])
# Log the markets
def t(addr):
# noinspection PyTypedDict
return address_metadata[addr]['symbol'] if addr in address_metadata and address_metadata[addr] else addr
for m in new_markets:
log.info(f'GMX:{m["address"]} {t(m["index"])}/USD [{t(m["long"])}-{t(m["short"])}] {m["leverage"]}x')
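min_collateral_factor is the inverse of the maximum allowed leverage, which is how init_market() derives m['leverage'] above. A sketch assuming the contract wrapper returns the factor as a fraction rather than raw 1e30 fixed point (an assumption, not verified here):
min_collateral_factor = 0.01                  # hypothetical: 1% minimum collateral
leverage = round(1 / min_collateral_factor)   # -> 100, stored as m['leverage']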

44
src/dexorder/marks.py Normal file

@@ -0,0 +1,44 @@
"""
"marks" are mark-to-market USD values of a selected set of tokens called quote tokens. Publishing a set of USD marks
for the quote tokens allows almost any token to be marked to USD via one hop.
"""
import logging
import time
from dexorder import dec, NATIVE_TOKEN, config
from dexorder.base.chain import current_chain
from dexorder.blockstate import BlockDict
from dexorder.pools import quotes, mark_to_market
log = logging.getLogger(__name__)
def pub_marks(_s,k,v):
chain_id = current_chain.get().id
return str(chain_id), 'marks.usd', (chain_id, k, str(v))
marks: BlockDict[str, dec] = BlockDict('mark.usd', db=False, redis=True, pub=pub_marks, value2str=str)
class RateLimiter:
def __init__(self, rate: float):
self.rate = rate
self.last_update = 0.0
def ready(self):
now = time.monotonic()
if now - self.last_update < self.rate:
return False
self.last_update = now
return True
mark_publish_rate = RateLimiter(config.mark_publish_seconds)
def publish_marks():
if mark_publish_rate.ready():
for token_addr in [NATIVE_TOKEN]+quotes:
# overwrite=False checks the previous value and does not generate a diff if the values match. This prevents
# excessive updates to Redis
value = mark_to_market(token_addr)
if value is not None:
marks.setitem(token_addr, value, overwrite=False)
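With USD marks published for the quote tokens, any token that trades against a quote token can be marked to USD in a single multiplication. An illustrative one-hop mark with hypothetical numbers:
weth_usd = dec('3817.35')              # published USD mark for the quote token
token_in_weth = dec('0.0521')          # pool price of some token quoted in WETH
token_usd = token_in_weth * weth_usd   # -> Decimal('198.883935')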


@@ -10,16 +10,70 @@ from dexorder import config
log = logging.getLogger(__name__)
BATCH_SIZE = 1_000
class PipelineProxy:
def __init__(self, pipe: Pipeline):
self.pipe = pipe
self.ops = 0
async def push(self, num=1):
self.ops += num
if self.ops >= BATCH_SIZE:
self.ops = 0
await self.pipe.execute()
async def sadd(self, series, *keys):
while keys:
most = min(BATCH_SIZE-self.ops, len(keys))
assert most > 0
send = keys[:most]
keys = keys[most:]
await self.pipe.sadd(series, *send)
await self.push(len(send))
async def srem(self, series, *keys):
while keys:
most = min(BATCH_SIZE-self.ops, len(keys))
assert most > 0
send = keys[:most]
keys = keys[most:]
await self.pipe.srem(series, *send)
await self.push(len(send))
async def hset(self, series, *, mapping):
items = list(mapping.items())
while items:
most = min(BATCH_SIZE-self.ops, len(items))
assert most > 0
send = items[:most]
items = items[most:]
await self.pipe.hset(series, mapping={k:v for k,v in send})
await self.push(len(send))
async def hdel(self, series, *keys):
while keys:
most = min(BATCH_SIZE-self.ops, len(keys))
assert most > 0
send = keys[:most]
keys = keys[most:]
await self.pipe.hdel(series, *send)
await self.push(len(send))
def __getattr__(self, item):
return getattr(self.pipe, item)
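PipelineProxy exists so that a large state dump cannot overflow a single Redis pipeline: each wrapped command counts toward BATCH_SIZE, and the pipeline is executed and reused whenever the budget is reached. A sketch of the intended call pattern (series name and keys are illustrative):
async def dump_vaults(keys):
    # transaction=False mirrors how RedisState.save() batches very large loads
    async with Memcache.batch(False) as r:
        await r.sadd('42161|vaults', *keys)  # auto-flushes every BATCH_SIZE commands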
class Memcache:
@staticmethod
@asynccontextmanager
-async def batch():
+async def batch(transaction=True):
old_redis: Redis = current_redis.get()
-pipe: Pipeline = old_redis.pipeline()
+pipe = old_redis.pipeline(transaction=transaction)
+# noinspection PyTypeChecker
current_redis.set(pipe)
try:
-yield pipe
+yield PipelineProxy(pipe)
await pipe.execute()
finally:
current_redis.set(old_redis)


@@ -12,7 +12,7 @@ from dexorder.blockstate.blockdata import SeriesCollection, BlockData
from dexorder.blockstate.diff import DiffEntryItem
from dexorder.blockstate.fork import Fork
from dexorder.blockstate.state import compress_diffs
-from dexorder.memcache import current_redis, memcache
+from dexorder.memcache import current_redis, memcache, PipelineProxy
from dexorder.util import hexstr
from dexorder.util.async_util import maywait
from dexorder.util.json import json_encoder
@@ -40,11 +40,11 @@ class RedisState (SeriesCollection):
for series in self.datas.keys():
for k, v in state.iteritems(fork, series):
diffs.append(DiffItem(series, k, v))
-await self.save(fork, diffs)
+await self.save(fork, diffs, use_transaction=False, skip_pubs=True) # use_transaction=False if the data is too big
# noinspection PyAsyncCall
-async def save(self, fork: Fork, diffs: Reversible[Union[DiffItem, DiffEntryItem]]):
+async def save(self, fork: Fork, diffs: Reversible[Union[DiffItem, DiffEntryItem]], *, use_transaction=True, skip_pubs=False):
# the diffs must be already compressed such that there is only one action per key
chain = current_chain.get()
chain_id = chain.id
@@ -91,22 +91,23 @@
hsets[series][key] = value
else:
raise NotImplementedError
-async with memcache.batch() as r:
-r: Pipeline
+async with memcache.batch(use_transaction) as r:
+r: PipelineProxy
for series, keys in sadds.items():
-r.sadd(series, *keys)
+await r.sadd(series, *keys)
for series, keys in sdels.items():
-r.srem(series, *keys)
+await r.srem(series, *keys)
for series, kvs in hsets.items():
-r.hset(series, mapping=kvs)
+await r.hset(series, mapping=kvs)
for series, keys in hdels.items():
-r.hdel(series, *keys)
+await r.hdel(series, *keys)
block_series = f'{chain_id}|head'
headstr = hexstr(fork.head)
r.json(json_encoder).set(block_series,'$',[fork.height, headstr])
pubs.append((str(chain_id), 'head', [fork.height, headstr]))
# separate batch for pubs
-if pubs:
+if pubs and not skip_pubs:
await publish_all(pubs)


@@ -26,10 +26,10 @@ import sys
from typing import Union, Iterable, Optional
from dexorder import config, NARG
+from dexorder.base import OldPoolDict, OldTokenDict
from dexorder.base.chain import current_chain
from dexorder.database.model import Token, Pool
-from dexorder.database.model.pool import OldPoolDict, PoolDict
+from dexorder.database.model.pool import PoolDict
-from dexorder.database.model.token import OldTokenDict, TokenDict
from dexorder.util import json
log = logging.getLogger(__name__)
@@ -50,7 +50,6 @@ def dump_tokens(out, tokens, include_unapproved=False):
approved_addrs = set()
had_output = False
for token in tokens:
-token: Token
if isinstance(token, Token):
token: Token
a = token.address


@@ -342,11 +342,17 @@ class OHLCRepository:
def add_symbol(symbol: str, period: timedelta = None):
if period is not None:
if (symbol, period) not in recent_ohlcs:
-recent_ohlcs[(symbol, period)] = [] # setting an empty value will initiate price capture
+recent_ohlcs[OHLCKey(symbol, period)] = [] # setting an empty value will initiate price capture
else:
for period in OHLC_PERIODS:
if (symbol, period) not in recent_ohlcs:
-recent_ohlcs[(symbol, period)] = []
+recent_ohlcs[OHLCKey(symbol, period)] = []
@staticmethod
def has_symbol(symbol: str, period: timedelta):
return OHLCKey(symbol, period) in recent_ohlcs
async def update_all(self, symbol: str, time: datetime, price: dec, *, create: bool = True):
""" the update_all() and update() methods generate bars for the recent_ohlcs BlockDict """
@@ -359,45 +365,43 @@ class OHLCRepository:
if price is None, then bars are advanced based on the time but no new price is added to the series.
"""
if OHLC_LIMIT_POOLS_DEBUG is not None and (symbol,period) not in OHLC_LIMIT_POOLS_DEBUG:
-return
+return None
# logname = f'{symbol} {period_name(period)}'
# log.debug(f'Updating OHLC {logname} {minutely(time)} {price}')
if price is not None:
self.quotes[symbol] = timestamp(time), str(price)
-key = symbol, period
+key = OHLCKey(symbol, period)
# recent_ohlcs holds a list of "recent" NativeOHLC's stored as blockdata. we try to keep the recent array long
# enough to extend prior the root block time
historical: Optional[list[NativeOHLC]] = recent_ohlcs.get(key)
# log.debug(f'got recent {historical}')
if not historical:
if create is False or price is None:
-return # do not track symbols which have not been explicity set up
+return None # do not track symbols which have not been explicity set up
+historical = []
updated = [NativeOHLC(ohlc_start_time(time, period), price, price, price, price)]
# log.debug(f'\tcreated new bars {updated}')
else:
updated = update_ohlc(historical[-1], period, time, price)
-# drop any historical bars that are older than we need
+# overlap the updated OHLC's on top of the historical ones
-# oldest_needed = cover the root block time plus one period prior
-root_branch = current_blockstate.get().root_branch
-root_hash = root_branch.head
-if root_hash is not None:
-root_timestamp = await get_block_timestamp(root_hash)
-oldest_needed = from_timestamp(root_timestamp) - period
-# noinspection PyTypeChecker
-trim = (oldest_needed - historical[0].start) // period
-if trim > 0:
-historical = historical[trim:]
-# now overlap the updated data on top of the historical data
-if not historical or not updated:
-updated = historical + updated
-else:
last_bar = historical[-1].start
first_updated = updated[0].start
overlap = (first_updated - last_bar) // period + 1
updated = historical[:-overlap] + updated if overlap > 0 else historical + updated
-# log.debug(f'\tnew recents: {updated}')
+# drop any bars that are older than we need
+# oldest_needed = cover the root block time plus one period prior
+root_branch = current_blockstate.get().root_branch
+root_hash = root_branch.head
+if root_hash is not None:
+root_timestamp = await get_block_timestamp(root_hash)
+oldest_needed = from_timestamp(root_timestamp) - period
+# noinspection PyTypeChecker
+trim = (oldest_needed - updated[0].start) // period
+if trim > 0:
+updated = updated[trim:]
+# if len(updated) > 3:
+# log.debug(f'\tnew recents ({len(updated)}): {updated}')
recent_ohlcs.setitem(key, updated)
return updated
@@ -431,7 +435,7 @@ class OHLCRepository:
return found
def flush(self) -> None:
-log.debug(f'flushing {len(self.dirty_chunks)} chunks')
+# log.debug(f'flushing {len(self.dirty_chunks)} chunks')
for chunk in self.dirty_chunks:
chunk.save()
self.dirty_chunks.clear()
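The overlap splice in update() replaces any historical bars that the freshly generated bars supersede. A worked example with 1-minute bars and hypothetical times:
from datetime import datetime, timedelta
period = timedelta(minutes=1)
last_bar = datetime(2025, 4, 1, 12, 5)        # start of historical[-1]
first_updated = datetime(2025, 4, 1, 12, 5)   # update_ohlc() re-emitted this bar
overlap = (first_updated - last_bar) // period + 1   # -> 1
# updated = historical[:-1] + updated, so the re-emitted 12:05 bar replaces the old one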


@@ -6,19 +6,23 @@ from uuid import UUID
from web3.exceptions import ContractPanicError, ContractLogicError
from web3.types import EventData
-from dexorder import db, metric
+from dexorder import db, metric, config
from dexorder.accounting import accounting_transaction_gas
from dexorder.base import TransactionReceiptDict, TransactionRequest, transaction_request_deserializers
from dexorder.base.order import TrancheKey, OrderKey
-from dexorder.base.orderlib import PriceProof
+from dexorder.base.orderlib import PriceProof, Exchange
+from dexorder.contract import ContractProxy
+from dexorder.contract.contract_proxy import ContractTransaction
from dexorder.contract.dexorder import get_dexorder_contract
from dexorder.database.model.accounting import AccountingSubcategory
from dexorder.database.model.transaction import TransactionJob
+from dexorder.gmx import tk_gmx_in_flight
from dexorder.order.orderstate import Order
-from dexorder.order.triggers import (inflight_execution_requests, OrderTriggers,
+from dexorder.order.triggers import (OrderTriggers,
TrancheState, active_tranches, order_error)
from dexorder.transactions import TransactionHandler, submit_transaction_request
from dexorder.util import hexbytes
+from dexorder.vault_blockdata import refresh_vault_balances
log = logging.getLogger(__name__)
@@ -67,10 +71,18 @@ class TrancheExecutionHandler (TransactionHandler):
def __init__(self):
super().__init__('te')
-async def build_transaction(self, job_id: UUID, req: TrancheExecutionRequest) -> dict:
+async def build_transaction(self, job_id: UUID, req: TrancheExecutionRequest) -> Optional[ContractTransaction]:
tk = req.tranche_key
try:
-return await get_dexorder_contract().build.execute(job_id.bytes, (req.vault, req.order_index, req.tranche_index, req.price_proof))
+kwargs = {}
+if Order.of(tk).order.route.exchange == Exchange.GMX:
+if tk_gmx_in_flight.get(tk):
+return None # a GMX order is already in flight
+fee = await ContractProxy(req.vault, 'IVaultGMX').gmxExecutionFee(False)
+kwargs['value'] = round(fee * 1.1) # extra 10% because gas prices can change quickly
+return await get_dexorder_contract().build.execute(
+job_id.bytes, (req.vault, req.order_index, req.tranche_index, req.price_proof),
+kwargs=kwargs)
except ContractPanicError as x:
exception = x
errcode = ''
@@ -79,7 +91,7 @@ class TrancheExecutionHandler (TransactionHandler):
errcode = hexbytes(x.args[1]).decode('utf-8')
log.error(f'While building execution for tranche {tk}: {errcode}')
# if there's a logic error we shouldn't keep trying
-finish_execution_request(tk, errcode)
+await finish_execution_request(tk, errcode)
raise exception
async def complete_transaction(self, job: TransactionJob, receipt: TransactionReceiptDict) -> None:
@@ -90,13 +102,13 @@ class TrancheExecutionHandler (TransactionHandler):
log.error('Could not build execution transaction due to exception', exc_info=e)
# noinspection PyTypeChecker
req: TrancheExecutionRequest = job.request
-finish_execution_request(req.tranche_key, '')
+await finish_execution_request(req.tranche_key, '')
TrancheExecutionHandler() # map 'te' to a TrancheExecutionHandler
-def finish_execution_request(tk: TrancheKey, error: Optional[str]=None):
+async def finish_execution_request(tk: TrancheKey, error: Optional[str]=None):
order_key = OrderKey(tk.vault, tk.order_index)
try:
order: Order = Order.of(order_key)
@@ -104,11 +116,6 @@ def finish_execution_request(tk: TrancheKey, error: Optional[str]=None):
log.error(f'Could not get order {order_key}')
return
-try:
-inflight_execution_requests.remove(tk)
-except KeyError:
-pass
def get_trigger():
try:
return OrderTriggers.instances[order_key].triggers[tk.tranche_index]
@@ -125,6 +132,11 @@ def finish_execution_request(tk: TrancheKey, error: Optional[str]=None):
if trig is not None:
trig.touch()
def delay(secs=None):
trig = get_trigger()
if trig is not None:
trig.deactivate(secs if secs is not None else config.slippage_control_delay)
if error is None:
metric.executions.inc()
else:
@@ -139,7 +151,9 @@ def finish_execution_request(tk: TrancheKey, error: Optional[str]=None):
# Insufficient Input Amount
token = order.order.tokenIn
log.debug(f'insufficient funds {tk.vault} {token} ')
-slash()
retry()
+await refresh_vault_balances(tk.vault, order.order.tokenIn, order.order.tokenOut)
elif error == 'SPL':
# todo tight slippage can cause excessive executions as the backend repeatedly retries the remainder. The symptom is error 'SPL'.
# Square-root price limit from Uniswap means we asked for a limit price that isn't met. This is a fault of
@@ -164,6 +178,7 @@ def finish_execution_request(tk: TrancheKey, error: Optional[str]=None):
retry()
elif error == 'RL':
log.debug(f'tranche {tk} execution failed due to "RL" rate limit')
+delay()
retry()
elif error == 'TE':
log.debug(f'tranche {tk} execution failed due to "TE" too early')
@@ -199,24 +214,20 @@ def finish_execution_request(tk: TrancheKey, error: Optional[str]=None):
def execute_tranches():
new_execution_requests = []
for tk, proof in active_tranches.items():
-if tk not in inflight_execution_requests:
-new_execution_requests.append((tk, proof))
-else:
-log.debug(f'execute {tk} already in flight')
+new_execution_requests.append((tk, proof))
# todo order requests and batch
for tk, proof in new_execution_requests:
create_execution_request(tk, proof)
def create_execution_request(tk: TrancheKey, proof: PriceProof):
-inflight_execution_requests.add(tk)
job = submit_transaction_request(new_tranche_execution_request(tk, proof))
if job is not None:
log.debug(f'Executing {tk} as job {job.id}')
return job
-def handle_dexorderexecutions(event: EventData):
+async def handle_dexorderexecutions(event: EventData):
log.debug(f'executions {event}')
exe_id = UUID(bytes=event['args']['id'])
try:
@@ -233,7 +244,11 @@ def handle_dexorderexecutions(event: EventData):
if job is None:
log.warning(f'Job {exe_id} not found!')
return
+# verify that the transaction hash of the event is the same as that of our request
+if job.tx_id != event['transactionHash']:
+log.warning(f'Ignoring rogue DexorderExecutions {exe_id} with wrong txid {job.tx_id} != {event["transactionHash"]}')
+return
# noinspection PyTypeChecker
req: TrancheExecutionRequest = job.request
tk = TrancheKey(req.vault, req.order_index, req.tranche_index)
-finish_execution_request(tk, None if errors[0] == '' else errors[0])
+await finish_execution_request(tk, None if errors[0] == '' else errors[0])

src/dexorder/order/orderstate.py

@@ -3,13 +3,14 @@ import logging
from dataclasses import dataclass
from typing import overload
-from dexorder import DELETE, db, order_log
+from dexorder import DELETE, db, order_log, from_timestamp
from dexorder.base.chain import current_chain
from dexorder.base.order import OrderKey, TrancheKey
from dexorder.base.orderlib import SwapOrderState, ElaboratedSwapOrderStatus, Fill
from dexorder.blockstate import BlockDict, BlockSet
from dexorder.database.model.orderindex import OrderIndex
from dexorder.routing import pool_address
+from dexorder.tokens import adjust_decimals
from dexorder.util import json
from dexorder.vault_blockdata import vault_owners
@@ -127,7 +128,7 @@ class Order:
key = a if b is None else OrderKey(a, b)
assert key not in Order.instances
self.key = key
-self.status: ElaboratedSwapOrderStatus = Order.order_statuses[key].copy()
+self._status: ElaboratedSwapOrderStatus = Order.order_statuses[key].copy()
self.pool_address: str = pool_address(self.status.order)
self.tranche_keys = [TrancheKey(key.vault, key.order_index, i) for i in range(len(self.status.trancheStatus))]
# flattenings of various static data
@@ -138,6 +139,14 @@ class Order:
self.tranche_amounts = [t.fraction_of(self.amount) for t in self.order.tranches]
Order.instances[self.key] = self
@property
def status(self):
return self._status
@status.setter
def status(self, v):
self._status = Order.order_statuses[self.key] = v
@property
def state(self):
return self.status.state
@@ -279,6 +288,40 @@ class Order:
Order.vault_recently_closed_orders.listremove(key.vault, key.order_index)
def __str__(self):
return str(self.key)
async def pprint(self):
amount_token = self.order.tokenIn if self.order.amountIsInput else self.order.tokenOut
msg = f'''
SwapOrder {self.key}
status: {self.state.name}
placed: {from_timestamp(self.status.startTime)}
in: {self.order.tokenIn}
out: {self.order.tokenOut}
exchange: {self.order.route.exchange.name, self.order.route.fee}
amount: {"input" if self.order.amountIsInput else "output"} {await adjust_decimals(amount_token, self.filled):f}/{await adjust_decimals(amount_token, self.amount):f}{" to owner" if self.order.outputDirectlyToOwner else ""}
minFill: {await adjust_decimals(amount_token, self.min_fill_amount):f}
inverted: {self.order.inverted}
'''
if self.order.gmx:
msg += f'''
gmx order: {"increase" if self.order.gmx.is_increase else "decrease"} {"long" if self.order.gmx.is_long else "short"}
collateral: {self.order.gmx.reserve_amount}
'''
msg += '''
tranches:
'''
for i in range(len(self.order.tranches)):
tranche = self.order.tranches[i]
msg += f' {tranche}'
filled_amount = self.tranche_filled(i)
if filled_amount:
msg += f' filled {await adjust_decimals(amount_token, filled_amount)}'
msg += '\n'
return msg
# ORDER STATE
# various blockstate fields hold different aspects of an order's state.
@@ -310,8 +353,6 @@ class Order:
'of', db=True, redis=True, pub=pub_order_fills,
str2key=OrderKey.str2key, value2str=lambda v: json.dumps(v.dump()), str2value=lambda s:OrderFilled.load(json.loads(s)))
-def __str__(self):
-return str(self.order)
# "active" means the order wants to be executed now. this is not BlockData because it's cleared every block
active_orders: dict[OrderKey,Order] = {}

src/dexorder/order/triggers.py

@@ -1,21 +1,22 @@
import asyncio
import logging
+from abc import abstractmethod
from collections import defaultdict
+from datetime import timedelta
from enum import Enum, auto
-from typing import Optional, Sequence
+from typing import Optional, Sequence, Union
import numpy as np
from sortedcontainers import SortedList
-from dexorder.base.orderlib import SwapOrderState, PriceProof, DISTANT_FUTURE, DISTANT_PAST, Line
+from dexorder.base.orderlib import SwapOrderState, PriceProof, DISTANT_FUTURE, DISTANT_PAST, Line, MIN_SLIPPAGE, \
+MIN_SLIPPAGE_EPSILON
from dexorder.blockstate import BlockDict
from .orderstate import Order
-from .. import dec, order_log, timestamp, from_timestamp, config
+from .. import dec, order_log, timestamp, config
+from ..base import OldPoolDict
from ..base.chain import current_clock
from ..base.order import OrderKey, TrancheKey
from ..contract import ERC20
-from ..database.model.pool import OldPoolDict
from ..pools import ensure_pool_price, pool_prices, get_pool
from ..routing import pool_address
from ..vault_blockdata import vault_balances, adjust_balance
@@ -36,12 +37,9 @@ execution should be attempted on the tranche.
 """

-# tranches which have passed all constraints and should be executed
+# tranches which have passed all constraints and should be executed. This set gets checked against already in-flight execution requests.
 active_tranches: BlockDict[TrancheKey, Optional[PriceProof]] = BlockDict('at')

-# tranches which have an execute() transaction sent but not completed
-inflight_execution_requests: set[TrancheKey] = set()
-
 class OrderTriggers:
     instances: dict[OrderKey, 'OrderTriggers'] = {}
@@ -58,13 +56,13 @@ class OrderTriggers:
         self.order = order
         self.triggers = triggers
         OrderTriggers.instances[order.key] = self
-        log.debug(f'created OrderTriggers for {order.key}')
+        # log.debug(f'created OrderTriggers for {order.key}')

     def disable(self):
         for t in self.triggers:
             t.disable()
         del OrderTriggers.instances[self.order.key]
-        log.debug(f'disabled OrderTriggers for {self.order.key}')
+        # log.debug(f'disabled OrderTriggers for {self.order.key}')

     @property
     def closed(self):
@@ -74,6 +72,10 @@ class OrderTriggers:
     def open(self):
         return not self.closed

+    @property
+    def error(self):
+        return any(t.error for t in self.triggers)
+
     def check_complete(self):
         if self.closed:
             final_state = SwapOrderState.Filled if self.order.remaining == 0 or self.order.remaining < self.order.min_fill_amount else SwapOrderState.Expired
@@ -102,15 +104,16 @@ def start_trigger_updates():
     PriceLineTrigger.clear_data()

-async def update_balance_triggers(vault: str, token: str, balance: int):
+async def update_balance_triggers(vault: str, token: str):
+    balance = vault_balances.get(vault, {}).get(token)
     updates = [bt.update(balance) for bt in BalanceTrigger.by_vault_token.get((vault, token), [])]
     await asyncio.gather(*updates)

-async def update_price_triggers(pool: OldPoolDict, price: dec):
-    price = price * dec(10) ** dec(-pool['decimals'])  # adjust for pool decimals to get onchain price
+def update_price_triggers(addr: str, price: dec, decimals: int):
+    price = price * dec(10) ** dec(-decimals)  # adjust for pool decimals to get onchain price
     price = float(price)  # since we use SIMD operations to evaluate lines, we must convert to float
-    for pt in PriceLineTrigger.by_pool.get(pool['address'], []):
+    for pt in PriceLineTrigger.by_pool.get(addr, []):
         pt.update(price)
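update_price_triggers now takes the pool address and decimal offset directly instead of the whole OldPoolDict. A worked sketch of the conversion above, with illustrative numbers (the 12-decimal offset is hypothetical):

    from decimal import Decimal as dec

    raw_price = dec('3000')                  # price in decimal-adjusted units
    decimals = 12                            # hypothetical pool decimal offset
    onchain_price = raw_price * dec(10) ** dec(-decimals)   # undo the decimal shift
    price = float(onchain_price)             # line evaluation uses numpy/SIMD, so float64
    print(price)                             # 3e-09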
@@ -174,6 +177,7 @@ class Trigger:
         Expiration = 2
         MinLine = 3
         MaxLine = 4
+        GMXInFlight = 5

     def __init__(self, trigger_type: TriggerType, tk: TrancheKey, value: bool):
         """
@@ -207,19 +211,17 @@ class Trigger:
     def _value_changed(self): pass

-    def remove(self): pass
+    @abstractmethod
+    def remove(self): ...

 async def has_funds(tk: TrancheKey):
-    log.debug(f'has funds? {tk.vault}')
+    # log.debug(f'has funds? {tk.vault}')
     order = Order.of(tk)
     balances = vault_balances.get(tk.vault, {})
-    log.debug(f'balances {balances}')
+    # log.debug(f'balances {balances}')
     token_addr = order.status.order.tokenIn
     token_balance = balances.get(token_addr)
-    log.debug(f'amount of {token_addr} = {token_balance}')
+    # log.debug(f'amount of {token_addr} = {token_balance}')
     if token_balance is None:
         # unknown balance
         token_balance = balances[token_addr] = await ERC20(token_addr).balanceOf(tk.vault)
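has_funds memoizes unknown balances into the shared vault_balances map so subsequent trigger checks skip the RPC round-trip. The same cache-miss pattern as a generic sketch (argument names hypothetical):

    async def cached_balance(balances: dict, token: str, vault: str, erc20) -> int:
        balance = balances.get(token)
        if balance is None:
            # cache miss: one on-chain call, memoized for later trigger checks
            balance = balances[token] = await erc20.balanceOf(vault)
        return balance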
@@ -229,6 +231,7 @@ async def has_funds(tk: TrancheKey):

 async def input_amount_is_sufficient(order, token_balance):
+    # todo modify for GMX
     # log.debug(f'input is sufficient? {order.min_fill_amount}')
     if order.amount_is_input:
         # log.debug(f'amount is input: {token_balance} >= {order.min_fill_amount}')
@@ -259,11 +262,12 @@ class BalanceTrigger (Trigger):
         self.order = Order.of(self.tk)
         self.vault_token = self.tk.vault, self.order.status.order.tokenIn
         BalanceTrigger.by_vault_token[self.vault_token].add(self)
+        self._value_changed()
         # log.debug(f'initializing Balance Trigger {id(self)} {tk} {value} {self.value}')

     async def update(self, balance):
         self.value = await input_amount_is_sufficient(self.order, balance)
-        # log.debug(f'update balance {balance} was sufficient? {self.value}')
+        # log.debug(f'update balance {balance} was sufficient? {self.value} {self.order.key}')

     def remove(self):
         try:
@@ -271,6 +275,17 @@ class BalanceTrigger (Trigger):
except (KeyError, ValueError): except (KeyError, ValueError):
pass pass
def _value_changed(self):
ok = self.value
order = Order.of(self.tk)
old_state = order.status.state
if not ok and old_state == SwapOrderState.Open:
order.status = order.status.copy()
order.status.state = SwapOrderState.Underfunded
elif ok and old_state == SwapOrderState.Underfunded:
order.status = order.status.copy()
order.status.state = SwapOrderState.Open
class TimeTrigger (Trigger): class TimeTrigger (Trigger):
@@ -307,11 +322,8 @@ class TimeTrigger (Trigger):
         if time == self._time:
             return
         self._time = time
-        if self.active:
-            # remove old trigger
-            TimeTrigger.all.remove(self)
-            self.active = False
-        self.update_active(time_now)
+        in_future = time_now >= time
+        self.value = in_future is self.is_start

     def update_active(self, time_now: int = None, time: int = None):
         if time_now is None:
@@ -380,7 +392,7 @@ class PriceLineTrigger (Trigger):
         if inverted:
             price_now = 1/price_now
         activated = value_now < price_now if is_min else value_now > price_now
-        log.debug(f'initial price line {value_now} {"<" if is_min else ">"} {price_now} {activated}')
+        # log.debug(f'initial price line {value_now} {"<" if is_min else ">"} {price_now} {activated}')
         trigger_type = Trigger.TriggerType.MinLine if is_min else Trigger.TriggerType.MaxLine
         super().__init__(trigger_type, tk, activated)
         self.inverted = inverted
@@ -495,7 +507,8 @@ async def activate_order(order: Order):
     triggers = await OrderTriggers.create(order)
     if triggers.closed:
         log.debug(f'order {order.key} was immediately closed')
-        final_state = SwapOrderState.Filled if order.remaining == 0 or order.remaining < order.min_fill_amount \
+        final_state = SwapOrderState.Error if triggers.error \
+            else SwapOrderState.Filled if order.remaining == 0 or order.remaining < order.min_fill_amount \
             else SwapOrderState.Expired
         order.complete(final_state)
@@ -556,13 +569,14 @@ class TrancheTrigger:
         tranche_remaining = tranche.fraction_of(order.amount) - order.tranche_filled(self.tk.tranche_index)
         self.status = \
+            TrancheState.Error if self.market_order and self.slippage < MIN_SLIPPAGE - MIN_SLIPPAGE_EPSILON else \
             TrancheState.Filled if tranche_remaining == 0 or tranche_remaining < self.order.min_fill_amount else \
             TrancheState.Expired if self.expiration_trigger is not None and not self.expiration_trigger else \
-            TrancheState.Early if self.activation_trigger is None and not self.activation_trigger else \
+            TrancheState.Early if self.activation_trigger is not None and not self.activation_trigger else \
             TrancheState.Active
         _dirty.add(tk)
         TrancheTrigger.all[tk] = self
-        log.debug(f'Tranche {tk} initial status {self.status} {self}')
+        # log.debug(f'Tranche {tk} initial status {self.status} {self}')

     @property
@@ -578,7 +592,7 @@ class TrancheTrigger:
     def fill(self, _amount_in, _amount_out, _next_activation_time ):
-        if _next_activation_time != DISTANT_PAST:
+        if _next_activation_time != 0:
             # rate limit
             if self.activation_trigger is None:
                 self.activation_trigger = TimeTrigger(True, self.tk, _next_activation_time, timestamp())
@@ -593,7 +607,8 @@ class TrancheTrigger:
         else:
             order_log.debug(f'tranche part-filled {self.tk} in:{_amount_in} out:{_amount_out} remaining:{remaining}')
         if self.market_order:
-            self.expire()
+            order_log.debug(f'tranche {self.tk} delayed {config.slippage_control_delay} seconds due to slippage control')
+            self.deactivate(config.slippage_control_delay)
         self.slash_count = 0  # reset slash count

     def touch(self):
@@ -605,11 +620,11 @@ class TrancheTrigger:
         self.order_trigger.expire_tranche(self.tk.tranche_index)

     def expire(self):
-        self.disable()
         if self.closed:
             return
         order_log.debug(f'tranche expired {self.tk}')
         self.status = TrancheState.Expired
+        self.disable()

     def kill(self):
         order_log.warning(f'tranche KILLED {self.tk}')
@@ -625,15 +640,26 @@ class TrancheTrigger:
             self.kill()
         else:
             delay = round(config.slash_delay_base * config.slash_delay_mul ** (self.slash_count-1))
-            self.deactivate(timestamp()+delay)
+            self.deactivate(delay)

-    def deactivate(self, until):
+    def deactivate(self, interval: Union[timedelta, int, float]):
+        # todo this timestamp should be consistent with the trigger time which is blockchain
+        now = current_clock.get().timestamp
+        self.deactivate_until(now + (interval.total_seconds() if isinstance(interval, timedelta) else interval))
+
+    def deactivate_until(self, until):
         # Temporarily deactivate the tranche due to a rate limit. Use disable() to permanently halt the trigger.
-        log.debug(f'deactivating tranche {self.tk} until {from_timestamp(until)}')
+        now = current_clock.get().timestamp
+        if until < now:
+            return
         if self.activation_trigger is None:
             self.activation_trigger = TimeTrigger.create(True, self.tk, until)
         else:
-            self.activation_trigger.time = until
+            self.activation_trigger.time = max(until, self.activation_trigger.time)
+        try:
+            del active_tranches[self.tk]
+        except KeyError:
+            pass

     def disable(self):
         # permanently stop this trigger and deconstruct
@@ -671,6 +697,10 @@ class TrancheTrigger:
     def open(self):
         return not self.closed

+    @property
+    def error(self):
+        return self.status == TrancheState.Error
+
     def __str__(self):
         trigs = []
         if self.balance_trigger is not None:
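The slash path above backs a tranche off exponentially, and deactivate() now takes a relative delay rather than an absolute timestamp. A worked sketch of the schedule, with hypothetical config values slash_delay_base=10 and slash_delay_mul=2:

    # delay = round(slash_delay_base * slash_delay_mul ** (slash_count - 1))
    for slash_count in (1, 2, 3, 4):
        delay = round(10 * 2 ** (slash_count - 1))
        print(f'slash #{slash_count}: deactivated for {delay}s')   # 10s, 20s, 40s, 80s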

src/dexorder/periodic.py (new file, +36 lines)

@@ -0,0 +1,36 @@
+from datetime import timedelta
+import time
+import asyncio
+from functools import wraps
+
+
+def periodic(period: timedelta|float):
+    """
+    Decorator to allow only one execution of a function or coroutine per period.
+    Works for both sync and async functions.
+    """
+    def decorator(func):
+        last_called = {'time': 0.}
+        period_seconds = period.total_seconds() if isinstance(period, timedelta) else period
+
+        @wraps(func)
+        def sync_wrapper(*args, **kwargs):
+            now = time.monotonic()
+            if now - last_called['time'] >= period_seconds:
+                last_called['time'] = now
+                return func(*args, **kwargs)
+            return None
+
+        @wraps(func)
+        async def async_wrapper(*args, **kwargs):
+            now = time.monotonic()
+            if now - last_called['time'] >= period_seconds:
+                last_called['time'] = now
+                return await func(*args, **kwargs)
+            return None
+
+        if asyncio.iscoroutinefunction(func):
+            return async_wrapper
+        else:
+            return sync_wrapper
+    return decorator
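A usage sketch for the decorator above (the function and period are illustrative); calls arriving inside the window are dropped and return None:

    from datetime import timedelta

    @periodic(timedelta(seconds=30))
    def report_depth(depth: int):
        print(f'queue depth: {depth}')

    report_depth(10)   # executes
    report_depth(11)   # suppressed: returns None until 30 seconds have elapsed

One caveat in the implementation: last_called starts at 0, so on a platform where time.monotonic() begins near zero, the very first call can also be suppressed for up to one period.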


@@ -4,18 +4,18 @@ from dataclasses import dataclass
 from datetime import datetime
 from typing import Optional

-from web3.exceptions import ContractLogicError
+from web3.exceptions import ContractLogicError, BadFunctionCallOutput
 from web3.types import EventData

 from dexorder import dec, ADDRESS_0, from_timestamp, db, config, NATIVE_TOKEN
 from dexorder.addrmeta import address_metadata
-from dexorder.base import OldPoolDict
 from dexorder.base.chain import current_chain
 from dexorder.base.orderlib import Exchange
 from dexorder.blocks import get_block_timestamp
 from dexorder.blockstate import BlockDict
 from dexorder.blockstate.blockdata import K, V
 from dexorder.database.model import Pool
+from dexorder.database.model.pool import OldPoolDict
 from dexorder.tokens import get_token, adjust_decimals as adj_dec
 from dexorder.uniswap import UniswapV3Pool, uniswapV3_pool_address
@@ -64,7 +64,7 @@ async def load_pool(address: str, *, use_db=True) -> OldPoolDict:
         log.debug(f'new UniswapV3 pool {token0["symbol"]}/{token1["symbol"]} {fee/1_000_000:.2%} '
                   f'{("."+str(decimals)) if decimals >= 0 else (str(-decimals)+".")} {address}')
         add_mark_pool(address, t0, t1, fee)
-    except ContractLogicError:
+    except (ContractLogicError, BadFunctionCallOutput):
         pass
     except ValueError as v:
         try:
@@ -85,8 +85,9 @@ async def load_pool(address: str, *, use_db=True) -> OldPoolDict:
 class PoolPrices (BlockDict[str, dec]):
     def __setitem__(self, item: K, value: V) -> None:
-        super().__setitem__(item, value)
-        new_pool_prices[item] = value
+        old = self.setitem(item, value)
+        if value != old:
+            new_pool_prices[item] = value

 def pub_pool_price(_s,k,v):
@@ -148,7 +149,7 @@ class MarkPool:
 mark_pools: dict[str, MarkPool] = {}
-quotes = []  # ordered list of preferred quote tokens
+quotes = []  # ordered list of preferred quote token addresses

 def add_mark_pool(addr: str, base: str, quote: str, fee: int):
@@ -200,7 +201,7 @@ async def mark_to_market_adj_dec(token: str, amount: dec, adjust_decimals=True)
     return mark_to_market(token, amount)

-def mark_to_market(token: str, amount: dec) -> Optional[dec]:
+def mark_to_market(token: str, amount: dec = dec(1)) -> Optional[dec]:
     """
     amount must already be adjusted for decimals
     """


@@ -40,8 +40,8 @@ class BlockProgressor(metaclass=ABCMeta):
     def add_event_trigger(self,
                           # callback takes either a single event if multi=False, or if multi=True then a list of all events in the processing range
                           callback: Union[
-                              Callable[[EventData], Maywaitable[None]],
-                              Callable[[list[EventData]], Maywaitable[None]],
+                              Callable[[EventData|dict], Maywaitable[None]],
+                              Callable[[list[EventData|dict]], Maywaitable[None]],
                               Callable[[], Maywaitable[None]],
                           ],
                           event: ContractEvents = None,


@@ -5,7 +5,6 @@ from datetime import timedelta
 from typing import Any, Iterable, Callable, Optional

 from eth_bloom import BloomFilter
-# noinspection PyPackageRequirements
 from websockets.exceptions import ConnectionClosedError

 from dexorder import Blockchain, db, current_pub, async_yield, current_w3, config, now, timestamp, metric
@@ -81,8 +80,7 @@ class BlockStateRunner(BlockProgressor):
         async with w3ws as w3ws:
             log.debug('connecting to ws provider')
             await w3ws.provider.connect()
-            subscription = await w3ws.eth.subscribe('newHeads')  # the return value of this call is not consistent between anvil/hardhat/rpc.
-            # log.debug(f'subscribed to newHeads {subscription}')
+            await w3ws.eth.subscribe('newHeads')  # the return value of this call is not consistent between anvil/hardhat/rpc.
             while self.running:
                 async for message in w3ws.ws.process_subscriptions():
                     block = Block(chain_id, message['result'])
@@ -94,11 +92,15 @@ class BlockStateRunner(BlockProgressor):
                     if not self.running:
                         break
                     await async_yield()
-        except (ConnectionClosedError, TimeoutError, asyncio.TimeoutError) as e:
+        except (TimeoutError, asyncio.TimeoutError) as e:
             log.debug(f'runner timeout {e}')
+        except ConnectionClosedError as e:
+            log.info(f'websocket connection closed {e}')
         except ConnectionRefusedError:
             log.warning(f'Could not connect to websocket {config.ws_url}')
             await asyncio.sleep(1)
+        except StopAsyncIteration:
+            log.info(f'websocket stream ended')
         except Exception:
             log.exception(f'Unhandled exception during run_ws()')
         finally:
@@ -314,7 +316,7 @@ class BlockStateRunner(BlockProgressor):
             else:
                 lf = dict(log_filter)
                 lf['blockHash'] = hexstr(block.hash)
-                has_logs = any(bytes(hexbytes(topic)) in bloom for topic in lf['topics'])
+                has_logs = 'topics' not in lf or any(bytes(hexbytes(topic)) in bloom for topic in lf['topics'])
                 # log.debug(f'has {event.__class__.__name__}? {has_logs}')
                 if not has_logs:
                     get_logs = None
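The prefilter now also fetches logs when a filter carries no 'topics' key, since a bloom check can only rule topics out, never confirm them. The membership test relies on the eth_bloom package; a self-contained sketch (the empty filter stands in for a block header's logsBloom):

    from eth_bloom import BloomFilter

    bloom = BloomFilter()   # the runner builds this from the block header's logsBloom
    transfer = bytes.fromhex('ddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef')  # keccak('Transfer(address,address,uint256)')
    bloom.add(transfer)

    print(transfer in bloom)       # True: the block *may* contain matching logs
    print(b'\x01' * 32 in bloom)   # False: such logs are definitely absent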
@@ -397,21 +399,21 @@ class BlockStateRunner(BlockProgressor):
             # propagate to the DB or Redis.
             # TIME TICKS ARE DISABLED FOR THIS REASON
             return
-        current_fork.set(fork)
-        session = db.session
-        session.begin()
-        try:
-            for callback, on_timer in self.callbacks:
-                if on_timer:
-                    # noinspection PyCallingNonCallable
-                    await maywait(callback())
-        except BaseException:
-            session.rollback()
-            raise
-        else:
-            session.commit()
-        finally:
-            db.close_session()
+        # current_fork.set(fork)
+        # session = db.session
+        # session.begin()
+        # try:
+        #     for callback, on_timer in self.callbacks:
+        #         if on_timer:
+        #             # noinspection PyCallingNonCallable
+        #             await maywait(callback())
+        # except BaseException:
+        #     session.rollback()
+        #     raise
+        # else:
+        #     session.commit()
+        # finally:
+        #     db.close_session()

     async def do_state_init_cbs(self):


@@ -1,3 +1,4 @@
+import asyncio
 import logging
 from typing import Optional
@@ -6,11 +7,11 @@ from web3.exceptions import BadFunctionCallOutput
 from dexorder import ADDRESS_0, db, NATIVE_TOKEN, dec, current_w3
 from dexorder.addrmeta import address_metadata
-from dexorder.base import OldTokenDict
 from dexorder.base.chain import current_chain
 from dexorder.blocks import current_block
 from dexorder.contract import ERC20, ContractProxy, CONTRACT_ERRORS
 from dexorder.database.model import Token
+from dexorder.database.model.token import OldTokenDict
 from dexorder.metadata import get_metadata

 log = logging.getLogger(__name__)
@@ -45,18 +46,19 @@ async def get_native_balance(addr, *, adjust_decimals=True) -> dec:
     return value

-async def get_token(address) -> Optional[OldTokenDict]:
+async def get_token(address, *, squelch=False) -> Optional[OldTokenDict]:
     if address == ADDRESS_0:
         raise ValueError('No token at address 0')
     try:
         # noinspection PyTypeChecker
         return address_metadata[address]
     except KeyError:
-        result = address_metadata[address] = await load_token(address)
+        # noinspection PyTypeChecker
+        result = address_metadata[address] = await load_token(address, squelch=squelch)
         return result

-async def load_token(address: str) -> Optional[OldTokenDict]:
+async def load_token(address: str, *, squelch=False) -> Optional[OldTokenDict]:
     contract = ERC20(address)
     chain_id = current_chain.get().id
     if db:
@@ -74,7 +76,8 @@ async def load_token(address: str) -> Optional[OldTokenDict]:
         try:
             rb: bytes = await ContractProxy(address, 'ERC20.sb').symbol()
         except CONTRACT_ERRORS:
-            log.warning(f'token {address} has broken {func_name}()')
+            if not squelch:
+                log.warning(f'token {address} has broken {func_name}()')
             return None
         end = rb.find(b'\x00')
         if end == -1:
@@ -82,21 +85,20 @@ async def load_token(address: str) -> Optional[OldTokenDict]:
         try:
             return rb[:end].decode('utf8')
         except UnicodeDecodeError:
-            log.warning(f'token {address} has an invalid {func_name}() {rb}')
+            if not squelch:
+                log.warning(f'token {address} has an invalid {func_name}() {rb}')
             return None

     dec_prom = contract.decimals()
-    symbol_prom = get_string_or_bytes32('symbol')
-    name_prom = get_string_or_bytes32('name')
     try:
         decimals = await dec_prom
     except CONTRACT_ERRORS:
-        log.warning(f'token {address} has no decimals()')
-        decimals = 0
+        if not squelch:
+            log.info(f'token {address} has no decimals()')
+        return None  # we do not support coins that don't specify decimals.
     approved = False  # never approve new coins
     chain_id = current_chain.get().id
-    symbol = await symbol_prom
-    name = await name_prom
+    name, symbol = await asyncio.gather(get_string_or_bytes32('name'), get_string_or_bytes32('symbol'))
     td = OldTokenDict(type='Token', chain=chain_id, address=address,
                       name=name, symbol=symbol, decimals=decimals, approved=approved)
     md = get_metadata(address, chain_id=chain_id)
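name() and symbol() are now awaited together via asyncio.gather, so the two RPC calls overlap instead of running back-to-back. The general pattern as a sketch (fetch_name and fetch_symbol are stand-ins for the coroutine factories):

    import asyncio

    async def fetch_token_strings(fetch_name, fetch_symbol):
        # both coroutines run concurrently; gather returns results in argument order
        name, symbol = await asyncio.gather(fetch_name(), fetch_symbol())
        return name, symbol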


@@ -1,6 +1,6 @@
 import asyncio
 import logging
-from abc import abstractmethod
+from abc import abstractmethod, ABC
 from typing import Optional
 from uuid import uuid4
@@ -18,7 +18,7 @@ from dexorder.util import hexstr
 log = logging.getLogger(__name__)

-class TransactionHandler:
+class TransactionHandler (ABC):
     instances: dict[str,'TransactionHandler'] = {}

     @staticmethod
@@ -159,12 +159,12 @@ async def handle_transaction_receipts():
         receipt: TransactionReceiptDict = await w3.eth.get_transaction_receipt(job.tx_id)
     except TransactionNotFound:
         return
-    job.state = TransactionJobState.Mined
-    job.receipt = receipt
     fork = current_fork.get()
     assert fork is not None
     if fork.branch.contiguous and receipt['blockHash'] in fork.branch.path or \
             fork.branch.disjoint and receipt['blockNumber'] <= fork.height:
+        job.state = TransactionJobState.Mined
+        job.receipt = receipt
         try:
             handler = TransactionHandler.of(job.request.type)
         except KeyError:


@@ -37,7 +37,7 @@ def dumps(obj):
     return dumpb(obj).decode('utf8')

 def dumpb(obj):
-    opts = orjson.OPT_PASSTHROUGH_SUBCLASS
+    opts = orjson.OPT_PASSTHROUGH_SUBCLASS | orjson.OPT_SERIALIZE_DATACLASS
     return orjson.dumps(obj, default=_serialize, option=opts)
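A quick sketch of what the added flag covers: dataclass instances serialize to plain JSON objects. (In orjson 3.x dataclass serialization is the default behavior and OPT_SERIALIZE_DATACLASS is documented as a deprecated no-op kept for compatibility; the class name here is illustrative.)

    import dataclasses
    import orjson

    @dataclasses.dataclass
    class Fill:
        order_index: int
        amount_in: int

    print(orjson.dumps(Fill(1, 1000), option=orjson.OPT_SERIALIZE_DATACLASS))
    # b'{"order_index":1,"amount_in":1000}'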


@@ -1,12 +1,13 @@
+import asyncio
 import functools
 import logging

-from dexorder import current_pub
+from dexorder import current_pub, dec
 from dexorder.base.chain import current_chain
 from dexorder.blockstate import BlockDict
 from dexorder.contract import ERC20, CONTRACT_ERRORS
 from dexorder.contract.dexorder import VaultContract, vault_address
-from dexorder.util import json
+from dexorder.util import json, align_decimal

 log = logging.getLogger(__name__)
@@ -90,3 +91,17 @@ def publish_vaults(chain_id, owner):
             break
     log.debug(f'publish_vaults {chain_id} {owner} {vaults}')
     current_pub.get()(f'{chain_id}|{owner}', 'vaults', chain_id, owner, vaults)
+
+async def refresh_vault_balances(vault, *tokens):
+    amounts = await asyncio.gather(*(ERC20(token).balanceOf(vault) for token in tokens))
+
+    def _adjust(vaddr, toks, amts, old_balances):
+        result = dict(old_balances)  # copy
+        for t, a in zip(toks, amts):
+            result[t] = a
+        return result
+
+    vault_balances.modify(vault, functools.partial(_adjust, vault, tokens, amounts))
+
+def pretty_balances(b: dict[str,dec], padding=8) -> str:
+    return '\n'.join(f'{k:>} {align_decimal(v,padding)}' for k,v in b.items())
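A hypothetical end-to-end use of the two new helpers: refresh a vault's token balances on-chain, then render them. Addresses are placeholders; refresh_vault_balances, pretty_balances, and vault_balances come from the module above.

    import asyncio

    async def show_vault(vault: str, *tokens: str) -> None:
        await refresh_vault_balances(vault, *tokens)   # fetch and store via vault_balances.modify
        print(pretty_balances(vault_balances.get(vault, {})))

    # asyncio.run(show_vault('0x...vault', '0x...usdc', '0x...weth'))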