Compare commits
37 Commits
eccf81c3c8
...
gmx
| Author | SHA1 | Date | |
|---|---|---|---|
| 8127a6c900 | |||
| 046903aab4 | |||
| a4a2f6e318 | |||
| 97f98ba7cf | |||
| eef803d3d6 | |||
| 88057607d5 | |||
| 36d0a863c6 | |||
| 89ce46793e | |||
| 2bcf5d043c | |||
| 71942d5b8f | |||
| ef44973646 | |||
| ce55609297 | |||
| a27300b5e4 | |||
| f3faaa3dd6 | |||
| 0bb670b356 | |||
| 52b406ba17 | |||
| 3d0342d19d | |||
| dbf960bae9 | |||
| d49f142fe3 | |||
| 34fa439b3c | |||
| 41a1e2d9fe | |||
| 66229e67bb | |||
| 31b6ddd314 | |||
| 07c6423fd5 | |||
| 4740687167 | |||
| a06eeeb10d | |||
| 4492d23c47 | |||
| 1c0c2f0e63 | |||
| f3bdfdf97b | |||
| be8c8bf019 | |||
| ecf1d21d5f | |||
| b7ed91d1c0 | |||
| 646449e456 | |||
| 1bcf73de22 | |||
| af0f35eba5 | |||
| e868ea5a4b | |||
| c132f40164 |
@@ -28,7 +28,7 @@ def upgrade() -> None:
|
||||
sa.Column('time', sa.DateTime(), nullable=False),
|
||||
sa.Column('account', sa.String(), nullable=False),
|
||||
sa.Column('category', sa.Enum('Transfer', 'Income', 'Expense', 'Trade', 'Special', name='accountingcategory'), nullable=False),
|
||||
sa.Column('subcategory', sa.Enum('OrderFee', 'GasFee', 'FillFee', 'VaultCreation', 'Execution', 'FeeAdjustment', 'InitialBalance', name='accountingsubcategory'), nullable=True),
|
||||
sa.Column('subcategory', sa.Enum('OrderFee', 'GasFee', 'FillFee', 'Admin', 'TransactionGas', 'VaultCreation', 'Execution', 'FeeAdjustment', 'InitialBalance', name='accountingsubcategory'), nullable=True),
|
||||
sa.Column('token', sa.String(), nullable=False),
|
||||
sa.Column('amount', dexorder.database.column_types.DecimalNumeric(), nullable=False),
|
||||
sa.Column('value', dexorder.database.column_types.DecimalNumeric(), nullable=True),
|
||||
|
||||
30
alembic/versions/e47d1bca4b3d_sharedata.py
Normal file
30
alembic/versions/e47d1bca4b3d_sharedata.py
Normal file
@@ -0,0 +1,30 @@
|
||||
"""sharedata
|
||||
|
||||
Revision ID: e47d1bca4b3d
|
||||
Revises: 509010f13e8b
|
||||
Create Date: 2025-04-23 11:23:10.809341
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = 'e47d1bca4b3d'
|
||||
down_revision: Union[str, None] = '509010f13e8b'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.create_table('sharedata',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('data', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_table('sharedata')
|
||||
15
bin/examine
Executable file
15
bin/examine
Executable file
@@ -0,0 +1,15 @@
|
||||
#!/bin/bash
|
||||
|
||||
kubectl port-forward postgres-0 5431:5432 &
|
||||
PF_PID=$!
|
||||
|
||||
shutdown () {
|
||||
kill $PF_PID
|
||||
wait
|
||||
}
|
||||
|
||||
trap shutdown INT TERM
|
||||
|
||||
PYTHONPATH=src python -m dexorder.bin.examine rpc_url=arbitrum_dxod db_url=postgres://dexorder@localhost:5431/dexorder "$@"
|
||||
|
||||
shutdown
|
||||
File diff suppressed because one or more lines are too long
@@ -1,21 +1,24 @@
|
||||
aiohappyeyeballs==2.4.3
|
||||
aiohttp==3.11.12
|
||||
aiohttp==3.11.13
|
||||
aiosignal==1.3.1
|
||||
alembic==1.14.1
|
||||
alembic==1.15.1
|
||||
annotated-types==0.7.0
|
||||
antlr4-python3-runtime==4.9.3
|
||||
asn1crypto==1.5.1
|
||||
async-lru==2.0.4
|
||||
attrs==23.2.0
|
||||
bip-utils==2.9.3
|
||||
bitarray==3.0.0
|
||||
cachetools==5.5.1
|
||||
bitarray==3.1.1
|
||||
cachetools==5.5.2
|
||||
cattrs==24.1.2
|
||||
cbor2==5.6.4
|
||||
certifi==2024.2.2
|
||||
cffi==1.16.0
|
||||
charset-normalizer==3.4.1
|
||||
ckzg==1.0.2
|
||||
click==8.1.8
|
||||
coincurve==20.0.0
|
||||
coremltools==8.2
|
||||
crcmod==1.7
|
||||
cytoolz==0.12.3
|
||||
defaultlist==1.0.0
|
||||
@@ -31,39 +34,73 @@ eth-rlp==1.0.1
|
||||
eth-typing==4.4.0
|
||||
eth-utils==4.1.1
|
||||
eth_abi==5.2.0
|
||||
filelock==3.17.0
|
||||
frozenlist==1.4.1
|
||||
fsspec==2025.2.0
|
||||
google-auth==2.35.0
|
||||
greenlet==3.0.3
|
||||
hexbytes==0.3.1
|
||||
hiredis==3.0.0
|
||||
idna==3.7
|
||||
imageio==2.37.0
|
||||
importlib_resources==6.5.2
|
||||
Jinja2==3.1.6
|
||||
joblib==1.4.2
|
||||
jsonschema==4.21.1
|
||||
jsonschema-specifications==2023.12.1
|
||||
kraken==5.3.0
|
||||
kubernetes==31.0.0
|
||||
lazy_loader==0.4
|
||||
lightning==2.4.0
|
||||
lightning-utilities==0.14.0
|
||||
lru-dict==1.2.0
|
||||
lxml==5.3.1
|
||||
Mako==1.3.3
|
||||
markdown-it-py==3.0.0
|
||||
MarkupSafe==2.1.5
|
||||
mdurl==0.1.2
|
||||
mpmath==1.3.0
|
||||
msgpack-python==0.5.6
|
||||
multidict==6.0.5
|
||||
numpy==2.2.2
|
||||
networkx==3.4.2
|
||||
numpy==2.0.2
|
||||
nvidia-cublas-cu12==12.1.3.1
|
||||
nvidia-cuda-cupti-cu12==12.1.105
|
||||
nvidia-cuda-nvrtc-cu12==12.1.105
|
||||
nvidia-cuda-runtime-cu12==12.1.105
|
||||
nvidia-cudnn-cu12==9.1.0.70
|
||||
nvidia-cufft-cu12==11.0.2.54
|
||||
nvidia-curand-cu12==10.3.2.106
|
||||
nvidia-cusolver-cu12==11.4.5.107
|
||||
nvidia-cusparse-cu12==12.1.0.106
|
||||
nvidia-nccl-cu12==2.20.5
|
||||
nvidia-nvjitlink-cu12==12.8.93
|
||||
nvidia-nvtx-cu12==12.1.105
|
||||
oauthlib==3.2.2
|
||||
omegaconf==2.3.0
|
||||
orjson==3.10.15
|
||||
packaging==24.2
|
||||
pagerduty==1.0.0
|
||||
parsimonious==0.10.0
|
||||
pillow==11.1.0
|
||||
prometheus_client==0.21.1
|
||||
propcache==0.2.0
|
||||
protobuf==5.26.1
|
||||
psycopg2-binary==2.9.10
|
||||
py-sr25519-bindings==0.2.0
|
||||
pyaml==25.1.0
|
||||
pyarrow==19.0.1
|
||||
pyasn1==0.6.1
|
||||
pyasn1_modules==0.4.1
|
||||
pycparser==2.22
|
||||
pycryptodome==3.20.0
|
||||
pydantic==2.9.2
|
||||
pydantic_core==2.23.4
|
||||
Pygments==2.19.1
|
||||
PyNaCl==1.5.0
|
||||
python-bidi==0.6.6
|
||||
python-dateutil==2.9.0.post0
|
||||
pytorch-lightning==2.5.0.post0
|
||||
pytz==2025.1
|
||||
pyunormalize==15.1.0
|
||||
PyYAML==6.0.1
|
||||
@@ -72,18 +109,32 @@ referencing==0.35.0
|
||||
regex==2024.4.28
|
||||
requests==2.32.3
|
||||
requests-oauthlib==2.0.0
|
||||
rich==13.9.4
|
||||
rlp==4.0.1
|
||||
rpds-py==0.18.0
|
||||
rsa==4.9
|
||||
scikit-image==0.24.0
|
||||
scikit-learn==1.5.2
|
||||
scipy==1.13.1
|
||||
setuptools==75.8.2
|
||||
shapely==2.0.7
|
||||
six==1.16.0
|
||||
socket.io-emitter==0.1.5.1
|
||||
sortedcontainers==2.4.0
|
||||
SQLAlchemy==2.0.38
|
||||
sympy==1.13.3
|
||||
threadpoolctl==3.5.0
|
||||
tifffile==2025.2.18
|
||||
toolz==0.12.1
|
||||
torch==2.4.1
|
||||
torchmetrics==1.6.2
|
||||
torchvision==0.19.1
|
||||
tqdm==4.67.1
|
||||
triton==3.0.0
|
||||
types-requests==2.32.0.20240914
|
||||
typing_extensions==4.12.2
|
||||
urllib3==2.2.1
|
||||
web3==6.20.3
|
||||
web3==6.20.4
|
||||
websocket-client==1.8.0
|
||||
websockets==14.2
|
||||
websockets==13.1
|
||||
yarl==1.17.2
|
||||
|
||||
@@ -30,3 +30,4 @@ aiohttp
|
||||
charset-normalizer
|
||||
pytz
|
||||
prometheus_client
|
||||
krakenex
|
||||
|
||||
1866
resource/abi/42161/gmx/DataStore.json
Normal file
1866
resource/abi/42161/gmx/DataStore.json
Normal file
File diff suppressed because one or more lines are too long
0
resource/abi/42161/gmx/EventEmitter.json
Normal file
0
resource/abi/42161/gmx/EventEmitter.json
Normal file
1804
resource/abi/42161/gmx/ExchangeRouter.json
Normal file
1804
resource/abi/42161/gmx/ExchangeRouter.json
Normal file
File diff suppressed because one or more lines are too long
225
resource/abi/42161/gmx/MarketStoreUtils.json
Normal file
225
resource/abi/42161/gmx/MarketStoreUtils.json
Normal file
File diff suppressed because one or more lines are too long
646
resource/abi/42161/gmx/MarketUtils.json
Normal file
646
resource/abi/42161/gmx/MarketUtils.json
Normal file
File diff suppressed because one or more lines are too long
303
resource/abi/42161/gmx/OrderUtils.json
Normal file
303
resource/abi/42161/gmx/OrderUtils.json
Normal file
File diff suppressed because one or more lines are too long
4709
resource/abi/42161/gmx/Reader.json
Normal file
4709
resource/abi/42161/gmx/Reader.json
Normal file
File diff suppressed because one or more lines are too long
@@ -3,7 +3,7 @@ import logging
|
||||
from contextvars import ContextVar
|
||||
from datetime import datetime, timezone
|
||||
from decimal import Decimal
|
||||
from typing import Callable, Any, Union, Optional
|
||||
from typing import Callable, Any
|
||||
|
||||
from web3 import AsyncWeb3
|
||||
|
||||
@@ -35,14 +35,16 @@ class _Token:
|
||||
def __repr__(self): return self.__token_name
|
||||
def __str__(self): return self.__token_name
|
||||
|
||||
class _FalseToken (_Token):
|
||||
class _FalseyToken (_Token):
|
||||
def __bool__(self): return False
|
||||
|
||||
|
||||
NARG = _FalseToken('NARG')
|
||||
DELETE = _FalseToken('DELETE') # used as a value token to indicate removal of the key
|
||||
NARG = _FalseyToken('NARG')
|
||||
DELETE = _FalseyToken('DELETE') # used as a value token to indicate removal of the key
|
||||
ADDRESS_0 = '0x0000000000000000000000000000000000000000'
|
||||
NATIVE_TOKEN = '0x0000000000000000000000000000000000000001' # We use 0x01 to indicate the use of native ETH wherever a token address is normally required
|
||||
USD_FIAT = '0x0000000000000000000000000000000000000055' # We use 0x55 (ASCII 'U') to indicate the use of fiat USD
|
||||
CHAIN_ID_OFFCHAIN = -1
|
||||
WEI = 1
|
||||
GWEI = 1_000_000_000
|
||||
ETH = 1_000_000_000_000_000_000
|
||||
@@ -57,7 +59,7 @@ _cwd() # do this first so that config has the right current working directory
|
||||
|
||||
# ordering here is important!
|
||||
from .base.chain import Blockchain # the singletons are loaded into the dexorder.blockchain.* namespace
|
||||
from .util import async_yield
|
||||
from .util import async_yield, json
|
||||
from .base.fixed import Fixed2, FixedDecimals, Dec18
|
||||
from .configuration import config
|
||||
from .base.account import Account
|
||||
|
||||
1
src/dexorder/accounting/__init__.py
Normal file
1
src/dexorder/accounting/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
from .accounting import *
|
||||
@@ -2,12 +2,12 @@ import asyncio
|
||||
import logging
|
||||
from typing import Union
|
||||
|
||||
from sqlalchemy import select, func, text
|
||||
from sqlalchemy import select, func
|
||||
from typing_extensions import Optional
|
||||
from web3.exceptions import ContractLogicError
|
||||
from web3.types import EventData
|
||||
|
||||
from dexorder import db, dec, NATIVE_TOKEN, from_timestamp, config, ADDRESS_0, now, Account, metric
|
||||
from dexorder import db, dec, NATIVE_TOKEN, from_timestamp, config, ADDRESS_0, now, Account
|
||||
from dexorder.base import TransactionReceiptDict
|
||||
from dexorder.base.chain import current_chain
|
||||
from dexorder.blocks import get_block_timestamp, get_block, current_block
|
||||
@@ -34,19 +34,19 @@ class ReconciliationException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def accounting_lock():
|
||||
"""
|
||||
This must be called before accounting_*() calls are made.
|
||||
"""
|
||||
db.session.execute(text("LOCK TABLE account, accounting, reconciliation IN EXCLUSIVE MODE"))
|
||||
|
||||
|
||||
async def initialize_accounting():
|
||||
def initialize_accounting():
|
||||
global accounting_initialized
|
||||
if not accounting_initialized:
|
||||
load_accounts_cache()
|
||||
accounting_initialized = True
|
||||
|
||||
|
||||
async def initialize_accounting_runner():
|
||||
global accounting_initialized
|
||||
if not accounting_initialized:
|
||||
accounting_lock()
|
||||
await _initialize_mark_to_market() # set up mark-to-market first, so accounts can value their initial balances
|
||||
await _initialize_accounts()
|
||||
load_accounts_cache()
|
||||
accounting_initialized = True
|
||||
log.info(f'accounting initialized\n\tstablecoins: {config.stablecoins}\n\tquotecoins: {config.quotecoins}\n\tnativecoin: {config.nativecoin}')
|
||||
|
||||
@@ -64,17 +64,23 @@ async def _initialize_accounts():
|
||||
|
||||
async def _initialize_accounts_2():
|
||||
fm = await FeeManager.get()
|
||||
of_account = _ensure_account(fm.order_fee_account_addr, AccountKind.OrderFee)
|
||||
gf_account = _ensure_account(fm.gas_fee_account_addr, AccountKind.GasFee)
|
||||
ff_account = _ensure_account(fm.fill_fee_account_addr, AccountKind.FillFee)
|
||||
exe_accounts = [_ensure_account(account.address, AccountKind.Execution) for account in Account.all()]
|
||||
of_account = ensure_account(fm.order_fee_account_addr, AccountKind.OrderFee)
|
||||
gf_account = ensure_account(fm.gas_fee_account_addr, AccountKind.GasFee)
|
||||
ff_account = ensure_account(fm.fill_fee_account_addr, AccountKind.FillFee)
|
||||
exe_accounts = [ensure_account(account.address, AccountKind.Execution) for account in Account.all()]
|
||||
if current_chain.get().id in [1337, 31337]:
|
||||
log.debug('adjusting debug account balances')
|
||||
await asyncio.gather(
|
||||
*map(adjust_balance, (of_account, gf_account, ff_account, *exe_accounts))
|
||||
)
|
||||
for db_account in db.session.execute(select(DbAccount)).scalars():
|
||||
|
||||
|
||||
def load_accounts_cache(*, chain=None):
|
||||
if chain is None:
|
||||
chain = current_chain.get()
|
||||
for db_account in db.session.execute(select(DbAccount).where(DbAccount.chain==chain)).scalars():
|
||||
_tracked_addrs.add(db_account.address)
|
||||
log.info(f'tracking account {db_account.chain.id} {db_account.address}')
|
||||
|
||||
|
||||
async def _initialize_mark_to_market():
|
||||
@@ -124,13 +130,14 @@ async def _initialize_mark_to_market():
|
||||
add_mark_pool(addr, pool['base'], pool['quote'], pool['fee'])
|
||||
|
||||
|
||||
def _ensure_account(addr: str, kind: AccountKind) -> DbAccount:
|
||||
chain = current_chain.get()
|
||||
def ensure_account(addr: str, kind: AccountKind, *, chain=None) -> DbAccount:
|
||||
if chain is None:
|
||||
chain = current_chain.get()
|
||||
found = db.session.get(DbAccount, (chain, addr))
|
||||
if found:
|
||||
if found.kind != kind:
|
||||
log.warning(f'Account {addr} has wrong kind {found.kind} != {kind}')
|
||||
found.kind = kind
|
||||
# found.kind = kind
|
||||
db.session.add(found)
|
||||
_tracked_addrs.add(found.address)
|
||||
else:
|
||||
@@ -160,20 +167,21 @@ async def accounting_transfer(receipt: TransactionReceiptDict, token: str,
|
||||
block_hash = hexstr(receipt['blockHash'])
|
||||
tx_id = hexstr(receipt['transactionHash'])
|
||||
await asyncio.gather(
|
||||
add_accounting_row( sender, block_hash, tx_id, AccountingCategory.Transfer, None,
|
||||
token, -amount, receiver, adjust_decimals=adjust_decimals),
|
||||
add_accounting_row( receiver, block_hash, tx_id, AccountingCategory.Transfer, None,
|
||||
token, amount, sender, adjust_decimals=adjust_decimals),
|
||||
accounting_transaction_gas(receipt),
|
||||
add_accounting_entry_m2m(sender, block_hash, tx_id, AccountingCategory.Transfer, None,
|
||||
token, -amount, receiver, adjust_decimals=adjust_decimals),
|
||||
add_accounting_entry_m2m(receiver, block_hash, tx_id, AccountingCategory.Transfer, None,
|
||||
token, amount, sender, adjust_decimals=adjust_decimals),
|
||||
)
|
||||
|
||||
|
||||
async def accounting_transaction_gas(receipt: TransactionReceiptDict, subcategory: AccountingSubcategory):
|
||||
async def accounting_transaction_gas(receipt: TransactionReceiptDict, subcategory: AccountingSubcategory = AccountingSubcategory.TransactionGas):
|
||||
""" Accounts for the gas spent on the given transaction """
|
||||
amount = dec(receipt['gasUsed']) * dec(receipt['effectiveGasPrice'])
|
||||
await add_accounting_row( receipt['from'],
|
||||
hexstr(receipt['blockHash']), hexstr(receipt['transactionHash']),
|
||||
AccountingCategory.Expense, subcategory, NATIVE_TOKEN, -amount
|
||||
)
|
||||
await add_accounting_entry_m2m(receipt['from'],
|
||||
hexstr(receipt['blockHash']), hexstr(receipt['transactionHash']),
|
||||
AccountingCategory.Expense, subcategory, NATIVE_TOKEN, -amount
|
||||
)
|
||||
|
||||
|
||||
async def accounting_placement(order_placed: EventData):
|
||||
@@ -186,10 +194,10 @@ async def accounting_placement(order_placed: EventData):
|
||||
log.warning(f'Rogue DexorderPlacedEvent in tx {hexstr(tx_id)}')
|
||||
return
|
||||
fm = await FeeManager.get()
|
||||
await add_accounting_row( fm.order_fee_account_addr, block_hash, tx_id, AccountingCategory.Income,
|
||||
AccountingSubcategory.OrderFee, NATIVE_TOKEN, order_fee)
|
||||
await add_accounting_row( fm.gas_fee_account_addr, block_hash, tx_id, AccountingCategory.Income,
|
||||
AccountingSubcategory.GasFee, NATIVE_TOKEN, gas_fee)
|
||||
await add_accounting_entry_m2m(fm.order_fee_account_addr, block_hash, tx_id, AccountingCategory.Income,
|
||||
AccountingSubcategory.OrderFee, NATIVE_TOKEN, order_fee)
|
||||
await add_accounting_entry_m2m(fm.gas_fee_account_addr, block_hash, tx_id, AccountingCategory.Income,
|
||||
AccountingSubcategory.GasFee, NATIVE_TOKEN, gas_fee)
|
||||
|
||||
|
||||
async def accounting_fill(fill: EventData, out_token: str) -> dec:
|
||||
@@ -200,14 +208,14 @@ async def accounting_fill(fill: EventData, out_token: str) -> dec:
|
||||
tx_id = hexstr(fill['transactionHash'])
|
||||
fee = int(fill['args']['fillFee'])
|
||||
fm = await FeeManager.get()
|
||||
return await add_accounting_row(fm.fill_fee_account_addr, block_hash, tx_id, AccountingCategory.Income,
|
||||
AccountingSubcategory.FillFee, out_token, fee)
|
||||
return await add_accounting_entry_m2m(fm.fill_fee_account_addr, block_hash, tx_id, AccountingCategory.Income,
|
||||
AccountingSubcategory.FillFee, out_token, fee)
|
||||
|
||||
|
||||
async def add_accounting_row(account: str, block_hash: Optional[str], tx_id: Optional[str], category, subcategory, token, amount, note=None,
|
||||
*, adjust_decimals=True) -> dec:
|
||||
async def add_accounting_entry_m2m(account: str, block_hash: Optional[str], tx_id: Optional[str], category, subcategory, token, amount, note=None,
|
||||
*, adjust_decimals=True) -> dec:
|
||||
"""
|
||||
Returns the mark-to-market USD value of the transaction.
|
||||
Returns the mark-to-market USD value of the entry.
|
||||
"""
|
||||
if amount == 0:
|
||||
return dec(0)
|
||||
@@ -221,6 +229,13 @@ async def add_accounting_row(account: str, block_hash: Optional[str], tx_id: Opt
|
||||
value = mark_to_market(token, amount)
|
||||
log.debug(f'accounting row {time} {account} {category} {subcategory} {token} {amount} ${value}')
|
||||
chain_id = current_chain.get().id
|
||||
add_accounting_entry(chain_id, account, time, category, subcategory, token, amount, value, tx_id, note)
|
||||
return value
|
||||
|
||||
|
||||
def add_accounting_entry(chain_id, account, time, category, subcategory, token, amount, value=None, tx_id=None, note=None):
|
||||
if not is_tracked_address(account):
|
||||
return
|
||||
db.session.add(Accounting(account=account,
|
||||
time=time, category=category, subcategory=subcategory,
|
||||
token=token, amount=amount, value=value, note=note,
|
||||
@@ -229,15 +244,17 @@ async def add_accounting_row(account: str, block_hash: Optional[str], tx_id: Opt
|
||||
account_db = db.session.get(DbAccount, (current_chain.get(), account))
|
||||
new_amount = account_db.balances.get(token, dec(0)) + amount
|
||||
if new_amount < 0:
|
||||
log.error(f'negative balance for account {account} when applying accounting row {time} {category} {subcategory} {token} {amount} ${value}')
|
||||
log.error(
|
||||
f'negative balance for account {account} when applying accounting row {time} {category} {subcategory} {token} {amount} ${value}')
|
||||
account_db.balances[token] = new_amount
|
||||
db.session.add(account_db) # deep changes would not be detected by the ORM
|
||||
return value
|
||||
db.session.flush()
|
||||
|
||||
|
||||
async def adjust_balance(account: DbAccount, token=NATIVE_TOKEN, subcategory=AccountingSubcategory.InitialBalance, note=None):
|
||||
true_balance = await get_balance(account.address, token)
|
||||
amount = true_balance - account.balances.get(token, dec(0))
|
||||
await add_accounting_row(account.address, None, None, AccountingCategory.Special, subcategory, NATIVE_TOKEN, amount, note, adjust_decimals=False)
|
||||
await add_accounting_entry_m2m(account.address, None, None, AccountingCategory.Special, subcategory, NATIVE_TOKEN, amount, note, adjust_decimals=False)
|
||||
|
||||
|
||||
async def accounting_reconcile(account: DbAccount, block_id: Optional[str] = None, last_accounting_row_id: Optional[int] = None):
|
||||
65
src/dexorder/accounting/kraken.py
Normal file
65
src/dexorder/accounting/kraken.py
Normal file
@@ -0,0 +1,65 @@
|
||||
import logging
|
||||
import tempfile
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
import krakenex
|
||||
|
||||
from dexorder import timestamp
|
||||
from dexorder.bin.executable import execute
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
kraken_api_key=r'HqPHnGsAHunFtaP8YZTFsyh+LauVrcgFHi/US+RseR/4DiT+NG/JpONV'
|
||||
kraken_api_secret=r'4hvdMdaN5TlNlyk2PShdRCsOE/T4sFzeBrR7ZjC+LUGuAXhBehY8vvWDZSUSyna2OFeOJ9GntPvyXOhrpx70Bg=='
|
||||
|
||||
kraken = krakenex.API()
|
||||
|
||||
|
||||
# start and end should be timestamps or datetimes. inclusiveness is [start,end) as usual
|
||||
def kraken_get_ledger(start=None, end=None):
|
||||
entries = []
|
||||
offset=1 # 1-based ffs
|
||||
if start:
|
||||
start = timestamp(start) - 1 # kraken start is EXCLUSIVE for some reason
|
||||
if end:
|
||||
end = timestamp(end) - 1 # kraken end is INCLUSIVE. :/
|
||||
while True:
|
||||
kl = kraken.query_private('Ledgers', {'start':start, 'end':end, 'ofs':offset})
|
||||
print(repr(kl))
|
||||
break
|
||||
if kl.empty:
|
||||
break
|
||||
for t in kl.itertuples():
|
||||
print(t)
|
||||
# noinspection PyShadowingBuiltins
|
||||
offset += len(kl)
|
||||
return entries
|
||||
|
||||
|
||||
@dataclass
|
||||
class KrakenConfig:
|
||||
kraken_api_key: Optional[str] = None
|
||||
kraken_api_secret: Optional[str] = None
|
||||
kraken_start: Optional[str]= None # timestamp or date
|
||||
kraken_end: Optional[str] = None # timestamp or date
|
||||
|
||||
async def main(kconfig: KrakenConfig):
|
||||
load_kraken_key(kconfig)
|
||||
kraken_get_ledger()
|
||||
|
||||
|
||||
def load_kraken_key(kconfig):
|
||||
temp = tempfile.NamedTemporaryFile()
|
||||
if not kconfig.kraken_api_key or not kconfig.kraken_api_secret:
|
||||
log.error("Must set kraken_api_key= and kraken_api_secret= on the command line")
|
||||
exit(1)
|
||||
temp.write(kconfig.kraken_api_key.encode())
|
||||
temp.write(b'\n')
|
||||
temp.write(kconfig.kraken_api_secret.encode())
|
||||
temp.write(b'\n')
|
||||
kraken.load_key(temp.name)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
execute(main, parse_args=KrakenConfig)
|
||||
@@ -2,16 +2,15 @@ import logging
|
||||
from typing import TypedDict
|
||||
|
||||
from dexorder import db
|
||||
from dexorder.base import OldPoolDict, OldGMXDict, OldTokenDict
|
||||
from dexorder.base.chain import current_chain
|
||||
from dexorder.blockstate import BlockDict
|
||||
from dexorder.database.model import Pool
|
||||
from dexorder.database.model.pool import OldPoolDict
|
||||
from dexorder.database.model.token import Token, OldTokenDict
|
||||
from dexorder.database.model import Pool, Token
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# address_metadata is a polymorphic BlockDict which maps address keys to a dict of metadata describing the address
|
||||
# used for Tokens and Pools
|
||||
# used for Tokens and Pools and GMX Markets
|
||||
|
||||
|
||||
class AddressMetadata (TypedDict):
|
||||
@@ -19,7 +18,9 @@ class AddressMetadata (TypedDict):
|
||||
|
||||
|
||||
def save_addrmeta(address: str, meta: AddressMetadata):
|
||||
if meta['type'] == 'Token':
|
||||
if meta is None:
|
||||
pass
|
||||
elif meta['type'] == 'Token':
|
||||
meta: OldTokenDict
|
||||
updated = Token.load(meta)
|
||||
token = db.session.get(Token, (current_chain.get().id, address))
|
||||
@@ -43,8 +44,10 @@ def save_addrmeta(address: str, meta: AddressMetadata):
|
||||
pool.quote = updated.quote
|
||||
pool.fee = updated.fee
|
||||
pool.decimals = updated.decimals
|
||||
elif meta['type'] == 'GMX':
|
||||
pass
|
||||
else:
|
||||
log.warning(f'Address {address} had unknown metadata type {meta["type"]}')
|
||||
|
||||
|
||||
address_metadata: BlockDict[str,AddressMetadata] = BlockDict('a', redis=True, db=True, finalize_cb=save_addrmeta)
|
||||
address_metadata: BlockDict[str,OldPoolDict|OldTokenDict|OldGMXDict] = BlockDict('a', redis=True, db=True, finalize_cb=save_addrmeta)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from abc import abstractmethod
|
||||
from dataclasses import dataclass
|
||||
from typing import TypedDict, Union, Type, Any, Callable
|
||||
from typing import TypedDict, Union, Any, Callable
|
||||
from dexorder.base.metadecl import OldTokenDict, OldPoolDict, OldGMXDict
|
||||
|
||||
Address = str
|
||||
Quantity = Union[str,int]
|
||||
|
||||
65
src/dexorder/base/metadecl.py
Normal file
65
src/dexorder/base/metadecl.py
Normal file
@@ -0,0 +1,65 @@
|
||||
import logging
|
||||
from typing import TypedDict, NotRequired
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TokenDict (TypedDict):
|
||||
"""
|
||||
Token metadata dictionary
|
||||
|
||||
Fields:
|
||||
a: The address of the token.
|
||||
n: The name of the token.
|
||||
s: The symbol of the token.
|
||||
d: Number of decimals.
|
||||
l: Indicates if approved ("listed").
|
||||
g: gmx synthetic flag
|
||||
x: Optional extra data.
|
||||
"""
|
||||
|
||||
a: str
|
||||
n: str
|
||||
s: str
|
||||
d: int
|
||||
l: NotRequired[bool]
|
||||
g: NotRequired[bool]
|
||||
x: NotRequired[dict]
|
||||
|
||||
|
||||
# OldTokenDict is the primary dict we use in-memory, with basic JSON-able types
|
||||
|
||||
class OldTokenDict (TypedDict):
|
||||
type: str
|
||||
chain: int
|
||||
address: str
|
||||
name: str
|
||||
symbol: str
|
||||
decimals: int
|
||||
approved: bool # whether this token is in the whitelist or not
|
||||
x: NotRequired[dict] # extra data
|
||||
|
||||
|
||||
class OldPoolDict (TypedDict):
|
||||
type: str
|
||||
chain: int
|
||||
address: str
|
||||
exchange: int
|
||||
base: str
|
||||
quote: str
|
||||
fee: int
|
||||
decimals: int
|
||||
|
||||
|
||||
|
||||
class OldGMXDict (TypedDict):
|
||||
type: str
|
||||
chain: int
|
||||
address: str
|
||||
exchange: int
|
||||
index: str
|
||||
long: str
|
||||
short: str
|
||||
leverage: int
|
||||
decimals: int
|
||||
|
||||
@@ -4,7 +4,7 @@ from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import Optional
|
||||
|
||||
from dexorder import timestamp
|
||||
from dexorder import timestamp, from_timestamp
|
||||
from dexorder.util import hexbytes
|
||||
from dexorder.util.convert import decode_IEEE754
|
||||
|
||||
@@ -37,9 +37,10 @@ class SwapOrderState (Enum):
|
||||
|
||||
|
||||
class Exchange (Enum):
|
||||
Unknown = -1
|
||||
UniswapV2 = 0
|
||||
UniswapV3 = 1
|
||||
Unknown = -1
|
||||
OTC = 0
|
||||
UniswapV3 = 1
|
||||
GMX = 2
|
||||
|
||||
@dataclass
|
||||
class Route:
|
||||
@@ -75,6 +76,20 @@ class Line:
|
||||
return self.intercept, self.slope
|
||||
|
||||
|
||||
@dataclass
|
||||
class GMXOrder:
|
||||
reserve_amount: int # todo
|
||||
is_long: bool
|
||||
is_increase: bool
|
||||
|
||||
@staticmethod
|
||||
def load(obj: Optional[tuple[int,bool,bool]]):
|
||||
return GMXOrder(*obj) if obj is not None else None
|
||||
|
||||
def dump(self):
|
||||
return self.reserve_amount, self.is_long, self.is_increase
|
||||
|
||||
|
||||
@dataclass
|
||||
class SwapOrder:
|
||||
tokenIn: str
|
||||
@@ -87,6 +102,7 @@ class SwapOrder:
|
||||
inverted: bool
|
||||
conditionalOrder: int
|
||||
tranches: list['Tranche']
|
||||
gmx: Optional[GMXOrder] = None
|
||||
|
||||
@property
|
||||
def min_input_amount(self):
|
||||
@@ -95,7 +111,7 @@ class SwapOrder:
|
||||
@staticmethod
|
||||
def load(obj):
|
||||
return SwapOrder(obj[0], obj[1], Route.load(obj[2]), int(obj[3]), int(obj[4]), obj[5], obj[6], obj[7], obj[8],
|
||||
[Tranche.load(t) for t in obj[9]])
|
||||
[Tranche.load(t) for t in obj[9]], GMXOrder.load(obj[10]) if len(obj) > 10 else None)
|
||||
|
||||
@staticmethod
|
||||
def load_from_chain(obj):
|
||||
@@ -106,7 +122,8 @@ class SwapOrder:
|
||||
return (self.tokenIn, self.tokenOut, self.route.dump(),
|
||||
str(self.amount), str(self.minFillAmount), self.amountIsInput,
|
||||
self.outputDirectlyToOwner, self.inverted, self.conditionalOrder,
|
||||
[t.dump() for t in self.tranches])
|
||||
[t.dump() for t in self.tranches],
|
||||
self.gmx.dump() if self.gmx is not None else None)
|
||||
|
||||
def __str__(self):
|
||||
msg = f'''
|
||||
@@ -250,6 +267,26 @@ class ElaboratedSwapOrderStatus:
|
||||
def copy(self):
|
||||
return copy.deepcopy(self)
|
||||
|
||||
def __str__(self):
|
||||
msg = f'''
|
||||
SwapOrder
|
||||
status: {self.state.name}
|
||||
in: {self.order.tokenIn}
|
||||
out: {self.order.tokenOut}
|
||||
exchange: {self.order.route.exchange.name, self.order.route.fee}
|
||||
amount: {"input" if self.order.amountIsInput else "output"} {self.filledIn if self.order.amountIsInput else self.filledOut}/{self.order.amount}{" to owner" if self.order.outputDirectlyToOwner else ""}
|
||||
minFill: {self.order.minFillAmount}
|
||||
inverted: {self.order.inverted}
|
||||
tranches:
|
||||
'''
|
||||
for i in range(len(self.trancheStatus)):
|
||||
tranche = self.order.tranches[i]
|
||||
ts = self.trancheStatus[i]
|
||||
msg += f' {tranche}\n'
|
||||
for fill in ts.fills:
|
||||
msg += f' {fill}\n'
|
||||
return msg
|
||||
|
||||
|
||||
NO_OCO = 18446744073709551615 # max uint64
|
||||
|
||||
@@ -263,6 +300,9 @@ DISTANT_FUTURE = 4294967295 # max uint32
|
||||
|
||||
MAX_FRACTION = 65535 # max uint16
|
||||
|
||||
MIN_SLIPPAGE = 0.0001 # one bip
|
||||
MIN_SLIPPAGE_EPSILON = 0.000000000003
|
||||
|
||||
|
||||
@dataclass
|
||||
class Tranche:
|
||||
@@ -344,7 +384,7 @@ class Tranche:
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
msg = f'{self.fraction/MAX_FRACTION:.1%} {"start+" if self.startTimeIsRelative else ""}{self.startTime} to {"start+" if self.startTimeIsRelative else ""}{self.endTime}'
|
||||
msg = f'{self.fraction/MAX_FRACTION:.1%} {"start+" if self.startTimeIsRelative else ""}{from_timestamp(self.startTime)} to {"start+" if self.startTimeIsRelative else ""}{from_timestamp(self.endTime)}'
|
||||
if self.marketOrder:
|
||||
# for marketOrders, minLine.intercept is the slippage
|
||||
msg += f' market order slippage {self.minLine.intercept:.2%}'
|
||||
@@ -352,11 +392,11 @@ class Tranche:
|
||||
if self.minLine.intercept or self.minLine.slope:
|
||||
msg += f' >{self.minLine.intercept:.5g}'
|
||||
if self.minLine.slope:
|
||||
msg += f'{self.minLine.slope:+.5g}/s({self.minLine.value():5g})'
|
||||
msg += f'{self.minLine.slope:+.5g}/s={self.minLine.value():5g}'
|
||||
if self.maxLine.intercept or self.maxLine.slope:
|
||||
msg += f' <{self.maxLine.intercept:.5g}'
|
||||
if self.maxLine.slope:
|
||||
msg += f'{self.maxLine.slope:+.5g}/s({self.maxLine.value():5g})'
|
||||
msg += f'{self.maxLine.slope:+.5g}/s={self.maxLine.value():5g}'
|
||||
if self.rateLimitPeriod:
|
||||
msg += f' {self.rateLimitFraction/MAX_FRACTION:.1%} every {self.rateLimitPeriod/60:.0} minutes'
|
||||
return msg
|
||||
|
||||
@@ -14,7 +14,7 @@ from dexorder.blockstate.fork import Fork
|
||||
from dexorder.configuration import parse_args
|
||||
from dexorder.contract import get_contract_event
|
||||
from dexorder.database import db
|
||||
from dexorder.event_handler import check_ohlc_rollover, handle_uniswap_swaps
|
||||
from dexorder.event_handler import handle_uniswap_swaps
|
||||
from dexorder.memcache import memcache
|
||||
from dexorder.memcache.memcache_state import RedisState, publish_all
|
||||
from dexorder.ohlc import recent_ohlcs, ohlc_save, ohlcs
|
||||
@@ -58,7 +58,7 @@ async def main():
|
||||
|
||||
runner = BlockStateRunner(state, publish_all=publish_all if redis_state else None, timer_period=0)
|
||||
runner.add_event_trigger(handle_uniswap_swaps, get_contract_event('IUniswapV3PoolEvents', 'Swap'), multi=True)
|
||||
runner.add_callback(check_ohlc_rollover)
|
||||
# runner.add_callback(check_ohlc_rollover)
|
||||
runner.on_promotion.append(finalize_callback)
|
||||
if db:
|
||||
# noinspection PyUnboundLocalVariable
|
||||
@@ -74,4 +74,4 @@ async def main():
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
execute(main())
|
||||
execute(main)
|
||||
|
||||
@@ -37,4 +37,4 @@ if __name__ == '__main__':
|
||||
time = parse_date(sys.argv[1], ignoretz=True).replace(tzinfo=timezone.utc)
|
||||
seconds_per_block = float(sys.argv[2])
|
||||
sys.argv = [sys.argv[0], *sys.argv[3:]]
|
||||
execute(main())
|
||||
execute(main)
|
||||
|
||||
95
src/dexorder/bin/examine.py
Normal file
95
src/dexorder/bin/examine.py
Normal file
@@ -0,0 +1,95 @@
|
||||
import argparse
|
||||
import logging
|
||||
|
||||
from dexorder import db, blockchain
|
||||
from dexorder.base.order import OrderKey
|
||||
from dexorder.blocks import current_block, get_block
|
||||
from dexorder.blockstate import current_blockstate
|
||||
from dexorder.blockstate.blockdata import BlockData
|
||||
from dexorder.blockstate.db_state import DbState
|
||||
from dexorder.blockstate.fork import current_fork
|
||||
from dexorder.contract.dexorder import VaultContract
|
||||
from dexorder.order.orderstate import Order
|
||||
from dexorder.tokens import adjust_decimals
|
||||
from dexorder.util import json
|
||||
from dexorder.vault_blockdata import vault_balances, pretty_balances
|
||||
from dexorder.bin.executable import execute
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
async def dump_orders(orders, args):
|
||||
if args.json:
|
||||
print(json.dumps([order.status.dump() for order in orders]))
|
||||
else:
|
||||
first = True
|
||||
for order in orders:
|
||||
if first:
|
||||
first = False
|
||||
else:
|
||||
print()
|
||||
print(await order.pprint())
|
||||
|
||||
def command_vault_argparse(subparsers):
|
||||
parser = subparsers.add_parser('vault', help='show the vault\'s balances and orders')
|
||||
parser.add_argument('address', help='address of the vault')
|
||||
parser.add_argument('--all', help='show all orders including closed ones', action='store_true')
|
||||
parser.add_argument('--json', help='output in JSON format', action='store_true')
|
||||
|
||||
async def command_vault(args):
|
||||
balances = vault_balances.get(args.address, {})
|
||||
print(f'Vault {args.address} v{await VaultContract(args.address).version()}')
|
||||
print(f'Balances:')
|
||||
print(pretty_balances({k: (await adjust_decimals(k, v)) for k, v in balances.items()}))
|
||||
print(f'Orders:')
|
||||
i = 0
|
||||
orders = []
|
||||
while True:
|
||||
key = OrderKey(args.address, i)
|
||||
try:
|
||||
order = Order.of(key)
|
||||
except KeyError:
|
||||
break
|
||||
if args.all or order.is_open:
|
||||
orders.append(order)
|
||||
i += 1
|
||||
await dump_orders(orders, args)
|
||||
|
||||
|
||||
def command_open_argparse(subparsers):
|
||||
parser = subparsers.add_parser('open', help='show all open orders')
|
||||
parser.add_argument('--json', help='output in JSON format', action='store_true')
|
||||
|
||||
|
||||
async def command_open(args):
|
||||
await dump_orders([Order.of(key) for key in Order.open_orders], args)
|
||||
|
||||
|
||||
async def main(args: list):
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--chain-id', default=None)
|
||||
subparsers = parser.add_subparsers(dest='command')
|
||||
for name in globals():
|
||||
if name.startswith('command_') and name.endswith('_argparse'):
|
||||
globals()[name](subparsers)
|
||||
parsed = parser.parse_args(args)
|
||||
print(parsed)
|
||||
try:
|
||||
subcommand = globals()[f'command_{parsed.command}']
|
||||
except KeyError:
|
||||
parser.print_help()
|
||||
exit(1)
|
||||
await blockchain.connect()
|
||||
db.connect()
|
||||
db_state = DbState(BlockData.by_opt('db'))
|
||||
with db.transaction():
|
||||
state = await db_state.load()
|
||||
# state.readonly = True
|
||||
current_blockstate.set(state)
|
||||
block = await get_block(state.root_hash)
|
||||
current_block.set(block)
|
||||
current_fork.set(state.root_fork)
|
||||
await subcommand(parsed)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
execute(main, parse_args=True)
|
||||
@@ -7,10 +7,14 @@ import tomllib
|
||||
from asyncio import CancelledError
|
||||
from signal import Signals
|
||||
from traceback import print_exception
|
||||
from typing import Coroutine
|
||||
from typing import Coroutine, Callable, Union, Any
|
||||
|
||||
from omegaconf import OmegaConf
|
||||
|
||||
from dexorder import configuration, config
|
||||
from dexorder.alert import init_alerts
|
||||
from dexorder.configuration.load import config_file
|
||||
from dexorder.configuration.schema import Config
|
||||
from dexorder.metric.metric_startup import start_metrics_server
|
||||
|
||||
if __name__ == '__main__':
|
||||
@@ -25,7 +29,27 @@ async def _shutdown_coro(_sig, _loop):
|
||||
if task is not this_task:
|
||||
task.cancel()
|
||||
|
||||
def execute(main:Coroutine, shutdown=None, *, parse_logging=True, parse_args=True):
|
||||
|
||||
def split_args():
|
||||
omegaconf_args = []
|
||||
regular_args = []
|
||||
for arg in sys.argv[1:]:
|
||||
if '=' in arg and not arg.startswith('--'):
|
||||
key, value = arg.split('=', 1)
|
||||
if hasattr(Config, key):
|
||||
omegaconf_args.append(arg)
|
||||
continue
|
||||
regular_args.append(arg)
|
||||
return omegaconf_args, regular_args
|
||||
|
||||
|
||||
def execute(main:Callable[...,Coroutine[Any,Any,Any]], shutdown=None, *, parse_logging=True,
|
||||
parse_args: Union[Callable[[list[str]],Any], type, bool]=True):
|
||||
"""
|
||||
if parse_args is a function, then the command-line arguments are given to OmegaConf first, and any args parsed by
|
||||
OmegaConf are stripped from the args list. The remaining args are then passed to parse_args(args)
|
||||
if parse_args is a type, then the type is used to parse the extra command-line arguments using OmegaConf.
|
||||
"""
|
||||
# config
|
||||
configured = False
|
||||
if parse_logging:
|
||||
@@ -42,10 +66,24 @@ def execute(main:Coroutine, shutdown=None, *, parse_logging=True, parse_args=Tru
|
||||
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
|
||||
log.setLevel(logging.DEBUG)
|
||||
log.info('Logging configured to default')
|
||||
log.info(f'Loaded main config from {config_file}')
|
||||
xconf = None
|
||||
if parse_args:
|
||||
# NOTE: there is special command-line argument handling in config/load.py to get a config filename.
|
||||
# The -c/--config flag MUST BE FIRST if present.
|
||||
configuration.parse_args()
|
||||
# The rest of the arguments are split by format into key=value for omegaconf and anything else is "regular args"
|
||||
omegaconf_args, regular_args = split_args()
|
||||
configuration.parse_args(omegaconf_args)
|
||||
# must check for `type` before `callable`, because types are also callables
|
||||
if isinstance(parse_args, type):
|
||||
# noinspection PyUnboundLocalVariable
|
||||
xconf = OmegaConf.merge(OmegaConf.structured(parse_args), OmegaConf.from_cli(regular_args))
|
||||
elif callable(parse_args):
|
||||
# noinspection PyUnboundLocalVariable
|
||||
xconf = parse_args(regular_args)
|
||||
else:
|
||||
# just pass the regular args to main
|
||||
xconf = regular_args
|
||||
|
||||
init_alerts()
|
||||
|
||||
@@ -59,7 +97,14 @@ def execute(main:Coroutine, shutdown=None, *, parse_logging=True, parse_args=Tru
|
||||
loop.add_signal_handler(s, lambda sig=s: asyncio.create_task(_shutdown_coro(sig, loop), name=f'{s.name} handler'))
|
||||
|
||||
# main
|
||||
task = loop.create_task(main, name='main')
|
||||
num_args = len(inspect.signature(main).parameters)
|
||||
if num_args == 0:
|
||||
coro = main()
|
||||
elif num_args == 1:
|
||||
coro = main(xconf)
|
||||
else:
|
||||
raise Exception(f'main() must accept 0 or 1 arguments, not {num_args}')
|
||||
task = loop.create_task(coro, name='main')
|
||||
try:
|
||||
loop.run_until_complete(task)
|
||||
except CancelledError:
|
||||
|
||||
@@ -11,8 +11,9 @@ from dexorder.bin.executable import execute
|
||||
from dexorder.blocks import get_block_timestamp, get_block
|
||||
from dexorder.blockstate.fork import current_fork
|
||||
from dexorder.configuration import parse_args
|
||||
from dexorder.contract import get_contract_event
|
||||
from dexorder.event_handler import wire_dexorder_debug
|
||||
from dexorder.final_ohlc import FinalOHLCRepository
|
||||
from dexorder.gmx import gmx_wire_runner_late, gmx_wire_runner_early
|
||||
from dexorder.pools import get_uniswap_data
|
||||
from dexorder.util import hexstr
|
||||
from dexorder.util.shutdown import fatal
|
||||
@@ -56,10 +57,15 @@ async def main():
|
||||
ohlcs = FinalOHLCRepository()
|
||||
await blockchain.connect()
|
||||
walker = BlockWalker(flush_callback, timedelta(seconds=config.walker_flush_interval))
|
||||
walker.add_event_trigger(handle_backfill_uniswap_swaps,
|
||||
get_contract_event('IUniswapV3PoolEvents', 'Swap'), multi=True)
|
||||
# gmx_wire_runner_early(walker, backfill=ohlcs)
|
||||
gmx_wire_runner_early(walker) # todo re-enable backfill
|
||||
wire_dexorder_debug(walker)
|
||||
# todo re-enable uniswap
|
||||
# walker.add_event_trigger(handle_backfill_uniswap_swaps,
|
||||
# get_contract_event('IUniswapV3PoolEvents', 'Swap'), multi=True)
|
||||
gmx_wire_runner_late(walker)
|
||||
await walker.run()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
execute(main())
|
||||
execute(main)
|
||||
|
||||
@@ -2,7 +2,7 @@ import logging
|
||||
from asyncio import CancelledError
|
||||
|
||||
from dexorder import db, blockchain
|
||||
from dexorder.accounting import initialize_accounting
|
||||
from dexorder.accounting import initialize_accounting_runner
|
||||
from dexorder.alert import infoAlert
|
||||
from dexorder.base.chain import current_chain
|
||||
from dexorder.bin.executable import execute
|
||||
@@ -14,7 +14,11 @@ from dexorder.contract import get_contract_event
|
||||
from dexorder.contract.dexorder import get_dexorder_contract
|
||||
from dexorder.event_handler import (init, dump_log, handle_vault_created, handle_order_placed,
|
||||
handle_transfer, handle_swap_filled, handle_order_canceled, handle_order_cancel_all,
|
||||
handle_uniswap_swaps, handle_vault_impl_changed, update_metrics)
|
||||
handle_uniswap_swaps, handle_vault_impl_changed, update_metrics,
|
||||
activate_new_price_triggers)
|
||||
from dexorder.gmx import gmx_wire_runner_early, gmx_wire_runner_late
|
||||
from dexorder.gmx._handle import gmx_wire_runner_init
|
||||
from dexorder.marks import publish_marks
|
||||
from dexorder.memcache import memcache
|
||||
from dexorder.memcache.memcache_state import RedisState, publish_all
|
||||
from dexorder.order.executionhandler import handle_dexorderexecutions, execute_tranches
|
||||
@@ -60,20 +64,23 @@ def setup_logevent_triggers(runner):
|
||||
|
||||
runner.add_callback(check_activate_orders)
|
||||
runner.add_callback(init)
|
||||
gmx_wire_runner_init(runner)
|
||||
|
||||
runner.add_event_trigger(handle_transaction_receipts)
|
||||
runner.add_event_trigger(handle_vault_created, get_contract_event('Vault', 'VaultCreated'))
|
||||
runner.add_event_trigger(handle_vault_impl_changed, get_contract_event('Vault', 'VaultImplChanged'))
|
||||
runner.add_event_trigger(handle_order_placed, get_contract_event('VaultImpl', 'DexorderSwapPlaced'))
|
||||
gmx_wire_runner_early(runner) # must come after DexorderSwapPlaced so the GMXOrder event can add data to the existing order
|
||||
runner.add_event_trigger(handle_transfer, get_contract_event('ERC20', 'Transfer'))
|
||||
runner.add_event_trigger(handle_uniswap_swaps, get_contract_event('IUniswapV3PoolEvents', 'Swap'), multi=True)
|
||||
runner.add_event_trigger(handle_swap_filled, get_contract_event('VaultImpl', 'DexorderSwapFilled'))
|
||||
runner.add_event_trigger(handle_order_canceled, get_contract_event('VaultImpl', 'DexorderSwapCanceled'))
|
||||
runner.add_event_trigger(handle_order_cancel_all, get_contract_event('VaultImpl', 'DexorderCancelAll'))
|
||||
|
||||
gmx_wire_runner_late(runner)
|
||||
runner.add_event_trigger(handle_dexorderexecutions, executions)
|
||||
runner.add_event_trigger(handle_vault_creation_requests)
|
||||
|
||||
runner.add_event_trigger(activate_new_price_triggers)
|
||||
runner.add_callback(end_trigger_updates)
|
||||
runner.add_callback(execute_tranches)
|
||||
|
||||
@@ -83,6 +90,7 @@ def setup_logevent_triggers(runner):
|
||||
# runner.add_callback(adjust_gas)
|
||||
|
||||
runner.add_callback(cleanup_jobs)
|
||||
runner.add_callback(publish_marks)
|
||||
runner.add_callback(update_metrics)
|
||||
|
||||
|
||||
@@ -115,9 +123,9 @@ async def main():
|
||||
if redis_state:
|
||||
# load initial state
|
||||
log.info('initializing redis with root state')
|
||||
await redis_state.save(state.root_fork, state.diffs_by_branch[state.root_branch.id])
|
||||
await redis_state.init(state, state.root_fork)
|
||||
|
||||
await initialize_accounting()
|
||||
await initialize_accounting_runner()
|
||||
|
||||
runner = BlockStateRunner(state, publish_all=publish_all if redis_state else None)
|
||||
setup_logevent_triggers(runner)
|
||||
@@ -138,4 +146,4 @@ async def main():
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
execute(main())
|
||||
execute(main)
|
||||
|
||||
@@ -95,7 +95,7 @@ async def write_metadata( pools, mirror_pools ):
|
||||
pool_dicts = [get_pool(addr) for (addr,_inverted) in mirror_pools]
|
||||
pool_dicts = await asyncio.gather(*pool_dicts)
|
||||
for data, addr, (_,inverted) in zip(pool_dicts, pools, mirror_pools):
|
||||
data['x'] = dict(data=dict(uri=f'https://app.dexorder.trade/ohlc/', chain=42161, symbol=addr, inverted=inverted))
|
||||
data['x'] = dict(data=dict(uri=f'https://app.dexorder.com/ohlc/', chain=42161, symbol=addr, inverted=inverted))
|
||||
tokens = set(p['base'] for p in pool_dicts)
|
||||
tokens.update(p['quote'] for p in pool_dicts)
|
||||
tokens = await asyncio.gather(*[get_token(t) for t in tokens])
|
||||
@@ -190,6 +190,7 @@ async def main():
|
||||
while True:
|
||||
wake_up = now() + delay
|
||||
# log.debug(f'querying {pool}')
|
||||
tx = None
|
||||
try:
|
||||
price = await get_pool_price(pool)
|
||||
if price != last_prices.get(pool):
|
||||
@@ -200,7 +201,10 @@ async def main():
|
||||
addr, inverted = mirror_pools[pool]
|
||||
log.debug(f'Mirrored {addr} {price}')
|
||||
except Exception as x:
|
||||
log.debug(f'Could not update {pool}: {x}')
|
||||
log.debug(f'Could not update {pool}: {x} {tx}')
|
||||
if tx is not None:
|
||||
tx.account.reset_nonce()
|
||||
tx.account.release()
|
||||
continue
|
||||
try:
|
||||
pool = next(pool_iter)
|
||||
@@ -216,4 +220,4 @@ async def main():
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
execute(main())
|
||||
execute(main)
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
import logging
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy import select, text
|
||||
|
||||
from dexorder import db, blockchain
|
||||
from dexorder.accounting import accounting_reconcile, accounting_lock
|
||||
from dexorder.accounting import accounting_reconcile
|
||||
from dexorder.bin.executable import execute
|
||||
from dexorder.blocks import fetch_latest_block, current_block
|
||||
from dexorder.database.model import DbAccount
|
||||
@@ -15,7 +15,7 @@ async def main():
|
||||
db.connect()
|
||||
block = await fetch_latest_block()
|
||||
current_block.set(block)
|
||||
accounting_lock()
|
||||
db.session.execute(text("LOCK TABLE account, accounting, reconciliation IN EXCLUSIVE MODE"))
|
||||
try:
|
||||
accounts = db.session.execute(select(DbAccount)).scalars().all()
|
||||
for account in accounts:
|
||||
@@ -28,5 +28,4 @@ async def main():
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
execute(main())
|
||||
|
||||
execute(main)
|
||||
|
||||
@@ -1,14 +1,23 @@
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
|
||||
from dexorder import blockchain, db
|
||||
from dexorder import dec
|
||||
from dexorder.bin.executable import execute
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
async def main():
|
||||
await blockchain.connect()
|
||||
db.connect()
|
||||
|
||||
@dataclass
|
||||
class RefillConfig:
|
||||
refill_level: dec
|
||||
refill_accounts: list[str]
|
||||
|
||||
|
||||
async def main(refill_config: RefillConfig):
|
||||
# await blockchain.connect()
|
||||
# db.connect()
|
||||
log.info(f'Refilling to {refill_config.refill_level:.18f} ETH')
|
||||
log.info(f'Refilling accounts: {refill_config.refill_accounts}')
|
||||
|
||||
if __name__ == '__main__':
|
||||
execute(main())
|
||||
execute(main, parse_args=RefillConfig)
|
||||
|
||||
@@ -22,6 +22,11 @@ from dexorder.util import hexbytes
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def blocktime():
|
||||
""" timestamp of the most recent block seen in real-time, NOT the current block being worked on """
|
||||
return latest_block[current_chain.get().id].timestamp
|
||||
|
||||
|
||||
async def get_block_timestamp(block_id: Union[bytes,int]) -> int:
|
||||
block = await get_block(block_id)
|
||||
if block is None:
|
||||
|
||||
@@ -52,7 +52,7 @@ class BlockData (Generic[T]):
|
||||
def setitem(self, item, value: T, overwrite=True):
|
||||
state = current_blockstate.get()
|
||||
fork = current_fork.get()
|
||||
state.set(fork, self.series, item, value, overwrite)
|
||||
return state.set(fork, self.series, item, value, overwrite)
|
||||
|
||||
def getitem(self, item, default=NARG) -> T:
|
||||
state = current_blockstate.get()
|
||||
@@ -63,9 +63,11 @@ class BlockData (Generic[T]):
|
||||
result = default
|
||||
if self.lazy_getitem:
|
||||
lazy = self.lazy_getitem(self, item)
|
||||
if lazy is not NARG:
|
||||
state.set(state.root_fork, self.series, item, lazy)
|
||||
if lazy is not NARG and lazy is not DELETE:
|
||||
state.set(state.root_fork, self.series, item, lazy, readonly_override=True)
|
||||
result = lazy
|
||||
if result is DELETE:
|
||||
result = default
|
||||
if result is NARG:
|
||||
raise KeyError
|
||||
return result
|
||||
@@ -142,7 +144,7 @@ class BlockSet(Generic[T], Iterable[T], BlockData[T]):
|
||||
return self.contains(item)
|
||||
|
||||
def __iter__(self) -> Iterator[T]:
|
||||
yield from (k for k,v in self.iter_items(self.series))
|
||||
return self.iter_keys(self.series)
|
||||
|
||||
|
||||
class BlockDict(Generic[K,V], BlockData[V]):
|
||||
@@ -162,6 +164,9 @@ class BlockDict(Generic[K,V], BlockData[V]):
|
||||
def __contains__(self, item: K) -> bool:
|
||||
return self.contains(item)
|
||||
|
||||
def __iter__(self) -> Iterator[K]:
|
||||
return self.iter_keys(self.series)
|
||||
|
||||
def items(self) -> Iterable[tuple[K,V]]:
|
||||
return self.iter_items(self.series)
|
||||
|
||||
|
||||
@@ -53,7 +53,10 @@ class BlockState:
|
||||
with a diff height of the root branch or older is always part of the finalized blockchain.
|
||||
"""
|
||||
|
||||
class ReadOnlyError(Exception): ...
|
||||
|
||||
def __init__(self):
|
||||
self.readonly = False
|
||||
self._root_branch: Optional[Branch] = None
|
||||
self._root_fork: Optional[Fork] = None
|
||||
self.height: int = 0 # highest branch seen
|
||||
@@ -80,6 +83,8 @@ class BlockState:
|
||||
|
||||
@root_branch.setter
|
||||
def root_branch(self, value: Branch):
|
||||
if self.readonly:
|
||||
raise self.ReadOnlyError()
|
||||
self._root_branch = value
|
||||
self._root_fork = Fork([value])
|
||||
|
||||
@@ -92,6 +97,8 @@ class BlockState:
|
||||
return self._root_branch.head
|
||||
|
||||
def init_root_block(self, root_block: Block) -> Fork:
|
||||
if self.readonly:
|
||||
raise self.ReadOnlyError()
|
||||
assert self.root_branch is None
|
||||
return self.add_branch(Branch.from_block(root_block))
|
||||
|
||||
@@ -113,6 +120,8 @@ class BlockState:
|
||||
should only be set to False when it is assured that the branch may be joined by height alone, because
|
||||
the branch join is known to be at a live-blockchain-finalized height.
|
||||
"""
|
||||
if self.readonly:
|
||||
raise self.ReadOnlyError()
|
||||
assert branch.id not in self.branches_by_id
|
||||
|
||||
if self.root_branch is None:
|
||||
@@ -155,6 +164,8 @@ class BlockState:
|
||||
|
||||
|
||||
def remove_branch(self, branch: Branch, *, remove_series_diffs=True):
|
||||
if self.readonly:
|
||||
raise self.ReadOnlyError()
|
||||
if branch.height == self.height and len(self.branches_by_height[branch.height]) == 1:
|
||||
# this is the only branch at this height: compute the new lower height
|
||||
other_heights = [b.height for b in self.branches_by_id.values() if b is not branch]
|
||||
@@ -210,7 +221,9 @@ class BlockState:
|
||||
return DELETE
|
||||
|
||||
|
||||
def set(self, fork: Fork, series, key, value, overwrite=True):
|
||||
def set(self, fork: Fork, series, key, value, overwrite=True, *, readonly_override=False):
|
||||
if not readonly_override and self.readonly:
|
||||
raise self.ReadOnlyError()
|
||||
# first look for an existing value
|
||||
branch = fork.branch
|
||||
diffs = self.diffs_by_series.get(series,{}).get(key)
|
||||
@@ -219,8 +232,9 @@ class BlockState:
|
||||
for diff in diffs:
|
||||
if diff.branch_id == branch.id:
|
||||
# if there's an existing value for this branch, we replace it
|
||||
old_value = diff.value
|
||||
diff.value = value
|
||||
return
|
||||
return old_value
|
||||
elif self._fork_has_diff(fork, diff):
|
||||
# if there's an existing value on this fork, remember it
|
||||
old_value = diff.value
|
||||
@@ -236,6 +250,8 @@ class BlockState:
|
||||
return old_value
|
||||
|
||||
def unload(self, fork: Optional[Fork], series, key):
|
||||
if self.readonly:
|
||||
raise self.ReadOnlyError()
|
||||
self.unloads[fork.branch_id].append((series, key))
|
||||
|
||||
def iteritems(self, fork: Optional[Fork], series):
|
||||
@@ -285,6 +301,8 @@ class BlockState:
|
||||
|
||||
Returns the set of diffs for the promoted fork.
|
||||
"""
|
||||
if self.readonly:
|
||||
raise self.ReadOnlyError()
|
||||
found_root = False
|
||||
promotion_branches = []
|
||||
for branch in reversed(fork.branches):
|
||||
@@ -350,6 +368,7 @@ class FinalizedBlockState:
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.readonly = False
|
||||
self.data = {}
|
||||
self.by_hash = {}
|
||||
|
||||
@@ -361,6 +380,8 @@ class FinalizedBlockState:
|
||||
|
||||
def set(self, _fork: Optional[Fork], series, key, value, overwrite=True):
|
||||
assert overwrite
|
||||
if self.readonly:
|
||||
raise BlockState.ReadOnlyError()
|
||||
self.data.setdefault(series, {})[key] = value
|
||||
|
||||
def iteritems(self, _fork: Optional[Fork], series):
|
||||
@@ -373,6 +394,8 @@ class FinalizedBlockState:
|
||||
return self.data.get(series,{}).values()
|
||||
|
||||
def delete_series(self, _fork: Optional[Fork], series: str):
|
||||
if self.readonly:
|
||||
raise BlockState.ReadOnlyError()
|
||||
del self.data[series]
|
||||
|
||||
|
||||
|
||||
@@ -8,9 +8,9 @@ from omegaconf.errors import OmegaConfBaseException
|
||||
|
||||
from .schema import Config
|
||||
|
||||
schema = OmegaConf.structured(Config())
|
||||
schema = OmegaConf.structured(Config(), flags={'struct': False})
|
||||
|
||||
_config_file = 'dexorder.toml'
|
||||
config_file = 'dexorder.toml'
|
||||
|
||||
class ConfigException (Exception):
|
||||
pass
|
||||
@@ -21,7 +21,7 @@ def load_config():
|
||||
result:ConfigDict = OmegaConf.merge(
|
||||
schema,
|
||||
from_toml('.secret.toml'),
|
||||
from_toml(_config_file),
|
||||
from_toml(config_file),
|
||||
from_toml('config.toml'),
|
||||
from_env()
|
||||
)
|
||||
@@ -73,7 +73,7 @@ if len(sys.argv) > 1 and (sys.argv[1] == '-c' or sys.argv[1] == '--config'):
|
||||
if len(sys.argv) < 3:
|
||||
raise ConfigException('Missing config file argument')
|
||||
else:
|
||||
_config_file = sys.argv[2]
|
||||
config_file = sys.argv[2]
|
||||
sys.argv = [sys.argv[0], *sys.argv[3:]]
|
||||
|
||||
config = load_config()
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
from .load import config
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -9,6 +9,7 @@ from typing import Optional
|
||||
|
||||
@dataclass
|
||||
class Config:
|
||||
contract_version: Optional[str] = None # version tag of the contract deployment to use. if None then
|
||||
confirms: Optional[int] = None # number of blocks before data is considered finalized. if None then the chain's default setting is used
|
||||
batch_size: Optional[int] = None # max number of blocks to query in a single backfill rpc request
|
||||
rpc_url: str = 'http://localhost:8545' # may be a comma-separated list. may include names of entries in rpc_urls.
|
||||
@@ -16,6 +17,7 @@ class Config:
|
||||
ws_url: Optional[str] = 'ws://localhost:8545'
|
||||
rpc_urls: Optional[dict[str,str]] = field(default_factory=dict)
|
||||
db_url: Optional[str] = 'postgresql://dexorder:redroxed@localhost/dexorder'
|
||||
db_readonly: bool = False
|
||||
dump_sql: bool = False
|
||||
redis_url: Optional[str] = 'redis://localhost:6379'
|
||||
|
||||
@@ -41,12 +43,17 @@ class Config:
|
||||
fee_leeway = 0.1 # do not adjust fees if they are within this proportion
|
||||
min_gas: str = '0'
|
||||
|
||||
mark_publish_seconds: float = 60 # publish mark prices every this number of seconds
|
||||
|
||||
# Order slashing
|
||||
slash_kill_count: int = 5
|
||||
slash_delay_base: float = 60 # one minute
|
||||
slash_delay_mul: float = 2 # double the delay each time
|
||||
slash_delay_max: int = 15 * 60
|
||||
|
||||
# Tranches are paused for this long after they trigger a slippage control
|
||||
slippage_control_delay: float = 10 # matches the 10-second TWAP used by our uniswap router
|
||||
|
||||
walker_name: str = 'default'
|
||||
walker_flush_interval: float = 300
|
||||
walker_stop: Optional[int] = None # block number of the last block the walker should process
|
||||
@@ -60,6 +67,3 @@ class Config:
|
||||
stablecoins: list[str] = field(default_factory=list) # primary stablecoins which are marked to $1
|
||||
quotecoins: list[str] = field(default_factory=list) # quote tokens like WETH that have stablecoin markets
|
||||
nativecoin: Optional[str] = None # used for accounting of native values. e.g. address of WETH
|
||||
|
||||
# account: target_balance
|
||||
refill: dict[str,str] = field(default_factory=dict)
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import glob
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
|
||||
from eth_abi.exceptions import InsufficientDataBytes
|
||||
@@ -9,7 +10,7 @@ from web3.exceptions import BadFunctionCallOutput, ContractLogicError
|
||||
|
||||
from .abi import abis
|
||||
from .contract_proxy import ContractProxy
|
||||
from .. import current_w3
|
||||
from .. import current_w3, config
|
||||
from ..base.chain import current_chain
|
||||
|
||||
CONTRACT_ERRORS = (InsufficientDataBytes, ContractLogicError, BadFunctionCallOutput)
|
||||
@@ -18,10 +19,28 @@ CONTRACT_ERRORS = (InsufficientDataBytes, ContractLogicError, BadFunctionCallOut
|
||||
# set initially to the string filename, then loaded on demand and set to the parsed JSON result
|
||||
_contract_data: dict[str,Union[str,dict]] = {}
|
||||
|
||||
# finds all .sol files and sets _contract_data with their pathname
|
||||
for _file in glob.glob('../contract/out/**/*.sol/*.json', recursive=True):
|
||||
if os.path.isfile(_file):
|
||||
_contract_data[os.path.basename(_file)[:-5]] = _file
|
||||
initialized = False
|
||||
_contract_path = ''
|
||||
|
||||
def get_contract_path():
|
||||
init_contract_data()
|
||||
return _contract_path
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
def init_contract_data():
|
||||
global initialized, _contract_path
|
||||
if initialized:
|
||||
return
|
||||
subpath = '' if config.contract_version is None else f'/deployment/{config.contract_version}'
|
||||
_contract_path = f'../contract{subpath}'
|
||||
|
||||
# finds all .json files in the out path and sets _contract_data with their pathname
|
||||
for _file in glob.glob(f'{_contract_path}/out/**/*.sol/*.json', recursive=True):
|
||||
if os.path.isfile(_file):
|
||||
_contract_data[os.path.basename(_file)[:-5]] = _file
|
||||
initialized = True
|
||||
log.info(f'Configured contracts from {_contract_path}')
|
||||
|
||||
|
||||
def get_abi(name):
|
||||
@@ -29,6 +48,7 @@ def get_abi(name):
|
||||
|
||||
|
||||
def get_contract_data(name):
|
||||
init_contract_data()
|
||||
try:
|
||||
return {'abi':abis[name]}
|
||||
except KeyError:
|
||||
@@ -43,9 +63,10 @@ def get_contract_data(name):
|
||||
|
||||
|
||||
def get_deployment_address(deployment_name, contract_name, *, chain_id=None):
|
||||
init_contract_data()
|
||||
if chain_id is None:
|
||||
chain_id = current_chain.get().id
|
||||
with open(f'../contract/broadcast/{deployment_name}.sol/{chain_id}/run-latest.json', 'rt') as file:
|
||||
with open(f'{_contract_path}/broadcast/{deployment_name}.sol/{chain_id}/run-latest.json', 'rt') as file:
|
||||
data = json.load(file)
|
||||
for tx in data.get('transactions',[]):
|
||||
if tx.get('contractName') == contract_name:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
abis = {
|
||||
# ERC20 where symbol() returns a bytes32 instead of a string
|
||||
# Special ERC20 definition where symbol() returns a bytes32 instead of a string
|
||||
'ERC20.sb': '''[{"type":"function","name":"symbol","inputs":[],"outputs":[{"name":"","type":"bytes32","internalType":"bytes32"}],"stateMutability":"view"},{"type":"function","name":"name","inputs":[],"outputs":[{"name":"","type":"bytes32","internalType":"bytes32"}],"stateMutability":"view"}]'''
|
||||
# 'WMATIC': '''[{"constant":true,"inputs":[],"name":"name","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"guy","type":"address"},{"name":"wad","type":"uint256"}],"name":"approve","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"src","type":"address"},{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transferFrom","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"wad","type":"uint256"}],"name":"withdraw","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint8"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"","type":"address"}],"name":"balanceOf","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"symbol","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transfer","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[],"name":"deposit","outputs":[],"payable":true,"stateMutability":"payable","type":"function"},{"constant":true,"inputs":[{"name":"","type":"address"},{"name":"","type":"address"}],"name":"allowance","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"payable":true,"stateMutability":"payable","type":"fall
back"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":true,"name":"guy","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":true,"name":"dst","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"dst","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Deposit","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Withdrawal","type":"event"}]''',
|
||||
}
|
||||
|
||||
@@ -60,14 +60,14 @@ class DeployTransaction (ContractTransaction):
|
||||
|
||||
|
||||
def call_wrapper(addr, name, func):
|
||||
async def f(*args, block_identifier=None, **kwargs):
|
||||
async def f(*args, block_identifier=None, kwargs=None):
|
||||
if block_identifier is None:
|
||||
try:
|
||||
block_identifier = current_block.get().height
|
||||
except (LookupError, AttributeError):
|
||||
block_identifier = 'latest'
|
||||
try:
|
||||
return await func(*args).call(block_identifier=block_identifier, **kwargs)
|
||||
return await func(*args).call(block_identifier=block_identifier, **(kwargs or {}))
|
||||
except Web3Exception as e:
|
||||
e.args += addr, name
|
||||
raise e
|
||||
@@ -75,26 +75,29 @@ def call_wrapper(addr, name, func):
|
||||
|
||||
|
||||
def transact_wrapper(addr, name, func):
|
||||
async def f(*args, **kwargs):
|
||||
tx = await func(*args).build_transaction(kwargs)
|
||||
async def f(*args, kwargs=None):
|
||||
tx = await func(*args).build_transaction(kwargs or {})
|
||||
ct = ContractTransaction(tx)
|
||||
account = await Account.acquire()
|
||||
if account is None:
|
||||
raise ValueError(f'No account to sign transaction {addr}.{name}()')
|
||||
await ct.sign(account)
|
||||
try:
|
||||
tx_id = await current_w3.get().eth.send_raw_transaction(ct.data)
|
||||
assert tx_id == ct.id_bytes
|
||||
return ct
|
||||
except Web3Exception as e:
|
||||
e.args += addr, name
|
||||
raise e
|
||||
await ct.sign(account)
|
||||
try:
|
||||
tx_id = await current_w3.get().eth.send_raw_transaction(ct.data)
|
||||
assert tx_id == ct.id_bytes
|
||||
return ct
|
||||
except Web3Exception as e:
|
||||
e.args += addr, name
|
||||
raise e
|
||||
finally:
|
||||
account.release()
|
||||
return f
|
||||
|
||||
|
||||
def build_wrapper(_addr, _name, func):
|
||||
async def f(*args, **kwargs):
|
||||
tx = await func(*args).build_transaction(kwargs)
|
||||
async def f(*args, kwargs=None):
|
||||
tx = await func(*args).build_transaction(kwargs or {})
|
||||
return ContractTransaction(tx)
|
||||
return f
|
||||
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
import logging
|
||||
|
||||
from dexorder import db
|
||||
from dexorder.contract import ERC20, CONTRACT_ERRORS
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def token_decimals(addr):
|
||||
key = f'td|{addr}'
|
||||
try:
|
||||
return db.kv[key]
|
||||
except KeyError:
|
||||
# noinspection PyBroadException
|
||||
try:
|
||||
decimals = await ERC20(addr).decimals()
|
||||
except CONTRACT_ERRORS:
|
||||
log.warning(f'token {addr} has no decimals()')
|
||||
decimals = 0
|
||||
except Exception:
|
||||
log.debug(f'could not get token decimals for {addr}')
|
||||
return None
|
||||
db.kv[key] = decimals
|
||||
return decimals
|
||||
@@ -6,26 +6,37 @@ from eth_utils import keccak, to_bytes, to_checksum_address
|
||||
from typing_extensions import Optional
|
||||
|
||||
from dexorder.base.chain import current_chain
|
||||
from dexorder.contract import ContractProxy
|
||||
from dexorder.contract import ContractProxy, get_contract_path
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
version = None
|
||||
chain_info = None
|
||||
|
||||
_factory = {}
|
||||
_dexorder = {}
|
||||
_vault_init_code_hash = {}
|
||||
_initialized = False
|
||||
|
||||
def _ensure_init():
|
||||
global version, chain_info
|
||||
with open(f'{get_contract_path()}/version.json') as version_file:
|
||||
version = json.load(version_file)
|
||||
log.info(f'Version: {version}')
|
||||
chain_info = version['chainInfo']
|
||||
for _chain_id, info in chain_info.items():
|
||||
_chain_id = int(_chain_id)
|
||||
_factory[_chain_id] = ContractProxy(info['factory'], 'VaultFactory')
|
||||
_dexorder[_chain_id] = ContractProxy(info['dexorder'], 'DexorderGMX')
|
||||
_vault_init_code_hash[_chain_id] = to_bytes(hexstr=info['vaultInitCodeHash'])
|
||||
|
||||
|
||||
with open('../contract/version.json') as version_file:
|
||||
version = json.load(version_file)
|
||||
log.info(f'Version: {version}')
|
||||
|
||||
chain_info = version['chainInfo']
|
||||
|
||||
for _chain_id, info in chain_info.items():
|
||||
_chain_id = int(_chain_id)
|
||||
_factory[_chain_id] = ContractProxy(info['factory'], 'VaultFactory')
|
||||
_dexorder[_chain_id] = ContractProxy(info['dexorder'], 'Dexorder')
|
||||
_vault_init_code_hash[_chain_id] = to_bytes(hexstr=info['vaultInitCodeHash'])
|
||||
def __getattr__(name):
|
||||
global _initialized
|
||||
if not _initialized:
|
||||
_ensure_init()
|
||||
_initialized = True
|
||||
raise AttributeError()
|
||||
|
||||
def get_by_chain(d):
|
||||
return d[current_chain.get().id]
|
||||
@@ -40,11 +51,12 @@ def get_vault_init_code_hash() -> bytes:
|
||||
return get_by_chain(_vault_init_code_hash)
|
||||
|
||||
def get_mockenv() -> Optional[ContractProxy]:
|
||||
addr = chain_info.get(str(current_chain.get().id),{}).get('mockenv')
|
||||
addr = globals()['chain_info'].get(str(current_chain.get().id), {}).get('mockenv')
|
||||
return ContractProxy(addr, 'MockEnv') if addr is not None else None
|
||||
|
||||
|
||||
def get_mirrorenv() -> Optional[ContractProxy]:
|
||||
addr = chain_info.get(str(current_chain.get().id),{}).get('mirrorenv')
|
||||
addr = globals()['chain_info'].get(str(current_chain.get().id), {}).get('mirrorenv')
|
||||
return ContractProxy(addr, 'MirrorEnv') if addr is not None else None
|
||||
|
||||
def vault_address(owner, num):
|
||||
|
||||
@@ -3,7 +3,7 @@ import logging
|
||||
from contextvars import ContextVar
|
||||
|
||||
import sqlalchemy
|
||||
from sqlalchemy import Engine
|
||||
from sqlalchemy import Engine, event
|
||||
from sqlalchemy.orm import Session, SessionTransaction
|
||||
|
||||
from .migrate import migrate_database
|
||||
@@ -99,7 +99,7 @@ class Db:
|
||||
_session.set(None)
|
||||
|
||||
# noinspection PyShadowingNames
|
||||
def connect(self, url=None, migrate=True, reconnect=False, dump_sql=None):
|
||||
def connect(self, url=None, migrate=True, reconnect=False, dump_sql=None, readonly:bool=None):
|
||||
if _engine.get() is not None and not reconnect:
|
||||
return None
|
||||
if url is None:
|
||||
@@ -114,6 +114,19 @@ class Db:
|
||||
if dump_sql is None:
|
||||
dump_sql = config.dump_sql
|
||||
engine = sqlalchemy.create_engine(url, echo=dump_sql, json_serializer=json.dumps, json_deserializer=json.loads)
|
||||
|
||||
if readonly is None:
|
||||
readonly = config.db_readonly
|
||||
if readonly:
|
||||
@event.listens_for(engine, "connect")
|
||||
def set_readonly(dbapi_connection, _connection_record):
|
||||
cursor = dbapi_connection.cursor()
|
||||
try:
|
||||
cursor.execute("SET default_transaction_read_only = on;")
|
||||
log.info('database connection set to READ ONLY')
|
||||
finally:
|
||||
cursor.close()
|
||||
|
||||
if migrate:
|
||||
migrate_database(url)
|
||||
with engine.connect() as connection:
|
||||
|
||||
@@ -10,3 +10,4 @@ from .ofac import OFAC, OFACAlerts
|
||||
from .accounting import Accounting, DbAccount
|
||||
from .vaultcreationrequest import VaultCreationRequest
|
||||
from .tos import TOSAcceptance
|
||||
from .sharedata import ShareData
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from decimal import Decimal as dec
|
||||
from enum import Enum
|
||||
from enum import Enum, auto
|
||||
|
||||
from sqlalchemy import ForeignKeyConstraint
|
||||
from sqlalchemy.ext.mutable import MutableDict
|
||||
@@ -17,35 +17,37 @@ log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AccountingCategory (Enum):
|
||||
Transfer = 0
|
||||
Income = 1
|
||||
Expense = 2
|
||||
Trade = 3
|
||||
Special = 4
|
||||
Transfer = auto()
|
||||
Income = auto()
|
||||
Expense = auto()
|
||||
Trade = auto()
|
||||
Special = auto()
|
||||
|
||||
class AccountingSubcategory (Enum):
|
||||
# Income
|
||||
OrderFee = 0
|
||||
GasFee = 1
|
||||
FillFee = 2
|
||||
OrderFee = auto()
|
||||
GasFee = auto()
|
||||
FillFee = auto()
|
||||
|
||||
# Expense
|
||||
VaultCreation = 3
|
||||
Execution = 4
|
||||
FeeAdjustment = 5 # includes adjusting fee limits
|
||||
Admin = auto() # contract deployments and upgrades, changing adjuster address, etc.
|
||||
TransactionGas = auto()
|
||||
VaultCreation = auto()
|
||||
Execution = auto()
|
||||
FeeAdjustment = auto() # includes adjusting fee limits
|
||||
|
||||
# Transfer
|
||||
# Transfers have no subcategories, but the note field will be the address of the other account. Both a debit and a
|
||||
# credit entry will be created, one for each account participating in the transfer.
|
||||
|
||||
# Special Codes
|
||||
InitialBalance = 5
|
||||
InitialBalance = auto()
|
||||
|
||||
|
||||
class Accounting (Base):
|
||||
id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
|
||||
time: Mapped[datetime] = mapped_column(default=now(), index=True)
|
||||
chain_id: Mapped[int] = mapped_column(index=True)
|
||||
chain_id: Mapped[int] = mapped_column(index=True) # chain_id
|
||||
account: Mapped[str] = mapped_column(index=True)
|
||||
category: Mapped[AccountingCategory] = mapped_column(index=True)
|
||||
subcategory: Mapped[Optional[AccountingSubcategory]] = mapped_column(index=True)
|
||||
|
||||
@@ -3,6 +3,7 @@ from typing import TypedDict, Optional
|
||||
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from dexorder.base import OldPoolDict
|
||||
from dexorder.base.orderlib import Exchange
|
||||
from dexorder.database.column import Address, Blockchain
|
||||
from dexorder.database.model import Base
|
||||
@@ -20,17 +21,6 @@ class PoolDict (TypedDict):
|
||||
x: Optional[dict]
|
||||
|
||||
|
||||
class OldPoolDict (TypedDict):
|
||||
type: str
|
||||
chain: int
|
||||
address: str
|
||||
exchange: int
|
||||
base: str
|
||||
quote: str
|
||||
fee: int
|
||||
decimals: int
|
||||
|
||||
|
||||
class Pool (Base):
|
||||
__tablename__ = 'pool'
|
||||
|
||||
|
||||
12
src/dexorder/database/model/sharedata.py
Normal file
12
src/dexorder/database/model/sharedata.py
Normal file
@@ -0,0 +1,12 @@
|
||||
import logging
|
||||
|
||||
from sqlalchemy.dialects.postgresql import JSONB
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from dexorder.database.model import Base
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
class ShareData (Base):
|
||||
id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
|
||||
data: Mapped[dict] = mapped_column(JSONB)
|
||||
@@ -1,37 +1,15 @@
|
||||
import logging
|
||||
from typing import TypedDict, Optional, NotRequired
|
||||
|
||||
from sqlalchemy import Index
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from dexorder.base import OldTokenDict
|
||||
from dexorder.database.column import Address, Blockchain, Uint8
|
||||
from dexorder.database.model import Base
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TokenDict (TypedDict):
|
||||
a: str
|
||||
n: str
|
||||
s: str
|
||||
d: int
|
||||
w: Optional[bool] # approved ("w"hitelisted)
|
||||
x: NotRequired[dict] # extra data
|
||||
|
||||
|
||||
# OldTokenDict is the primary dict we use in-memory, with basic JSON-able types
|
||||
|
||||
class OldTokenDict (TypedDict):
|
||||
type: str
|
||||
chain: int
|
||||
address: str
|
||||
name: str
|
||||
symbol: str
|
||||
decimals: int
|
||||
approved: bool # whether this token is in the whitelist or not
|
||||
x: NotRequired[dict] # extra data
|
||||
|
||||
|
||||
# the database object is primarily write-only so we are able to index queries for pools-by-token from the nodejs server
|
||||
|
||||
class Token (Base):
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from eth_utils import keccak
|
||||
from web3.types import EventData
|
||||
|
||||
from dexorder import db, metric, current_w3, timestamp
|
||||
from dexorder.accounting import accounting_fill, accounting_placement, accounting_transfer, is_tracked_address, \
|
||||
accounting_lock
|
||||
from dexorder.accounting import accounting_fill, accounting_placement
|
||||
from dexorder.base.chain import current_chain
|
||||
from dexorder.base.order import TrancheKey, OrderKey
|
||||
from dexorder.base.orderlib import SwapOrderState
|
||||
from dexorder.base.orderlib import SwapOrderState, Exchange, GMXOrder
|
||||
from dexorder.blocks import get_block_timestamp
|
||||
from dexorder.blockstate import current_blockstate
|
||||
from dexorder.contract.dexorder import VaultContract, get_factory_contract
|
||||
@@ -18,7 +18,8 @@ from dexorder.ohlc import ohlcs
|
||||
from dexorder.order.orderstate import Order
|
||||
from dexorder.order.triggers import (OrderTriggers, activate_order, update_balance_triggers, start_trigger_updates,
|
||||
update_price_triggers, TimeTrigger, PriceLineTrigger)
|
||||
from dexorder.pools import new_pool_prices, pool_prices, get_uniswap_data
|
||||
from dexorder.pools import new_pool_prices, pool_prices, get_uniswap_data, get_pool
|
||||
from dexorder.progressor import BlockProgressor
|
||||
from dexorder.util import hexstr
|
||||
from dexorder.vault_blockdata import vault_owners, adjust_balance, verify_vault, publish_vaults
|
||||
|
||||
@@ -32,7 +33,14 @@ def dump_log(eventlog):
|
||||
def init():
|
||||
new_pool_prices.clear()
|
||||
start_trigger_updates()
|
||||
accounting_lock()
|
||||
|
||||
|
||||
def wire_dexorder_debug(runner: BlockProgressor):
|
||||
runner.add_event_trigger(handle_dexorderdebug, None, {"topics":[keccak(text='DexorderDebug(string)')]})
|
||||
|
||||
def handle_dexorderdebug(events: list):
|
||||
for event in events:
|
||||
print(f'DexorderDebug {event}')
|
||||
|
||||
|
||||
async def handle_order_placed(event: EventData):
|
||||
@@ -59,8 +67,11 @@ async def handle_order_placed(event: EventData):
|
||||
obj = await contract.swapOrderStatus(index)
|
||||
log.debug(f'raw order status {obj}')
|
||||
order = Order.create(addr, index, event['transactionHash'], obj)
|
||||
if order.order.route.exchange == Exchange.GMX:
|
||||
gmxStatus = await contract.gmxOrderStatus(index)
|
||||
order.order.gmx = GMXOrder.load(gmxStatus[0])
|
||||
await activate_order(order)
|
||||
log.debug(f'new order {order.key}{order}')
|
||||
log.debug(f'new order {order.key} {await order.pprint()}')
|
||||
|
||||
|
||||
async def handle_swap_filled(event: EventData):
|
||||
@@ -83,9 +94,10 @@ async def handle_swap_filled(event: EventData):
|
||||
except KeyError:
|
||||
log.warning(f'DexorderSwapFilled IGNORED due to missing order {vault} {order_index}')
|
||||
return
|
||||
value = await accounting_fill(event, order.order.tokenOut)
|
||||
if value is not None:
|
||||
metric.volume.inc(float(value))
|
||||
usd_value = await accounting_fill(event, order.order.tokenOut)
|
||||
# from here down is almost the same as a section of handle_gmxorderexecuted()
|
||||
if usd_value is not None:
|
||||
metric.volume.inc(float(usd_value))
|
||||
order.status.trancheStatus[tranche_index].activationTime = next_execution_time # update rate limit
|
||||
try:
|
||||
triggers = OrderTriggers.instances[order.key]
|
||||
@@ -139,10 +151,11 @@ async def handle_transfer(transfer: EventData):
|
||||
vault = None
|
||||
if vault is not None:
|
||||
await adjust_balance(vault, token_address, amount)
|
||||
await update_balance_triggers(vault, token_address, amount)
|
||||
if is_tracked_address(to_address):
|
||||
# noinspection PyTypeChecker
|
||||
await accounting_transfer(transfer, token_address, from_address, to_address, amount, adjust_decimals=True)
|
||||
await update_balance_triggers(vault, token_address)
|
||||
# This wuold double-count fill fees. Instead, we book the transfer when sending money to the account as part of a refill.
|
||||
# if is_tracked_address(to_address):
|
||||
# # noinspection PyTypeChecker
|
||||
# await accounting_transfer(transfer, token_address, from_address, to_address, amount, adjust_decimals=True)
|
||||
|
||||
async def handle_uniswap_swaps(swaps: list[EventData]):
|
||||
# asynchronously prefetch the block timestamps we'll need
|
||||
@@ -159,12 +172,26 @@ async def handle_uniswap_swap(swap: EventData):
|
||||
return
|
||||
pool, time, price = data
|
||||
addr = pool['address']
|
||||
pool_prices[addr] = price
|
||||
await ohlcs.update_all(addr, time, price)
|
||||
await update_price_triggers(pool, price)
|
||||
await update_pool_price(addr, time, price, pool['decimals'])
|
||||
# log.debug(f'pool {addr} {minutely(time)} {price}')
|
||||
|
||||
|
||||
async def update_pool_price(addr, time, price, decimals):
|
||||
"""
|
||||
Price should be an adjusted price with decimals, not the raw price from the pool. The decimals are used to
|
||||
convert the price back to blockchain format for the triggers.
|
||||
"""
|
||||
pool_prices[addr] = price # this will update new_pool_prices if necessary
|
||||
await ohlcs.update_all(addr, time, price)
|
||||
update_price_triggers(addr, price, decimals)
|
||||
|
||||
|
||||
async def activate_new_price_triggers():
|
||||
for addr, price in new_pool_prices.items():
|
||||
pool = await get_pool(addr)
|
||||
update_price_triggers(addr, price, pool['decimals'])
|
||||
|
||||
|
||||
async def handle_vault_created(created: EventData):
|
||||
try:
|
||||
owner = created['args']['owner']
|
||||
@@ -221,7 +248,7 @@ async def update_metrics():
|
||||
metric.vaults.set(vault_owners.upper_len())
|
||||
metric.open_orders.set(Order.open_orders.upper_len())
|
||||
metric.triggers_time.set(len(TimeTrigger.all))
|
||||
metric.triggers_line.set(len(PriceLineTrigger.triggers_set))
|
||||
metric.triggers_line.set(sum(len(s) for s in PriceLineTrigger.by_pool.values()))
|
||||
|
||||
# slow updates
|
||||
global slow_metric_update
|
||||
|
||||
@@ -2,7 +2,7 @@ import asyncio
|
||||
import logging
|
||||
|
||||
from dexorder.contract import ContractProxy
|
||||
from dexorder.contract.dexorder import get_factory_contract, get_fee_manager_contract
|
||||
from dexorder.contract.dexorder import get_fee_manager_contract
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -242,6 +242,10 @@ class OHLCFileSeries:
|
||||
self.dirty_files = set()
|
||||
self.quote: Optional[tuple[datetime,dec]] = None
|
||||
|
||||
@property
|
||||
def exists(self) -> bool:
|
||||
return self.quote_file is not None or os.path.exists(self.quote_filename)
|
||||
|
||||
|
||||
@property
|
||||
def quote_filename(self):
|
||||
@@ -276,6 +280,16 @@ class OHLCFileSeries:
|
||||
self.dirty_files.add(file)
|
||||
|
||||
|
||||
# noinspection PyShadowingBuiltins
|
||||
def update_ohlc(self, period: timedelta, time: datetime, open: dec, high: dec, low: dec, close: dec):
|
||||
file = OHLCFile.get(self.base_dir, OHLCFilePath(self.symbol, period, time))
|
||||
file.update(time, open)
|
||||
file.update(time, high)
|
||||
file.update(time, low)
|
||||
file.update(time, close)
|
||||
self.dirty_files.add(file)
|
||||
|
||||
|
||||
def _load(self, time):
|
||||
#
|
||||
# load quote file
|
||||
@@ -359,14 +373,25 @@ class FinalOHLCRepository:
|
||||
"""
|
||||
def __init__(self):
|
||||
assert config.ohlc_dir
|
||||
self.dirty_series = set()
|
||||
self.dirty_series: set[OHLCFileSeries] = set()
|
||||
|
||||
def update(self, symbol: str, time: datetime, price: Optional[dec]):
|
||||
series = self.get_series(symbol)
|
||||
series.update(time, price)
|
||||
self.dirty_series.add(series)
|
||||
|
||||
# noinspection PyShadowingBuiltins
|
||||
def update_ohlc(self, symbol: str, period: timedelta, time: datetime, open: dec, high: dec, low: dec, close: dec):
|
||||
series = self.get_series(symbol)
|
||||
series.update_ohlc(period, time, open, high, low, close)
|
||||
self.dirty_series.add(series)
|
||||
|
||||
@staticmethod
|
||||
def get_series(symbol):
|
||||
chain_id = current_chain.get().id
|
||||
base_dir = os.path.join(config.ohlc_dir, str(chain_id))
|
||||
series = OHLCFileSeries.get(base_dir, symbol)
|
||||
series.update(time, price)
|
||||
self.dirty_series.add(series)
|
||||
return series
|
||||
|
||||
def flush(self) -> None:
|
||||
for series in self.dirty_series:
|
||||
@@ -378,3 +403,6 @@ class FinalOHLCRepository:
|
||||
closing.file.close()
|
||||
# noinspection PyProtectedMember
|
||||
OHLCFile._closing.clear()
|
||||
|
||||
def has_symbol(self, symbol: str):
|
||||
return self.get_series(symbol).exists
|
||||
|
||||
5
src/dexorder/gmx/__init__.py
Normal file
5
src/dexorder/gmx/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
from ._base import gmx_prices, gmx_tk_in_flight, tk_gmx_in_flight
|
||||
from ._chaininfo import gmx_chain_info
|
||||
from ._handle import gmx_wire_runner_early, gmx_wire_runner_late
|
||||
from ._metadata import *
|
||||
|
||||
51
src/dexorder/gmx/_abi.py
Normal file
51
src/dexorder/gmx/_abi.py
Normal file
@@ -0,0 +1,51 @@
|
||||
import logging
|
||||
import re
|
||||
|
||||
from eth_utils import keccak
|
||||
|
||||
from dexorder.util import hexbytes, hexstr
|
||||
from dexorder.util.abiencode import abi_decoder
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def no_ws(s):
|
||||
return re.sub(r"\s+", "", s)
|
||||
|
||||
EventLogDataType = '''
|
||||
(((string,address)[],(string,address[])[]),
|
||||
((string,uint256)[],(string,uint256[])[]),
|
||||
((string,int256)[], (string,int256[])[] ),
|
||||
((string,bool)[], (string,bool[])[] ),
|
||||
((string,bytes32)[],(string,bytes32[])[]),
|
||||
((string,bytes)[], (string,bytes[])[] ),
|
||||
((string,string)[], (string,string[])[] )
|
||||
)'''
|
||||
|
||||
EventLogType = f'EventLog( address, string, string, {EventLogDataType} )'
|
||||
EventLog1Type = f'EventLog1( address, string, string, bytes32, {EventLogDataType} )'
|
||||
EventLog2Type = f'EventLog2( address, string, string, bytes32, bytes32, {EventLogDataType} )'
|
||||
|
||||
EventLogTopic = hexstr(keccak(text=no_ws(EventLogType)))
|
||||
EventLog1Topic = hexstr(keccak(text=no_ws(EventLog1Type)).hex())
|
||||
EventLog2Topic = hexstr(keccak(text=no_ws(EventLog2Type)).hex())
|
||||
|
||||
|
||||
def topic_hash(signature):
|
||||
return hexstr(keccak(text=no_ws(signature)))
|
||||
|
||||
|
||||
def parse_event_log_data(event_log):
|
||||
event_log_data = event_log['data']
|
||||
if type(event_log_data) is str:
|
||||
event_log_data = hexbytes(event_log_data)
|
||||
sender, event_name, event_log_data = abi_decoder.decode(('address', 'string', no_ws(EventLogDataType),), event_log_data)
|
||||
|
||||
result = {'sender': sender, 'event': event_name, 'tx': hexstr(event_log['transactionHash'])}
|
||||
for items, array_items in event_log_data:
|
||||
for k, v in items:
|
||||
result[k] = v
|
||||
for k, v in array_items:
|
||||
result[k] = v
|
||||
return result
|
||||
|
||||
97
src/dexorder/gmx/_base.py
Normal file
97
src/dexorder/gmx/_base.py
Normal file
@@ -0,0 +1,97 @@
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import NamedTuple
|
||||
|
||||
import requests
|
||||
from eth_utils import to_checksum_address
|
||||
|
||||
from ._chaininfo import GMX_API_BASE_URLS
|
||||
from .. import dec
|
||||
from ..base.chain import current_chain
|
||||
from ..base.order import TrancheKey
|
||||
from ..blockstate import BlockDict
|
||||
from ..util import json
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class GMXPosition:
|
||||
# compound key fields
|
||||
market_token: str
|
||||
collateral_token: str
|
||||
is_long: bool
|
||||
|
||||
# non-key attrs
|
||||
size: dec = dec(0)
|
||||
|
||||
class Key (NamedTuple):
|
||||
market_token: str
|
||||
collateral_token: str
|
||||
is_long: bool
|
||||
|
||||
def __str__(self):
|
||||
return f'{self.market_token}|{self.collateral_token}|{"L" if self.is_long else "S"}'
|
||||
|
||||
@staticmethod
|
||||
def str2key(keystring: str):
|
||||
market_token, collateral_token, is_long = keystring.split('|')
|
||||
return GMXPosition.Key(market_token.lower(), collateral_token.lower(), is_long == 'L')
|
||||
|
||||
@property
|
||||
def key(self):
|
||||
return GMXPosition.Key(self.market_token, self.collateral_token, self.is_long)
|
||||
|
||||
@staticmethod
|
||||
def load(d: dict):
|
||||
return GMXPosition(to_checksum_address(d['m']), to_checksum_address(d['c']), d['l'], dec(d['s']))
|
||||
|
||||
|
||||
def dump(self):
|
||||
return {
|
||||
'm': self.market_token,
|
||||
'c': self.collateral_token,
|
||||
'l': self.is_long,
|
||||
's': str(self.size),
|
||||
}
|
||||
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.key)
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.key == other.key
|
||||
|
||||
|
||||
class GMXOrderType (Enum):
|
||||
MarketSwap = 0
|
||||
LimitSwap = 1
|
||||
MarketIncrease = 2
|
||||
LimitIncrease = 3
|
||||
MarketDecrease = 4
|
||||
LimitDecrease = 5
|
||||
StopLossDecrease = 6
|
||||
Liquidation = 7
|
||||
StopIncrease = 8
|
||||
|
||||
|
||||
GMX_API_BASE_URL = None
|
||||
|
||||
def gmx_api(method, **params):
|
||||
global GMX_API_BASE_URL
|
||||
if GMX_API_BASE_URL is None:
|
||||
GMX_API_BASE_URL = GMX_API_BASE_URLS[current_chain.get().id]
|
||||
return requests.get(GMX_API_BASE_URL+method, params=params, timeout=5).json()
|
||||
|
||||
|
||||
gmx_markets_by_index_token: BlockDict[str, list[str]] = BlockDict('gmx_t_m', redis=True, db=True, value2str=lambda mks: json.dumps(mks), str2value=lambda s: json.loads(s))
|
||||
gmx_prices: BlockDict[str, dec] = BlockDict('gmx_p', redis=True, str2value=dec)
|
||||
# open positions by vault
|
||||
gmx_positions: BlockDict[str, list[GMXPosition]] = BlockDict('gmx_pos', redis=True, db=True,
|
||||
value2str=lambda positions: json.dumps([p.dump() for p in positions]),
|
||||
str2value=lambda positions: [GMXPosition.load(p) for p in json.loads(positions)] )
|
||||
|
||||
# dual mappings of our TrancheKey to a GMX Order key exist only when a GMX order has been placed but not yet handled
|
||||
gmx_tk_in_flight: BlockDict[str, TrancheKey] = BlockDict('gmx_tif', db=True, str2value=TrancheKey.str2key)
|
||||
tk_gmx_in_flight: BlockDict[TrancheKey, str] = BlockDict('tk2gmx', db=True, str2key=TrancheKey.str2key)
|
||||
16
src/dexorder/gmx/_chaininfo.py
Normal file
16
src/dexorder/gmx/_chaininfo.py
Normal file
@@ -0,0 +1,16 @@
|
||||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
gmx_chain_info = {
|
||||
42161: {
|
||||
'EventEmitter': '0xC8ee91A54287DB53897056e12D9819156D3822Fb',
|
||||
'DataStore': '0xFD70de6b91282D8017aA4E741e9Ae325CAb992d8',
|
||||
'Reader': '0x0537C767cDAC0726c76Bb89e92904fe28fd02fE1',
|
||||
}
|
||||
}
|
||||
|
||||
GMX_API_BASE_URLS={
|
||||
31337: 'https://arbitrum-api.gmxinfra.io/',
|
||||
42161: 'https://arbitrum-api.gmxinfra.io/',
|
||||
}
|
||||
24
src/dexorder/gmx/_contract.py
Normal file
24
src/dexorder/gmx/_contract.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import logging
|
||||
from functools import cache
|
||||
|
||||
from dexorder.contract import ContractProxy
|
||||
from dexorder.gmx._datastore import DataStore
|
||||
from dexorder.util import json
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_gmx_contract_info(name: str):
|
||||
with open(f'./resource/abi/42161/gmx/{name}.json') as file:
|
||||
info = json.load(file)
|
||||
return info
|
||||
|
||||
|
||||
@cache
|
||||
def get_gmx_contract(name: str):
|
||||
info = get_gmx_contract_info(name)
|
||||
if name == 'DataStore':
|
||||
clazz = DataStore
|
||||
else:
|
||||
clazz = ContractProxy
|
||||
return clazz(info['address'], abi=info['abi'])
|
||||
28
src/dexorder/gmx/_datastore.py
Normal file
28
src/dexorder/gmx/_datastore.py
Normal file
@@ -0,0 +1,28 @@
|
||||
import logging
|
||||
|
||||
from eth_utils import keccak
|
||||
|
||||
from dexorder import dec
|
||||
from dexorder.contract import ContractProxy
|
||||
from dexorder.util.abiencode import abi_encoder
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
def combo_key(key_str, arg, arg_type='address'):
|
||||
key_bytes = keccak(abi_encoder.encode(['string'], [key_str]))
|
||||
return keccak(abi_encoder.encode(['bytes32', arg_type], [key_bytes, arg]))
|
||||
|
||||
IS_MARKET_DISABLED_KEY = 'IS_MARKET_DISABLED'
|
||||
MIN_COLLATERAL_FACTOR_KEY = 'MIN_COLLATERAL_FACTOR'
|
||||
|
||||
|
||||
class DataStore (ContractProxy):
|
||||
|
||||
async def is_market_disabled(self, market_addr: str):
|
||||
return await self.getBool(combo_key(IS_MARKET_DISABLED_KEY, market_addr))
|
||||
|
||||
async def min_collateral_factor(self, market_addr: str):
|
||||
result = await self.getUint(combo_key(MIN_COLLATERAL_FACTOR_KEY, market_addr))
|
||||
if result == 0:
|
||||
log.warning(f'no min collateral factor for market {market_addr}')
|
||||
return 2 * dec(result) / dec(1e30)
|
||||
292
src/dexorder/gmx/_error.py
Normal file
292
src/dexorder/gmx/_error.py
Normal file
@@ -0,0 +1,292 @@
|
||||
import logging
|
||||
|
||||
from dexorder.util.abiencode import abi_decoder
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
# Map of GMX custom-error selectors to their Solidity signatures, as declared
# in the GMX synthetics Errors library. Keys start as 8-hex-char selector
# strings and are converted to 4-byte keys immediately below; values are the
# full error signatures used by gmx_parse_reason_bytes() to decode revert data.
gmx_error_map = {
    'b244a107': 'ActionAlreadySignalled()',
    '94fdaea2': 'ActionNotSignalled()',
    '3285dc57': 'AdlNotEnabled()',
    'd06ed8be': 'AdlNotRequired(int256,uint256)',
    '70657e04': 'ArrayOutOfBoundsBytes(bytes[],uint256,string)',
    '9d18e63b': 'ArrayOutOfBoundsUint256(uint256[],uint256,string)',
    '60c5e472': 'AvailableFeeAmountIsZero(address,address,uint256)',
    '11aeaf6b': 'BlockNumbersNotSorted(uint256,uint256)',
    'ec775484': 'BuybackAndFeeTokenAreEqual(address,address)',
    'd6b52b60': 'ChainlinkPriceFeedNotUpdated(address,uint256,uint256)',
    'ec6d89c8': 'CollateralAlreadyClaimed(uint256,uint256)',
    'bdec9c0d': 'CompactedArrayOutOfBounds(uint256[],uint256,uint256,string)',
    '5ebb87c9': 'ConfigValueExceedsAllowedRange(bytes32,uint256)',
    '413f9a54': 'DataStreamIdAlreadyExistsForToken(address)',
    '83f2ba20': 'DeadlinePassed(uint256,uint256)',
    '43e30ca8': 'DepositNotFound(bytes32)',
    'dd70e0c9': 'DisabledFeature(bytes32)',
    '09f8c937': 'DisabledMarket(address)',
    'd4064737': 'DuplicatedIndex(uint256,string)',
    '91c78b78': 'DuplicatedMarketInSwapPath(address)',
    'dd7016a2': 'EmptyAccount()',
    'e474a425': 'EmptyAddressInMarketTokenBalanceValidation(address,address)',
    '52dfddfd': 'EmptyChainlinkPaymentToken()',
    '8db88ccf': 'EmptyChainlinkPriceFeed(address)',
    'b86fffef': 'EmptyChainlinkPriceFeedMultiplier(address)',
    '616daf1f': 'EmptyClaimFeesMarket()',
    '62e402cc': 'EmptyDataStreamFeedId(address)',
    '088405c6': 'EmptyDataStreamMultiplier(address)',
    '95b66fe9': 'EmptyDeposit()',
    '01af8c24': 'EmptyDepositAmounts()',
    'd1c3d5bd': 'EmptyDepositAmountsAfterSwap()',
    'a14e1b3d': 'EmptyGlv(address)',
    'bd192971': 'EmptyGlvDeposit()',
    '03251ce6': 'EmptyGlvDepositAmounts()',
    '94409f52': 'EmptyGlvMarketAmount()',
    '93856b1a': 'EmptyGlvTokenSupply()',
    '0e5be78f': 'EmptyGlvWithdrawal()',
    '402a866f': 'EmptyGlvWithdrawalAmount()',
    'e9b78bd4': 'EmptyHoldingAddress()',
    '05fbc1ae': 'EmptyMarket()',
    'eb1947dd': 'EmptyMarketPrice(address)',
    '2ee3d69c': 'EmptyMarketTokenSupply()',
    '16307797': 'EmptyOrder()',
    '4dfbbff3': 'EmptyPosition()',
    'cd64a025': 'EmptyPrimaryPrice(address)',
    'd551823d': 'EmptyReceiver()',
    '6af5e96f': 'EmptyShift()',
    '60d5e84a': 'EmptyShiftAmount()',
    '3df42531': 'EmptySizeDeltaInTokens()',
    '9fc297fa': 'EmptyTokenTranferGasLimit(address)',
    '9231be69': 'EmptyValidatedPrices()',
    '6d4bb5e9': 'EmptyWithdrawal()',
    '01d6f7b1': 'EmptyWithdrawalAmount()',
    '4e48dcda': 'EndOfOracleSimulation()',
    '59afd6c6': 'ExternalCallFailed(bytes)',
    '2df6dc23': 'FeeBatchNotFound(bytes32)',
    'e44992d0': 'GlvAlreadyExists(bytes32,address)',
    '057058b6': 'GlvDepositNotFound(bytes32)',
    '30b8a225': 'GlvDisabledMarket(address,address)',
    '8da31161': 'GlvEnabledMarket(address,address)',
    'c8b70b2c': 'GlvInsufficientMarketTokenBalance(address,address,uint256,uint256)',
    '80ad6831': 'GlvInvalidLongToken(address,address,address)',
    '9673a10b': 'GlvInvalidShortToken(address,address,address)',
    '3aa9fc91': 'GlvMarketAlreadyExists(address,address)',
    'af7d3787': 'GlvMaxMarketCountExceeded(address,uint256)',
    'd859f947': 'GlvMaxMarketTokenBalanceAmountExceeded(address,address,uint256,uint256)',
    '66560e7d': 'GlvMaxMarketTokenBalanceUsdExceeded(address,address,uint256,uint256)',
    '155712e1': 'GlvNameTooLong()',
    '2e3780e5': 'GlvNegativeMarketPoolValue(address,address)',
    '3afc5e65': 'GlvNonZeroMarketBalance(address,address)',
    '6c00ed8a': 'GlvNotFound(address)',
    '232d7165': 'GlvShiftIntervalNotYetPassed(uint256,uint256,uint256)',
    'c906a05a': 'GlvShiftMaxPriceImpactExceeded(uint256,uint256)',
    'de45e162': 'GlvShiftNotFound(bytes32)',
    '9cb4f5c5': 'GlvSymbolTooLong()',
    '07e9c4d5': 'GlvUnsupportedMarket(address,address)',
    '20dcb068': 'GlvWithdrawalNotFound(bytes32)',
    'd90abe06': 'GmEmptySigner(uint256)',
    'ee6e8ecf': 'GmInvalidBlockNumber(uint256,uint256)',
    'b8aaa455': 'GmInvalidMinMaxBlockNumber(uint256,uint256)',
    'c7b44b28': 'GmMaxOracleSigners(uint256,uint256)',
    '0f885e52': 'GmMaxPricesNotSorted(address,uint256,uint256)',
    '5b1250e7': 'GmMaxSignerIndex(uint256,uint256)',
    'dc2a99e7': 'GmMinOracleSigners(uint256,uint256)',
    'cc7bbd5b': 'GmMinPricesNotSorted(address,uint256,uint256)',
    'a581f648': 'InsufficientBuybackOutputAmount(address,address,uint256,uint256)',
    '74cc815b': 'InsufficientCollateralAmount(uint256,int256)',
    '2159b161': 'InsufficientCollateralUsd(int256)',
    '5dac504d': 'InsufficientExecutionFee(uint256,uint256)',
    'bb416f93': 'InsufficientExecutionGas(uint256,uint256,uint256)',
    '79293964': 'InsufficientExecutionGasForErrorHandling(uint256,uint256)',
    '19d50093': 'InsufficientFundsToPayForCosts(uint256,string)',
    'd3dacaac': 'InsufficientGasForCancellation(uint256,uint256)',
    '79a2abad': 'InsufficientGasLeftForCallback(uint256,uint256)',
    '3083b9e5': 'InsufficientHandleExecutionErrorGas(uint256,uint256)',
    '82c8828a': 'InsufficientMarketTokens(uint256,uint256)',
    'd28d3eb5': 'InsufficientOutputAmount(uint256,uint256)',
    '23090a31': 'InsufficientPoolAmount(uint256,uint256)',
    '9cd76295': 'InsufficientRelayFee(uint256,uint256)',
    '315276c9': 'InsufficientReserve(uint256,uint256)',
    'b98c6179': 'InsufficientReserveForOpenInterest(uint256,uint256)',
    'a7aebadc': 'InsufficientSwapOutputAmount(uint256,uint256)',
    '041b3483': 'InsufficientWntAmount(uint256,uint256)',
    '3a78cd7e': 'InsufficientWntAmountForExecutionFee(uint256,uint256)',
    '1d4fc3c0': 'InvalidAdl(int256,int256)',
    '8ac146e6': 'InvalidAmountInForFeeBatch(uint256,uint256)',
    'eb19d3f5': 'InvalidBaseKey(bytes32)',
    '25e5dc07': 'InvalidBlockRangeSet(uint256,uint256)',
    '752fdb63': 'InvalidBuybackToken(address)',
    '89736584': 'InvalidCancellationReceiverForSubaccountOrder(address,address)',
    '5b3043dd': 'InvalidClaimAffiliateRewardsInput(uint256,uint256)',
    '42c0d1f2': 'InvalidClaimCollateralInput(uint256,uint256,uint256)',
    '7363cfa5': 'InvalidClaimFundingFeesInput(uint256,uint256)',
    '74cee48d': 'InvalidClaimUiFeesInput(uint256,uint256)',
    '6c2738d3': 'InvalidClaimableFactor(uint256)',
    '839c693e': 'InvalidCollateralTokenForMarket(address,address)',
    '4a591309': 'InvalidContributorToken(address)',
    '8d56bea1': 'InvalidDataStreamBidAsk(address,int192,int192)',
    'a4949e25': 'InvalidDataStreamFeedId(address,bytes32,bytes32)',
    '2a74194d': 'InvalidDataStreamPrices(address,int192,int192)',
    '6e0c29ed': 'InvalidDataStreamSpreadReductionFactor(address,uint256)',
    '9fbe2cbc': 'InvalidDecreaseOrderSize(uint256,uint256)',
    '751951f9': 'InvalidDecreasePositionSwapType(uint256)',
    '9b867f31': 'InvalidExecutionFee(uint256,uint256,uint256)',
    '99e26b44': 'InvalidExecutionFeeForMigration(uint256,uint256)',
    '831e9f11': 'InvalidExternalCallInput(uint256,uint256)',
    'be55c895': 'InvalidExternalCallTarget(address)',
    'e15f2701': 'InvalidExternalReceiversInput(uint256,uint256)',
    'fa804399': 'InvalidFeeBatchTokenIndex(uint256,uint256)',
    'cb9339d5': 'InvalidFeeReceiver(address)',
    'be6514b6': 'InvalidFeedPrice(address,int256)',
    'fc90fcc3': 'InvalidGlpAmount(uint256,uint256)',
    'bf16cb0a': 'InvalidGlvDepositInitialLongToken(address)',
    'df0f9a23': 'InvalidGlvDepositInitialShortToken(address)',
    '055ab8b9': 'InvalidGlvDepositSwapPath(uint256,uint256)',
    '993417d5': 'InvalidGmMedianMinMaxPrice(uint256,uint256)',
    'a54d4339': 'InvalidGmOraclePrice(address)',
    '8d648a7f': 'InvalidGmSignature(address,address)',
    'b21c863e': 'InvalidGmSignerMinMaxPrice(uint256,uint256)',
    'e5feddc0': 'InvalidKeeperForFrozenOrder(address)',
    '33a1ea6b': 'InvalidMarketTokenBalance(address,address,uint256,uint256)',
    '9dd026db': 'InvalidMarketTokenBalanceForClaimableFunding(address,address,uint256,uint256)',
    '808c464f': 'InvalidMarketTokenBalanceForCollateralAmount(address,address,uint256,uint256)',
    'c08bb8a0': 'InvalidMinGlvTokensForFirstGlvDeposit(uint256,uint256)',
    '3f9c06ab': 'InvalidMinMarketTokensForFirstDeposit(uint256,uint256)',
    '1608d41a': 'InvalidMinMaxForPrice(address,uint256,uint256)',
    'e71a51be': 'InvalidNativeTokenSender(address)',
    '05d102a2': 'InvalidOracleProvider(address)',
    '68b49e6c': 'InvalidOracleProviderForToken(address,address)',
    'f9996e9f': 'InvalidOracleSetPricesDataParam(uint256,uint256)',
    'dd51dc73': 'InvalidOracleSetPricesProvidersParam(uint256,uint256)',
    'c1b14c91': 'InvalidOracleSigner(address)',
    '0481a15a': 'InvalidOrderPrices(uint256,uint256,uint256,uint256)',
    '253c8c02': 'InvalidOutputToken(address,address)',
    '3c0ac199': 'InvalidPermitSpender(address,address)',
    'adaa688d': 'InvalidPoolValueForDeposit(int256)',
    '90a6af3b': 'InvalidPoolValueForWithdrawal(int256)',
    '182e30e3': 'InvalidPositionMarket(address)',
    'bff65b3f': 'InvalidPositionSizeValues(uint256,uint256)',
    '663de023': 'InvalidPrimaryPricesForSimulation(uint256,uint256)',
    '9cfea583': 'InvalidReceiver(address)',
    '77e8e698': 'InvalidReceiverForFirstDeposit(address,address)',
    '6eedac2f': 'InvalidReceiverForFirstGlvDeposit(address,address)',
    '4baab816': 'InvalidReceiverForSubaccountOrder(address,address)',
    '370abac2': 'InvalidRelayParams()',
    '530b2590': 'InvalidSetContributorPaymentInput(uint256,uint256)',
    '29a93dc4': 'InvalidSetMaxTotalContributorTokenAmountInput(uint256,uint256)',
    '2a34f7fe': 'InvalidSignature(string)',
    '720bb461': 'InvalidSizeDeltaForAdl(uint256,uint256)',
    '3044992f': 'InvalidSubaccountApprovalNonce(uint256,uint256)',
    '545e8f2b': 'InvalidSubaccountApprovalSubaccount()',
    'cb9bd134': 'InvalidSwapMarket(address)',
    '6ba3dd8b': 'InvalidSwapOutputToken(address,address)',
    '672e4fba': 'InvalidSwapPathForV1(address[],address)',
    'e6b0ddb6': 'InvalidTimelockDelay(uint256)',
    '53f81711': 'InvalidTokenIn(address,address)',
    '81468139': 'InvalidUiFeeFactor(uint256,uint256)',
    'f3d06236': 'InvalidUserNonce(uint256,uint256)',
    '1de2bca4': 'InvalidVersion(uint256)',
    'bc121108': 'LiquidatablePosition(string,int256,int256,int256)',
    'a38dfb2a': 'LongTokensAreNotEqual(address,address)',
    '25e34fa1': 'MarketAlreadyExists(bytes32,address)',
    '6918f9bf': 'MarketNotFound(address)',
    '143e2156': 'MaskIndexOutOfBounds(uint256,string)',
    'f0794a60': 'MaxAutoCancelOrdersExceeded(uint256,uint256)',
    '4e3f62a8': 'MaxBuybackPriceAgeExceeded(uint256,uint256,uint256)',
    '10aeb692': 'MaxCallbackGasLimitExceeded(uint256,uint256)',
    '4f82a998': 'MaxFundingFactorPerSecondLimitExceeded(uint256,uint256)',
    '2bf127cf': 'MaxOpenInterestExceeded(uint256,uint256)',
    'dd9c6b9a': 'MaxOracleTimestampRangeExceeded(uint256,uint256)',
    '6429ff3f': 'MaxPoolAmountExceeded(uint256,uint256)',
    '46169f04': 'MaxPoolUsdForDepositExceeded(uint256,uint256)',
    '2b6e7c3f': 'MaxPriceAgeExceeded(uint256,uint256)',
    '3d1986f7': 'MaxRefPriceDeviationExceeded(address,uint256,uint256,uint256)',
    '519ba753': 'MaxSubaccountActionCountExceeded(address,address,uint256,uint256)',
    '9da36043': 'MaxSwapPathLengthExceeded(uint256,uint256)',
    'faf66f0c': 'MaxTimelockDelayExceeded(uint256)',
    'c10ceac7': 'MaxTotalCallbackGasLimitForAutoCancelOrdersExceeded(uint256,uint256)',
    '043038f0': 'MaxTotalContributorTokenAmountExceeded(address,uint256,uint256)',
    '961b4025': 'MinContributorPaymentIntervalBelowAllowedRange(uint256)',
    'b9dc7b9d': 'MinContributorPaymentIntervalNotYetPassed(uint256)',
    '966fea10': 'MinGlvTokens(uint256,uint256)',
    'f442c0bc': 'MinLongTokens(uint256,uint256)',
    '6ce23460': 'MinMarketTokens(uint256,uint256)',
    '85efb31a': 'MinPositionSize(uint256,uint256)',
    'b4a196af': 'MinShortTokens(uint256,uint256)',
    'cc32db99': 'NegativeExecutionPrice(int256,uint256,uint256,int256,uint256)',
    '53410c43': 'NonAtomicOracleProvider(address)',
    '28f773e9': 'NonEmptyExternalCallsForSubaccountOrder()',
    'ef2df9b5': 'NonEmptyTokensWithPrices(uint256)',
    '730293fd': 'OpenInterestCannotBeUpdatedForSwapOnlyMarket(address)',
    '8cf95e58': 'OracleProviderAlreadyExistsForToken(address)',
    'd84b8ee8': 'OracleTimestampsAreLargerThanRequestExpirationTime(uint256,uint256,uint256)',
    '7d677abf': 'OracleTimestampsAreSmallerThanRequired(uint256,uint256)',
    '730d44b1': 'OrderAlreadyFrozen()',
    '59485ed9': 'OrderNotFound(bytes32)',
    'e09ad0e9': 'OrderNotFulfillableAtAcceptablePrice(uint256,uint256)',
    '9aba92cb': 'OrderNotUpdatable(uint256)',
    '8a4bd513': 'OrderTypeCannotBeCreated(uint256)',
    'cf9319d6': 'OrderValidFromTimeNotReached(uint256,uint256)',
    'b92fb250': 'PnlFactorExceededForLongs(int256,uint256)',
    'b0010694': 'PnlFactorExceededForShorts(int256,uint256)',
    '9f0bc7de': 'PnlOvercorrected(int256,uint256)',
    '426cfff0': 'PositionNotFound(bytes32)',
    'ee919dd9': 'PositionShouldNotBeLiquidated(string,int256,int256,int256)',
    'ded099de': 'PriceAlreadySet(address,uint256,uint256)',
    'd4141298': 'PriceFeedAlreadyExistsForToken(address)',
    'f0641c92': 'PriceImpactLargerThanOrderSize(int256,uint256)',
    'e8266438': 'RequestNotYetCancellable(uint256,uint256,string)',
    'e70f9152': 'SelfTransferNotSupported(address)',
    '032b3d00': 'SequencerDown()',
    '113cfc03': 'SequencerGraceDurationNotYetPassed(uint256,uint256)',
    '950227bb': 'ShiftFromAndToMarketAreEqual(address)',
    'b611f297': 'ShiftNotFound(bytes32)',
    'f54d8776': 'ShortTokensAreNotEqual(address,address)',
    '20b23584': 'SignalTimeNotYetPassed(uint256)',
    '26025b4e': 'SubaccountApprovalDeadlinePassed(uint256,uint256)',
    '9b539f07': 'SubaccountApprovalExpired(address,address,uint256,uint256)',
    '9be0a43c': 'SubaccountNotAuthorized(address,address)',
    '75885d69': 'SwapPriceImpactExceedsAmountIn(uint256,int256)',
    'd2e229e6': 'SwapsNotAllowedForAtomicWithdrawal(uint256,uint256)',
    '7bf8d2b3': 'SyncConfigInvalidInputLengths(uint256,uint256)',
    '624b5b13': 'SyncConfigInvalidMarketFromData(address,address)',
    '8b3d4655': 'SyncConfigUpdatesDisabledForMarket(address)',
    '0798d283': 'SyncConfigUpdatesDisabledForMarketParameter(address,string)',
    '8ea7eb18': 'SyncConfigUpdatesDisabledForParameter(string)',
    'b783c88a': 'ThereMustBeAtLeastOneRoleAdmin()',
    '282b5b70': 'ThereMustBeAtLeastOneTimelockMultiSig()',
    '979dc780': 'TokenTransferError(address,address,uint256)',
    '0e92b837': 'Uint256AsBytesLengthExceeds32Bytes(uint256)',
    '6afad778': 'UnableToGetBorrowingFactorEmptyPoolUsd()',
    'be4729a2': 'UnableToGetCachedTokenPrice(address,address)',
    '11423d95': 'UnableToGetFundingFactorEmptyOpenInterest()',
    '7a0ca681': 'UnableToGetOppositeToken(address,address)',
    '3a61a4a9': 'UnableToWithdrawCollateral(int256)',
    'a35b150b': 'Unauthorized(address,string)',
    '99b2d582': 'UnexpectedBorrowingFactor(uint256,uint256)',
    'cc3459ff': 'UnexpectedMarket()',
    '3b42e952': 'UnexpectedPoolValue(int256)',
    '814991c3': 'UnexpectedPositionState()',
    'e949114e': 'UnexpectedRelayFeeToken(address,address)',
    'a9721241': 'UnexpectedRelayFeeTokenAfterSwap(address,address)',
    '785ee469': 'UnexpectedTokenForVirtualInventory(address,address)',
    '3af14617': 'UnexpectedValidFromTime(uint256)',
    '3784f834': 'UnsupportedOrderType(uint256)',
    '0d0fcc0b': 'UnsupportedRelayFeeToken(address,address)',
    'eadaf93a': 'UsdDeltaExceedsLongOpenInterest(int256,uint256)',
    '2e949409': 'UsdDeltaExceedsPoolValue(int256,uint256)',
    '8af0d140': 'UsdDeltaExceedsShortOpenInterest(int256,uint256)',
    '60737bc0': 'WithdrawalNotFound(bytes32)',
}
# Re-key by the raw 4-byte selector so revert payload prefixes can be looked up directly.
gmx_error_map = {bytes.fromhex(k):v for k,v in gmx_error_map.items()}
|
||||
|
||||
|
||||
def gmx_parse_reason_bytes(e: bytes) -> str:
    """Decode a GMX custom-error revert blob into a human-readable string.

    The first four bytes of *e* are the error selector; any remaining bytes
    are the ABI-encoded error arguments.

    :param e: raw revert data from a failed GMX call
    :return: e.g. ``'DisabledMarket(0x...)'``, or an "Unknown GMX error"
        message when the selector is not in gmx_error_map.
    """
    sig_bytes = e[:4]
    # BUG FIX: look up by the 4-byte selector, not the whole payload —
    # errors that carry arguments (len(e) > 4) would otherwise never match.
    sig = gmx_error_map.get(sig_bytes)
    if sig is None:
        return f'Unknown GMX error {e.hex()}'
    name, types = sig.split('(', 1)
    types = types[:-1]  # drop the trailing ')'
    if len(e) > 4 and types:
        # decode the argument payload using the types from the signature
        values = abi_decoder.decode(types.split(','), e[4:])
        return f'{name}({",".join(map(str, values))})'
    return name
|
||||
446
src/dexorder/gmx/_handle.py
Normal file
446
src/dexorder/gmx/_handle.py
Normal file
@@ -0,0 +1,446 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from copy import copy
|
||||
from datetime import timedelta
|
||||
|
||||
from eth_utils import to_checksum_address
|
||||
from web3.types import EventData
|
||||
|
||||
from ._abi import parse_event_log_data
|
||||
from ._base import GMXPosition, gmx_positions, GMXOrderType, gmx_tk_in_flight, tk_gmx_in_flight, gmx_api, \
|
||||
gmx_markets_by_index_token
|
||||
from ._chaininfo import gmx_chain_info
|
||||
from ._error import gmx_parse_reason_bytes
|
||||
from ._metadata import gmx_update_metadata
|
||||
from .. import dec, from_timestamp
|
||||
from ..addrmeta import address_metadata
|
||||
from ..base import OldTokenDict, OldGMXDict
|
||||
from ..base.chain import current_chain
|
||||
from ..base.order import TrancheKey
|
||||
from ..contract import get_contract_event
|
||||
from ..contract.dexorder import get_dexorder_contract
|
||||
from ..event_handler import update_pool_price
|
||||
from ..final_ohlc import FinalOHLCRepository
|
||||
from ..ohlc import period_name
|
||||
from ..periodic import periodic
|
||||
from ..progressor import BlockProgressor
|
||||
from ..tokens import get_token
|
||||
from ..util import hexstr
|
||||
from ..util.async_util import maywait
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
def gmx_wire_runner_init(runner: BlockProgressor):
    """Hook called at runner-init time; GMX currently needs no init wiring."""
    pass
|
||||
|
||||
def gmx_wire_runner_early(runner: BlockProgressor, backfill: FinalOHLCRepository = None):
    """Wire GMX event triggers and periodic callbacks into the block runner."""
    callback_error = get_contract_event('GMXCallbackHandler', 'GMXCallbackError')
    runner.add_event_trigger(handle_gmxcallbackerror_event, callback_error)
    runner.add_callback(gmx_handle_metadata_update)
    # NOTE(review): when backfill is None no price handler is registered at
    # all, so the `else gmx_update_prices` arm below only fires for a falsy
    # non-None repository — confirm this is the intended behavior.
    if backfill is not None:
        runner.add_callback(create_backfill_handler(backfill) if backfill else gmx_update_prices)
    emitter_addr = gmx_chain_info[current_chain.get().id]['EventEmitter']
    runner.add_event_trigger(handle_gmx_events, log_filter={'address': emitter_addr, })
    runner.add_event_trigger(handle_gmxorderplaced, get_contract_event('GMX', 'GMXOrderPlaced'))
|
||||
|
||||
|
||||
def gmx_wire_runner_late(runner: BlockProgressor):
    """Hook called at the late wiring stage; GMX currently needs nothing here."""
    pass
|
||||
|
||||
def handle_gmxcallbackerror_event(event: EventData):
    """Log a GMXCallbackError emitted by the vault's GMX callback handler."""
    reason = event["args"]["reason"]
    log.error(f'GMX callback error {reason}')
|
||||
|
||||
# GMX orders wait on-chain a few blocks before the GMX Handlers execute or cancel them. Also, liquidation orders can
|
||||
# occur without any associated vault order. Therefore, we take the following approach:
|
||||
#
|
||||
# When orders are placed, a GMXOrderPlaced event is emitted alongside the DexorderSwapPlaced event, providing a mapping
|
||||
# between vault tranche keys and GMX order keys, as well as an in-flight locking mechanism in both the vault and
|
||||
# backend. In a few blocks' time, the GMX Handlers will deal with the order and emit an OrderCreated or OrderCancelled
|
||||
# event in addition to invoking the corresponding callback method on the vault, which unlocks the tranche, adjusts
|
||||
# rate limits, and emits the regular DexorderSwapFilled event, using amountOut as the USD amount filled and amountIn
|
||||
# as the "price," a virtual amount calculated to make the execution price equal amountOut/amountIn, matching the format
|
||||
# for non-inverted swaps.
|
||||
#
|
||||
# Therefore, the regular backend triggers and fill records act normally on GMX orders without modification.
|
||||
#
|
||||
# The backend in-flight lock and tranche-key to gmx-order-key mapping is maintained in gmx_in_flight using a vault event
|
||||
# to open and a GMX order event to close.
|
||||
#
|
||||
# The Position object is maintained by watching GMX PositionIncrease and PositionDecrease events, which capture
|
||||
# liquidations as well as vault-initiated orders to accurately maintain the Position state.
|
||||
|
||||
|
||||
def invalid_vault(vault):
    """Return True when events from this vault should be ignored.

    Ownership filtering (``vault not in vault_owners``) is currently disabled
    for debugging, so every vault is accepted.
    """
    # return vault not in vault_owners
    return False  # todo debug
|
||||
|
||||
|
||||
#
|
||||
# GMXOrderPlaced along with OrderCancelled and OrderExecuted maintain the gmx_in_flight lock and mapping to a tranche key
|
||||
#
|
||||
|
||||
def handle_gmxorderplaced(event: EventData):
    """Open the in-flight lock mapping a vault tranche to its GMX order key.

    GMXOrderPlaced is emitted alongside DexorderSwapPlaced and supplies the
    GMX order key so that later OrderExecuted/OrderCancelled events can be
    matched back to the originating tranche.
    """
    # event GMXOrderPlaced(uint64 orderIndex, uint8 trancheIndex, bytes32 gmxOrderKey);
    log.info(f'GMXOrderPlaced {event}')
    vault = event['address']
    if invalid_vault(vault):
        return
    args = event['args']
    # register the gmx order key (as a hex string) as in-flight
    keystr = hexstr(args['gmxOrderKey'])
    tk = TrancheKey(vault, args['orderIndex'], args['trancheIndex'])
    # start gmx in flight; closed again by end_gmx_in_flight()
    gmx_tk_in_flight[keystr] = tk
    tk_gmx_in_flight[tk] = keystr
|
||||
|
||||
|
||||
def handle_ordercancelled_event(event: dict, data: dict):
    """Handle a GMX OrderCancelled event: release the in-flight tranche lock.

    :param event: raw EventEmitter log
    :param data: parsed event payload (see parse_event_log_data)
    """
    log.info(f'GMX order cancelled {data}')
    vault = data['account']
    if invalid_vault(vault):
        return
    reason = gmx_parse_reason_bytes(data['reasonBytes'])
    # BUG FIX: gmx_tk_in_flight is keyed by hex strings (see
    # handle_gmxorderplaced), but data['key'] is raw bytes — convert before
    # the membership test, otherwise in-flight orders are never matched.
    keystr = hexstr(data['key'])
    if keystr not in gmx_tk_in_flight:
        log.warning(f'GMX order cancelled but not in flight: {keystr}')
        return
    # pass the original key; end_gmx_in_flight() normalizes it itself
    end_gmx_in_flight(data['key'])
    # double quotes inside the f-string keep this compatible with Python < 3.12
    log.info(f'GMX order cancelled due to {reason} in tx {data["tx"]}')
|
||||
|
||||
|
||||
def handle_orderexecuted_event(event: dict, data: dict):
    """Handle a GMX OrderExecuted event: release the in-flight tranche lock.

    :param event: raw EventEmitter log
    :param data: parsed event payload (see parse_event_log_data)
    """
    log.info(f'GMX order executed {data}')
    vault = data['account']
    if invalid_vault(vault):
        return
    # BUG FIX: gmx_tk_in_flight is keyed by hex strings (see
    # handle_gmxorderplaced), but data['key'] is raw bytes — convert before
    # the membership test, otherwise in-flight orders are never matched.
    keystr = hexstr(data['key'])
    if keystr not in gmx_tk_in_flight:
        # todo handle liquidation either here or with PositionDecrease events
        log.warning(f'GMX order executed but not in flight: {keystr}')
        return
    # pass the original key; end_gmx_in_flight() normalizes it itself
    end_gmx_in_flight(data['key'])
|
||||
|
||||
|
||||
def end_gmx_in_flight(gmx_order_key):
    """Close the in-flight lock for a GMX order, removing both directions of
    the tranche-key ↔ gmx-order-key mapping."""
    keystr = hexstr(gmx_order_key)
    tk = gmx_tk_in_flight.pop(keystr)
    del tk_gmx_in_flight[tk]
|
||||
|
||||
#
|
||||
# GMXPositionIncrease and GMXPositionDecrease events maintain our Position records
|
||||
#
|
||||
|
||||
def handle_position_event(event: dict, data: dict, is_increase: bool):
    """Maintain the per-vault GMXPosition list from a PositionIncrease or
    PositionDecrease event (these also capture keeper liquidations).

    :param event: raw EventEmitter log
    :param data: parsed event payload; example below
    :param is_increase: True for PositionIncrease, False for PositionDecrease
    """
    log.info(f'GMX position {"increase" if is_increase else "decrease"} {event}')
    # Example parsed payload:
    # {'account': '0xdfc16a4247677d723d897aa4fe865a02f5d78746',
    # 'borrowingFactor': 250545812647447573795593810338,
    # 'collateralAmount': 1019200,
    # 'collateralDeltaAmount': 1019200,
    # 'collateralToken': '0xaf88d065e77c8cc2239327c5edb3a432268e5831',
    # 'collateralTokenPrice.max': 999856563986601850000000,
    # 'collateralTokenPrice.min': 999856563986601850000000,
    # 'event': 'PositionIncrease',
    # 'executionPrice': 3816407734365198,
    # 'fundingFeeAmountPerSize': 430546959972637644839,
    # 'increasedAtTime': 1753748680,
    # 'indexTokenPrice.max': 3817347116613155,
    # 'indexTokenPrice.min': 3817347116613155,
    # 'isLong': True,
    # 'longTokenClaimableFundingAmountPerSize': 4117446384759965489999004204,
    # 'market': '0x70d95587d40a2caf56bd97485ab3eec10bee6336',
    # 'orderKey': b'2\xe6\x8a\x07\xe9x\x839\x8f\xdd\xd5j\x16\x88\x80\xff[HY\xadk\x0f\xb4n3\xfe\xa2.\xd6\x97\x90\x9b',
    # 'orderType': 2,
    # 'positionKey': b"\xa8r\xc6\xcf^\x89\xf8k\xfa='\xe9\x19\x12\x11\xb8|;k3Df8\xee^\x9a\x9f)\xef8\x8c\x86",
    # 'priceImpactAmount': 128960267235,
    # 'priceImpactUsd': 492286104290598018742093888,
    # 'sender': '0xe68caaacdf6439628dfd2fe624847602991a31eb',
    # 'shortTokenClaimableFundingAmountPerSize': 7250294981528901831,
    # 'sizeDeltaInTokens': 524053020328728,
    # 'sizeDeltaUsd': 2000000000000000000000000000000,
    # 'sizeInTokens': 524053020328728,
    # 'sizeInUsd': 2000000000000000000000000000000,
    # 'tx': '0x74e3aee1e4a92d3fe4e05d8050197c080c51dc0170ac12e8e90dbbe9fb3cc4b5'}

    vault = to_checksum_address(data['account'])
    if invalid_vault(vault):
        return
    # NOTE(review): order_type, gmx_order_key, collateral_amount,
    # collateral_delta and price are extracted but currently unused —
    # presumably kept for upcoming accounting logic; confirm.
    order_type = GMXOrderType(data['orderType'])
    gmx_order_key = data['orderKey']
    is_long = data['isLong']
    size_delta = data['sizeDeltaUsd']
    size = data['sizeInUsd']
    market = data['market']
    collateral_token = data['collateralToken']
    collateral_amount = data['collateralAmount']
    collateral_delta = data['collateralDeltaAmount']
    price = data['executionPrice']

    # Positions are identified by (market, collateral token, long/short).
    key = GMXPosition.Key(market, collateral_token, is_long)
    positions = gmx_positions.get(vault)
    pos = GMXPosition(key.market_token, key.collateral_token, key.is_long)
    if positions is None:
        positions = [pos]
    else:
        # Copy-on-write: work on fresh list/position objects and write the
        # whole list back at the end — presumably so assignment into
        # gmx_positions triggers whatever change-tracking backs it; confirm.
        positions = list(positions)
        if pos in positions:
            old = [p for p in positions if p==pos][0]
            positions.remove(old)
            pos = copy(old)
        positions.append(pos)
    # Short positions carry negative size (see the error messages below), so a
    # "buy" is either increasing a long or decreasing a short.
    buy = is_long == is_increase
    if buy:
        if -size_delta < pos.size < 0:
            log.error(f'GMX short position becoming positive: {pos} + {size_delta}')
        pos.size += size_delta
    else:
        if 0 < pos.size < size_delta:
            log.error(f'GMX long position becoming negative: {pos} - {size_delta}')
        pos.size -= size_delta
    # NOTE(review): sizeInUsd looks unsigned while pos.size is signed for
    # shorts — this sanity check may fire on every short position; confirm.
    if pos.size != size:
        log.error(f'GMX position size mismatch: {pos} != {size}')
    # drop fully-closed positions; drop the vault entry when none remain
    if not pos.size:
        positions.remove(pos)
    if not positions:
        del gmx_positions[vault]
    else:
        gmx_positions[vault] = positions
|
||||
|
||||
|
||||
# todo DANNY: if a position is liquidated, should I cancel pending orders in that market?
|
||||
|
||||
|
||||
def handle_positionincrease_event(event: dict, data: dict):
    """Dispatch a GMX PositionIncrease event to the shared position handler."""
    handle_position_event(event, data, is_increase=True)
|
||||
|
||||
def handle_positiondecrease_event(event: dict, data: dict):
    """Dispatch a GMX PositionDecrease event to the shared position handler."""
    handle_position_event(event, data, is_increase=False)
|
||||
|
||||
# def handle_depositcreated_event(event: dict, data: dict):
|
||||
# log.info(f'GMX deposit created {event}')
|
||||
#
|
||||
# def handle_depositexecuted_event(event: dict, data: dict):
|
||||
# log.info(f'GMX deposit executed {event}')
|
||||
#
|
||||
# def handle_withdrawalcreated_event(event: dict, data: dict):
|
||||
# log.info(f'GMX withdrawal created {event}')
|
||||
#
|
||||
# def handle_withdrawalexecuted_event(event: dict, data: dict):
|
||||
# log.info(f'GMX withdrawal executed {event}')
|
||||
|
||||
|
||||
# Dispatch table for GMX EventEmitter events, consumed by handle_gmx_events():
# a callable handles the event, None means "recognized but deliberately
# ignored", and names missing from the table are logged at debug level.
event_handlers = {
    # oracle / pool valuation
    'OraclePriceUpdate': None,

    'MarketPoolValueInfo': None,
    'MarketPoolValueUpdated': None,

    # deposits & withdrawals
    'DepositCreated': None,
    'DepositExecuted': None,
    'WithdrawalCreated': None,
    'WithdrawalExecuted': None,

    # order lifecycle — only the terminal events are handled
    'OrderCreated': None,
    'OrderUpdated': None,
    'OrderCancelled': handle_ordercancelled_event,
    'OrderExecuted': handle_orderexecuted_event,
    'OrderSizeDeltaAutoUpdated': None,  # ADL?
    'OrderCollateralDeltaAmountAutoUpdated': None,

    # position bookkeeping
    'PositionIncrease': handle_positionincrease_event,
    'PositionDecrease': handle_positiondecrease_event,
    'PositionFeesCollected': None,

    'PositionImpactPoolAmountUpdated': None,
    'PositionImpactPoolDistributed': None,
    'VirtualPositionInventoryUpdated': None,

    # funding & claimables
    'ClaimableFeeAmountUpdated': None,
    'ClaimableFundingUpdated': None,
    'ClaimableFundingAmountPerSizeUpdated': None,
    'FundingFeeAmountPerSizeUpdated': None,
    'FundingFeesClaimed': None,

    'CollateralSumUpdated': None,
    'CollateralClaimed': None,

    'OpenInterestInTokensUpdated': None,
    'OpenInterestUpdated': None,

    # protocol fees
    'SetAvailableFeeAmount': None,
    'BuybackFees': None,
    'FeesClaimed': None,

    'ExecutionFeeRefundCallback': None,

    'PoolAmountUpdated': None,

    # swaps
    'SwapInfo': None,
    'SwapFeesCollected': None,
    'SwapImpactPoolAmountUpdated': None,
    'VirtualSwapInventoryUpdated': None,

    'CumulativeBorrowingFactorUpdated': None,

    'KeeperExecutionFee': None,
    'ExecutionFeeRefund': None,

    # config
    'SetUint': None,
    # SetBytes32 presumably and others...
    'SyncConfig': None,

    # shifts
    'ShiftCreated': None,
    'ShiftExecuted': None,

    # GLV vaults
    'GlvValueUpdated': None,
    'GlvDepositCreated': None,
    'GlvDepositExecuted': None,
    'GlvWithdrawalCreated': None,
    'GlvWithdrawalExecuted': None,
    'GlvShiftCreated': None,
    'GlvShiftExecuted': None,

    # referrals
    'AffiliateRewardUpdated': None,
    'AffiliateRewardClaimed': None,

    # subaccounts
    'SetMaxAllowedSubaccountActionCount': None,
    'IncrementSubaccountActionCount': None,
    'SetSubaccountAutoTopUpAmount': None,
    'SubaccountAutoTopUp': None,

}
|
||||
|
||||
|
||||
async def handle_gmx_events(events: list[dict]):
    """Parse raw EventEmitter logs and dispatch each one via event_handlers.

    Event names with no table entry are logged at debug level; names mapped
    to None are recognized but deliberately ignored.
    """
    for raw_event in events:
        data = parse_event_log_data(raw_event)
        log.info(f'GMX Event {data}')
        event_name = data['event']
        if event_name not in event_handlers:
            log.debug(f'Unknown event {event_name}')
            continue
        handler = event_handlers[event_name]
        if handler is not None:
            await maywait(handler(raw_event, data))
|
||||
|
||||
|
||||
#
|
||||
# Metadata update triggers
|
||||
# todo These are here because they used to be blockchain event handlers and should be once again...
|
||||
#
|
||||
|
||||
# set True after the first successful metadata fetch; until then, failures are fatal
initialized = False


@periodic(timedelta(hours=1))
async def gmx_handle_metadata_update():
    """Refresh GMX market/token metadata once per hour.

    The first fetch must succeed (the rest of the system depends on the
    metadata being present), so an initial failure propagates; subsequent
    failures are logged and retried on the next period.
    """
    global initialized
    try:
        await gmx_update_metadata()
        initialized = True
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit and could interfere with shutdown.
        if not initialized:
            raise
        log.exception('Exception in gmx_handle_metadata_update()')
|
||||
|
||||
|
||||
# @periodic(timedelta(seconds=1))
|
||||
# async def gmx_handle_price_update():
|
||||
# updates = await fetch_price_updates()
|
||||
# # ticker updates have only one price per addr so we can parallelize setting prices
|
||||
# await asyncio.gather(*[update_pool_price(addr, time, price, 30) for addr, time, price in updates])
|
||||
|
||||
|
||||
def create_backfill_handler(ohlcs: FinalOHLCRepository):
    """Build the 1-second periodic GMX price poller that also backfills OHLC
    history for any token not yet present in the repository."""

    @periodic(timedelta(seconds=1))
    async def gmx_handle_price_update_with_backfill():
        updates = await fetch_price_updates()
        unseen = [addr for addr, _, _ in updates if not ohlcs.has_symbol(addr)]

        if unseen:
            log.info(f'Backfilling {len(unseen)} new GMX tokens')
            await asyncio.gather(*(backfill_token(ohlcs, a) for a in unseen))

        for addr, time, price in updates:
            ohlcs.update(addr, time, price)

    return gmx_handle_price_update_with_backfill
|
||||
|
||||
|
||||
def push_candle(ohlcs, addr, period, candle):
    """Store one raw GMX candle — (timestamp, open, high, low, close) — into
    the repository after converting to datetime/decimal types."""
    start, *raw_prices = candle
    ohlcs.update_ohlc(addr, period, from_timestamp(start), *(dec(p) for p in raw_prices))
|
||||
|
||||
|
||||
# Candle periods we maintain for GMX tokens — presumably mirroring the chart
# intervals the GMX API serves (see backfill_token); confirm against the API.
GMX_OHLC_PERIODS = [
    timedelta(minutes=1),
    timedelta(minutes=5),
    timedelta(minutes=15),
    timedelta(hours=1),
    timedelta(hours=4),
    timedelta(days=1),
]
|
||||
|
||||
async def backfill_token(ohlcs: FinalOHLCRepository, addr: str):
    """Load historical candles for a newly-seen GMX token into `ohlcs`."""
    token = await get_token(addr)
    addr = token['address']
    symbol = token['symbol']
    for period in GMX_OHLC_PERIODS:
        # Polling a large window is the only history method GMX provides :( It's also how their web client works!
        interval = period_name(period).lower()
        response = gmx_api('prices/candles', tokenSymbol=symbol, period=interval, limit=10_000)
        if 'error' in response:
            # "unsupported period" simply means GMX keeps no chart at this interval.
            if not response['error'].startswith('unsupported period'):
                log.warning(f'Could not query token backfill for {token["symbol"]}: {response["error"]}')
            continue
        # Oldest candles first so newer data lands last.
        for candle in reversed(response['candles']):
            push_candle(ohlcs, addr, period, candle)
    log.info(f'Backfilled new GMX token {token["symbol"]}')
|
||||
|
||||
|
||||
@periodic(timedelta(seconds=1))
async def gmx_update_prices():
    """Propagate fresh index-token prices onto every market tracking them."""
    updates = await fetch_price_updates()
    for token_addr, ts, px in updates:
        for market_addr in gmx_markets_by_index_token.get(token_addr, []):
            # NOTE(review): 'index' is read here as a dict carrying 'decimals',
            # but gmx_detect_markets stores it as an address string — confirm.
            info: OldGMXDict = address_metadata[market_addr]['index']
            decimals = info['decimals']
            scaled = px * dec(10) ** decimals
            await update_pool_price(market_addr, ts, scaled, decimals)
|
||||
|
||||
|
||||
async def fetch_price_updates():
    """Read on-chain GMX oracle prices for every tracked index token.

    Returns a list of (token_address, datetime, mid_price) tuples.  GMX quotes
    carry 30 decimal places, hence the 10**-30 scaling of the bid/ask mid.
    """
    tokens = list(gmx_markets_by_index_token)
    quotes = await get_dexorder_contract().getGMXPrices(tokens)
    scale = dec(10) ** -30
    results = []
    for addr, (ts, bid, ask) in zip(tokens, quotes):
        mid = (dec(bid) + dec(ask)) / 2 * scale
        results.append((addr, from_timestamp(ts), mid))
    return results
|
||||
|
||||
async def fetch_price_updates_using_gmx_api():
    """Alternate price source reading the GMX REST tickers endpoint.

    Each ticker entry looks like::

        {
            "tokenAddress": "0x3Eea56A1ccCdbfB70A26aD381C71Ee17E4c8A15F",
            "tokenSymbol": "BOME",
            "minPrice": "1621019778803375000000",
            "maxPrice": "1621534421901125000000",
            "updatedAt": 1749849326251,
            "timestamp": 1749849325
        }

    Tokens we have no metadata for are skipped.
    """
    # todo use on-chain oracle events
    updates = []
    for ticker in gmx_api('prices/tickers'):
        addr = ticker['tokenAddress']
        if addr not in address_metadata:
            continue
        # GMX prices use 30 decimal places
        mid = (dec(ticker['minPrice']) + dec(ticker['maxPrice'])) / 2 * dec(10) ** dec(-30)
        updates.append((addr, from_timestamp(ticker['timestamp']), mid))
    return updates
|
||||
|
||||
93
src/dexorder/gmx/_metadata.py
Normal file
93
src/dexorder/gmx/_metadata.py
Normal file
@@ -0,0 +1,93 @@
|
||||
__all__ = ['gmx_update_metadata']
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import re
|
||||
from typing import Optional
|
||||
|
||||
from dexorder import ADDRESS_0
|
||||
from dexorder.addrmeta import address_metadata
|
||||
from dexorder.base import OldTokenDict, OldGMXDict
|
||||
from dexorder.base.chain import current_chain
|
||||
from dexorder.base.orderlib import Exchange
|
||||
from dexorder.gmx._base import gmx_api, gmx_markets_by_index_token
|
||||
from dexorder.gmx._contract import get_gmx_contract
|
||||
from dexorder.tokens import get_token
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def gmx_update_metadata():
    """Entry point for the periodic metadata refresh: (re)discover GMX markets."""
    log.info('Updating GMX metadata')
    await gmx_detect_markets()
|
||||
|
||||
|
||||
token_response: Optional[dict] = None
|
||||
|
||||
async def gmx_get_token(addr: str):
    """Resolve token metadata for a GMX index-token address.

    Tries the normal token lookup first; synthetic index tokens have no ERC20
    contract at their address, so the GMX REST API is the only source of their
    metadata.  Returns a token dict, or None if GMX does not know the address
    either.
    """
    # The GMX API appears to be the only way to obtain the index token metadata, since there is no corresponding ERC20
    # on-chain at the synthetic address.
    found = await get_token(addr, squelch=True)  # use our normal lookup first
    if found is not None:
        return found
    global token_response
    # Refresh the cached API response only when there is none, or when the
    # cached one lacks this address.  (The previous check tested `addr` for
    # membership in a list of dicts, which never matched, so the API was
    # re-queried on every call.)
    if token_response is None or not any(info['address'] == addr for info in token_response['tokens']):
        token_response = gmx_api('tokens')
    for info in token_response['tokens']:
        if info['address'] == addr:
            synthetic = info.get('synthetic', False)
            if not synthetic:
                log.warning('loading non-synthetic token via GMX API')
            name = f'GMX {info["symbol"]}'
            if synthetic:
                name += ' Synthetic'
            chain_id = current_chain.get().id
            # Tokens whose symbol mentions "deprecated" are not approved for trading.
            approved = not re.search(r'deprecated', info['symbol'], re.IGNORECASE)
            token = OldTokenDict(type='Token', chain=chain_id, address=info['address'], name=name,
                                 symbol=info['symbol'], decimals=info['decimals'],
                                 approved=approved)
            address_metadata[info['address']] = token
            return token
    log.error(f'Could not find index token {addr} in GMX tokens API')
    return None
|
||||
|
||||
|
||||
async def gmx_detect_markets():
    """Discover GMX markets on-chain and register any new ones.

    Reads the market list from the GMX Reader contract, filters out markets we
    already track and spot-only/partial markets, then initializes each new
    market concurrently (decimals, max leverage, index-token mapping) and
    warms the token cache for its constituent tokens.
    """
    ds = get_gmx_contract('DataStore')
    reader = get_gmx_contract('Reader')
    # assumes fewer than 1000 markets exist — TODO confirm / paginate
    market_info = await reader.getMarkets(ds.address, 0, 1000)
    markets = [
        OldGMXDict(type='GMX', chain=current_chain.get().id, exchange=Exchange.GMX.value, address=market_token,
                   index=index_token, long=long_token, short=short_token, decimals=0, leverage=0)
        for market_token, index_token, long_token, short_token in market_info
        # discard spot-only markets that do not have an index token
        # todo support single-asset markets
        if market_token != ADDRESS_0 and index_token != ADDRESS_0 and
           long_token != ADDRESS_0 and short_token != ADDRESS_0 and market_token not in address_metadata
    ]
    # Drop disabled markets (checked concurrently) and anything registered meanwhile.
    market_disabled = await asyncio.gather(*[ds.is_market_disabled(m['address']) for m in markets])
    new_markets = [m for m,d in zip(markets, market_disabled) if not d and m['address'] not in address_metadata]

    async def init_market(m: OldGMXDict):
        # Fill in per-market metadata: decimals come from the index token,
        # leverage from the inverse of the minimum collateral factor.
        min_collateral_factor, token = await asyncio.gather(
            ds.min_collateral_factor(m['address']), gmx_get_token(m['index']))
        m['decimals'] = token['decimals']
        m['leverage'] = round(1 / min_collateral_factor)
        address_metadata[m['address']] = m
        # Append to the index-token -> markets mapping without duplicating.
        cur = gmx_markets_by_index_token.get(m['index'])
        if cur is None:
            gmx_markets_by_index_token[m['index']] = [m['address']]
        else:
            if m['address'] not in cur:
                gmx_markets_by_index_token[m['index']] = cur + [m['address']]

    await asyncio.gather(*[init_market(m) for m in new_markets])
    # Warm the token cache for market, long and short tokens of the new markets.
    token_addrs = set(t for m in new_markets for t in (m['address'], m['long'], m['short']))
    await asyncio.gather(*[get_token(t) for t in token_addrs])

    # Log the markets
    def t(addr):
        # Best-effort symbol for logging; falls back to the raw address.
        # noinspection PyTypedDict
        return address_metadata[addr]['symbol'] if addr in address_metadata and address_metadata[addr] else addr
    for m in new_markets:
        log.info(f'GMX:{m["address"]} {t(m["index"])}/USD [{t(m["long"])}-{t(m["short"])}] {m["leverage"]}x')
|
||||
44
src/dexorder/marks.py
Normal file
44
src/dexorder/marks.py
Normal file
@@ -0,0 +1,44 @@
|
||||
"""
|
||||
"marks" are mark-to-market USD values of a selected set of tokens called quote tokens. Publishing a set of USD marks
|
||||
for the quote tokens allows almost any token to be marked to USD via one hop.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import time
|
||||
|
||||
from dexorder import dec, NATIVE_TOKEN, config
|
||||
from dexorder.base.chain import current_chain
|
||||
from dexorder.blockstate import BlockDict
|
||||
from dexorder.pools import quotes, mark_to_market
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
def pub_marks(_s,k,v):
    # Publish callback for the marks BlockDict: maps (series, key, value) to
    # the (channel, topic, payload) triple consumed by the pub/sub layer.
    # _s is the series name and is unused here.
    chain_id = current_chain.get().id
    return str(chain_id), 'marks.usd', (chain_id, k, str(v))
|
||||
|
||||
|
||||
marks: BlockDict[str, dec] = BlockDict('mark.usd', db=False, redis=True, pub=pub_marks, value2str=str)
|
||||
|
||||
class RateLimiter:
    """Monotonic-clock throttle: ready() returns True at most once per `rate` seconds."""

    def __init__(self, rate: float):
        # Minimum number of seconds between successive True results.
        self.rate = rate
        # monotonic timestamp of the last accepted call; 0.0 means "never".
        self.last_update = 0.0

    def ready(self):
        """Return True (and restart the interval) once the interval has elapsed."""
        now = time.monotonic()
        elapsed = now - self.last_update
        if elapsed >= self.rate:
            self.last_update = now
            return True
        return False
|
||||
|
||||
# Throttles mark publication to at most once per configured interval.
mark_publish_rate = RateLimiter(config.mark_publish_seconds)


def publish_marks():
    """Mark the native token and every quote token to USD and publish changes."""
    if not mark_publish_rate.ready():
        return
    for token_addr in [NATIVE_TOKEN] + quotes:
        usd = mark_to_market(token_addr)
        if usd is None:
            continue
        # overwrite=False checks the previous value and does not generate a
        # diff if the values match, preventing excessive updates to Redis.
        marks.setitem(token_addr, usd, overwrite=False)
|
||||
@@ -10,16 +10,70 @@ from dexorder import config
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Maximum number of queued Redis commands before the pipeline is flushed.
BATCH_SIZE = 1_000


class PipelineProxy:
    """Wraps a Redis Pipeline and auto-executes it every BATCH_SIZE operations.

    The set/hash mutators split large argument lists into chunks so that no
    single pipelined command exceeds the batch budget.  Any attribute not
    defined here is delegated to the underlying pipeline.
    """

    def __init__(self, pipe: Pipeline):
        self.pipe = pipe
        # Number of operations queued since the last execute().
        self.ops = 0

    async def push(self, num=1):
        # Record `num` queued operations and flush once the batch is full.
        self.ops += num
        if self.ops >= BATCH_SIZE:
            self.ops = 0
            await self.pipe.execute()

    async def sadd(self, series, *keys):
        # Chunked SADD: each chunk fits within the remaining batch budget.
        while keys:
            most = min(BATCH_SIZE-self.ops, len(keys))
            assert most > 0
            send = keys[:most]
            keys = keys[most:]
            await self.pipe.sadd(series, *send)
            await self.push(len(send))

    async def srem(self, series, *keys):
        # Chunked SREM, same batching scheme as sadd().
        while keys:
            most = min(BATCH_SIZE-self.ops, len(keys))
            assert most > 0
            send = keys[:most]
            keys = keys[most:]
            await self.pipe.srem(series, *send)
            await self.push(len(send))

    async def hset(self, series, *, mapping):
        # Chunked HSET over the mapping's items.
        items = list(mapping.items())
        while items:
            most = min(BATCH_SIZE-self.ops, len(items))
            assert most > 0
            send = items[:most]
            items = items[most:]
            await self.pipe.hset(series, mapping={k:v for k,v in send})
            await self.push(len(send))

    async def hdel(self, series, *keys):
        # Chunked HDEL, same batching scheme as sadd().
        while keys:
            most = min(BATCH_SIZE-self.ops, len(keys))
            assert most > 0
            send = keys[:most]
            keys = keys[most:]
            await self.pipe.hdel(series, *send)
            await self.push(len(send))

    def __getattr__(self, item):
        # Fall through to the wrapped pipeline for everything else.
        return getattr(self.pipe, item)
|
||||
|
||||
|
||||
class Memcache:
|
||||
@staticmethod
|
||||
@asynccontextmanager
|
||||
async def batch():
|
||||
async def batch(transaction=True):
|
||||
old_redis: Redis = current_redis.get()
|
||||
pipe: Pipeline = old_redis.pipeline()
|
||||
pipe = old_redis.pipeline(transaction=transaction)
|
||||
# noinspection PyTypeChecker
|
||||
current_redis.set(pipe)
|
||||
try:
|
||||
yield pipe
|
||||
yield PipelineProxy(pipe)
|
||||
await pipe.execute()
|
||||
finally:
|
||||
current_redis.set(old_redis)
|
||||
|
||||
@@ -12,7 +12,7 @@ from dexorder.blockstate.blockdata import SeriesCollection, BlockData
|
||||
from dexorder.blockstate.diff import DiffEntryItem
|
||||
from dexorder.blockstate.fork import Fork
|
||||
from dexorder.blockstate.state import compress_diffs
|
||||
from dexorder.memcache import current_redis, memcache
|
||||
from dexorder.memcache import current_redis, memcache, PipelineProxy
|
||||
from dexorder.util import hexstr
|
||||
from dexorder.util.async_util import maywait
|
||||
from dexorder.util.json import json_encoder
|
||||
@@ -40,11 +40,11 @@ class RedisState (SeriesCollection):
|
||||
for series in self.datas.keys():
|
||||
for k, v in state.iteritems(fork, series):
|
||||
diffs.append(DiffItem(series, k, v))
|
||||
await self.save(fork, diffs)
|
||||
await self.save(fork, diffs, use_transaction=False, skip_pubs=True) # use_transaction=False if the data is too big
|
||||
|
||||
|
||||
# noinspection PyAsyncCall
|
||||
async def save(self, fork: Fork, diffs: Reversible[Union[DiffItem, DiffEntryItem]]):
|
||||
async def save(self, fork: Fork, diffs: Reversible[Union[DiffItem, DiffEntryItem]], *, use_transaction=True, skip_pubs=False):
|
||||
# the diffs must be already compressed such that there is only one action per key
|
||||
chain = current_chain.get()
|
||||
chain_id = chain.id
|
||||
@@ -91,22 +91,23 @@ class RedisState (SeriesCollection):
|
||||
hsets[series][key] = value
|
||||
else:
|
||||
raise NotImplementedError
|
||||
async with memcache.batch() as r:
|
||||
r: Pipeline
|
||||
|
||||
async with memcache.batch(use_transaction) as r:
|
||||
r: PipelineProxy
|
||||
for series, keys in sadds.items():
|
||||
r.sadd(series, *keys)
|
||||
await r.sadd(series, *keys)
|
||||
for series, keys in sdels.items():
|
||||
r.srem(series, *keys)
|
||||
await r.srem(series, *keys)
|
||||
for series, kvs in hsets.items():
|
||||
r.hset(series, mapping=kvs)
|
||||
await r.hset(series, mapping=kvs)
|
||||
for series, keys in hdels.items():
|
||||
r.hdel(series, *keys)
|
||||
await r.hdel(series, *keys)
|
||||
block_series = f'{chain_id}|head'
|
||||
headstr = hexstr(fork.head)
|
||||
r.json(json_encoder).set(block_series,'$',[fork.height, headstr])
|
||||
pubs.append((str(chain_id), 'head', [fork.height, headstr]))
|
||||
# separate batch for pubs
|
||||
if pubs:
|
||||
if pubs and not skip_pubs:
|
||||
await publish_all(pubs)
|
||||
|
||||
|
||||
|
||||
@@ -26,10 +26,10 @@ import sys
|
||||
from typing import Union, Iterable, Optional
|
||||
|
||||
from dexorder import config, NARG
|
||||
from dexorder.base import OldPoolDict, OldTokenDict
|
||||
from dexorder.base.chain import current_chain
|
||||
from dexorder.database.model import Token, Pool
|
||||
from dexorder.database.model.pool import OldPoolDict, PoolDict
|
||||
from dexorder.database.model.token import OldTokenDict, TokenDict
|
||||
from dexorder.database.model.pool import PoolDict
|
||||
from dexorder.util import json
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
@@ -50,7 +50,6 @@ def dump_tokens(out, tokens, include_unapproved=False):
|
||||
approved_addrs = set()
|
||||
had_output = False
|
||||
for token in tokens:
|
||||
token: Token
|
||||
if isinstance(token, Token):
|
||||
token: Token
|
||||
a = token.address
|
||||
|
||||
@@ -342,11 +342,17 @@ class OHLCRepository:
|
||||
def add_symbol(symbol: str, period: timedelta = None):
|
||||
if period is not None:
|
||||
if (symbol, period) not in recent_ohlcs:
|
||||
recent_ohlcs[(symbol, period)] = [] # setting an empty value will initiate price capture
|
||||
recent_ohlcs[OHLCKey(symbol, period)] = [] # setting an empty value will initiate price capture
|
||||
else:
|
||||
for period in OHLC_PERIODS:
|
||||
if (symbol, period) not in recent_ohlcs:
|
||||
recent_ohlcs[(symbol, period)] = []
|
||||
recent_ohlcs[OHLCKey(symbol, period)] = []
|
||||
|
||||
|
||||
@staticmethod
|
||||
def has_symbol(symbol: str, period: timedelta):
|
||||
return OHLCKey(symbol, period) in recent_ohlcs
|
||||
|
||||
|
||||
async def update_all(self, symbol: str, time: datetime, price: dec, *, create: bool = True):
|
||||
""" the update_all() and update() methods generate bars for the recent_ohlcs BlockDict """
|
||||
@@ -359,45 +365,43 @@ class OHLCRepository:
|
||||
if price is None, then bars are advanced based on the time but no new price is added to the series.
|
||||
"""
|
||||
if OHLC_LIMIT_POOLS_DEBUG is not None and (symbol,period) not in OHLC_LIMIT_POOLS_DEBUG:
|
||||
return
|
||||
return None
|
||||
# logname = f'{symbol} {period_name(period)}'
|
||||
# log.debug(f'Updating OHLC {logname} {minutely(time)} {price}')
|
||||
if price is not None:
|
||||
self.quotes[symbol] = timestamp(time), str(price)
|
||||
key = symbol, period
|
||||
key = OHLCKey(symbol, period)
|
||||
# recent_ohlcs holds a list of "recent" NativeOHLC's stored as blockdata. we try to keep the recent array long
|
||||
# enough to extend prior the root block time
|
||||
historical: Optional[list[NativeOHLC]] = recent_ohlcs.get(key)
|
||||
# log.debug(f'got recent {historical}')
|
||||
if not historical:
|
||||
if create is False or price is None:
|
||||
return # do not track symbols which have not been explicity set up
|
||||
historical = []
|
||||
return None # do not track symbols which have not been explicity set up
|
||||
updated = [NativeOHLC(ohlc_start_time(time, period), price, price, price, price)]
|
||||
# log.debug(f'\tcreated new bars {updated}')
|
||||
else:
|
||||
updated = update_ohlc(historical[-1], period, time, price)
|
||||
# drop any historical bars that are older than we need
|
||||
# oldest_needed = cover the root block time plus one period prior
|
||||
root_branch = current_blockstate.get().root_branch
|
||||
root_hash = root_branch.head
|
||||
if root_hash is not None:
|
||||
root_timestamp = await get_block_timestamp(root_hash)
|
||||
oldest_needed = from_timestamp(root_timestamp) - period
|
||||
# noinspection PyTypeChecker
|
||||
trim = (oldest_needed - historical[0].start) // period
|
||||
if trim > 0:
|
||||
historical = historical[trim:]
|
||||
|
||||
# now overlap the updated data on top of the historical data
|
||||
if not historical or not updated:
|
||||
updated = historical + updated
|
||||
else:
|
||||
# overlap the updated OHLC's on top of the historical ones
|
||||
last_bar = historical[-1].start
|
||||
first_updated = updated[0].start
|
||||
overlap = (first_updated - last_bar) // period + 1
|
||||
updated = historical[:-overlap] + updated if overlap > 0 else historical + updated
|
||||
# log.debug(f'\tnew recents: {updated}')
|
||||
|
||||
# drop any bars that are older than we need
|
||||
# oldest_needed = cover the root block time plus one period prior
|
||||
root_branch = current_blockstate.get().root_branch
|
||||
root_hash = root_branch.head
|
||||
if root_hash is not None:
|
||||
root_timestamp = await get_block_timestamp(root_hash)
|
||||
oldest_needed = from_timestamp(root_timestamp) - period
|
||||
# noinspection PyTypeChecker
|
||||
trim = (oldest_needed - updated[0].start) // period
|
||||
if trim > 0:
|
||||
updated = updated[trim:]
|
||||
|
||||
# if len(updated) > 3:
|
||||
# log.debug(f'\tnew recents ({len(updated)}): {updated}')
|
||||
recent_ohlcs.setitem(key, updated)
|
||||
return updated
|
||||
|
||||
@@ -431,7 +435,7 @@ class OHLCRepository:
|
||||
return found
|
||||
|
||||
def flush(self) -> None:
|
||||
log.debug(f'flushing {len(self.dirty_chunks)} chunks')
|
||||
# log.debug(f'flushing {len(self.dirty_chunks)} chunks')
|
||||
for chunk in self.dirty_chunks:
|
||||
chunk.save()
|
||||
self.dirty_chunks.clear()
|
||||
|
||||
@@ -6,14 +6,17 @@ from uuid import UUID
|
||||
from web3.exceptions import ContractPanicError, ContractLogicError
|
||||
from web3.types import EventData
|
||||
|
||||
from dexorder import db, metric
|
||||
from dexorder import db, metric, config
|
||||
from dexorder.accounting import accounting_transaction_gas
|
||||
from dexorder.base import TransactionReceiptDict, TransactionRequest, transaction_request_deserializers
|
||||
from dexorder.base.order import TrancheKey, OrderKey
|
||||
from dexorder.base.orderlib import PriceProof
|
||||
from dexorder.base.orderlib import PriceProof, Exchange
|
||||
from dexorder.contract import ContractProxy
|
||||
from dexorder.contract.contract_proxy import ContractTransaction
|
||||
from dexorder.contract.dexorder import get_dexorder_contract
|
||||
from dexorder.database.model.accounting import AccountingSubcategory
|
||||
from dexorder.database.model.transaction import TransactionJob
|
||||
from dexorder.gmx import tk_gmx_in_flight
|
||||
from dexorder.order.orderstate import Order
|
||||
from dexorder.order.triggers import (OrderTriggers,
|
||||
TrancheState, active_tranches, order_error)
|
||||
@@ -68,10 +71,18 @@ class TrancheExecutionHandler (TransactionHandler):
|
||||
def __init__(self):
|
||||
super().__init__('te')
|
||||
|
||||
async def build_transaction(self, job_id: UUID, req: TrancheExecutionRequest) -> dict:
|
||||
async def build_transaction(self, job_id: UUID, req: TrancheExecutionRequest) -> Optional[ContractTransaction]:
|
||||
tk = req.tranche_key
|
||||
try:
|
||||
return await get_dexorder_contract().build.execute(job_id.bytes, (req.vault, req.order_index, req.tranche_index, req.price_proof))
|
||||
kwargs = {}
|
||||
if Order.of(tk).order.route.exchange == Exchange.GMX:
|
||||
if tk_gmx_in_flight.get(tk):
|
||||
return None # a GMX order is already in flight
|
||||
fee = await ContractProxy(req.vault, 'IVaultGMX').gmxExecutionFee(False)
|
||||
kwargs['value'] = round(fee * 1.1) # extra 10% because gas prices can change quickly
|
||||
return await get_dexorder_contract().build.execute(
|
||||
job_id.bytes, (req.vault, req.order_index, req.tranche_index, req.price_proof),
|
||||
kwargs=kwargs)
|
||||
except ContractPanicError as x:
|
||||
exception = x
|
||||
errcode = ''
|
||||
@@ -121,6 +132,11 @@ async def finish_execution_request(tk: TrancheKey, error: Optional[str]=None):
|
||||
if trig is not None:
|
||||
trig.touch()
|
||||
|
||||
def delay(secs=None):
|
||||
trig = get_trigger()
|
||||
if trig is not None:
|
||||
trig.deactivate(secs if secs is not None else config.slippage_control_delay)
|
||||
|
||||
if error is None:
|
||||
metric.executions.inc()
|
||||
else:
|
||||
@@ -162,6 +178,7 @@ async def finish_execution_request(tk: TrancheKey, error: Optional[str]=None):
|
||||
retry()
|
||||
elif error == 'RL':
|
||||
log.debug(f'tranche {tk} execution failed due to "RL" rate limit')
|
||||
delay()
|
||||
retry()
|
||||
elif error == 'TE':
|
||||
log.debug(f'tranche {tk} execution failed due to "TE" too early')
|
||||
@@ -227,6 +244,10 @@ async def handle_dexorderexecutions(event: EventData):
|
||||
if job is None:
|
||||
log.warning(f'Job {exe_id} not found!')
|
||||
return
|
||||
# verify that the transaction hash of the event is the same as that of our request
|
||||
if job.tx_id != event['transactionHash']:
|
||||
log.warning(f'Ignoring rogue DexorderExecutions {exe_id} with wrong txid {job.tx_id} != {event["transactionHash"]}')
|
||||
return
|
||||
# noinspection PyTypeChecker
|
||||
req: TrancheExecutionRequest = job.request
|
||||
tk = TrancheKey(req.vault, req.order_index, req.tranche_index)
|
||||
|
||||
@@ -3,13 +3,14 @@ import logging
|
||||
from dataclasses import dataclass
|
||||
from typing import overload
|
||||
|
||||
from dexorder import DELETE, db, order_log
|
||||
from dexorder import DELETE, db, order_log, from_timestamp
|
||||
from dexorder.base.chain import current_chain
|
||||
from dexorder.base.order import OrderKey, TrancheKey
|
||||
from dexorder.base.orderlib import SwapOrderState, ElaboratedSwapOrderStatus, Fill
|
||||
from dexorder.blockstate import BlockDict, BlockSet
|
||||
from dexorder.database.model.orderindex import OrderIndex
|
||||
from dexorder.routing import pool_address
|
||||
from dexorder.tokens import adjust_decimals
|
||||
from dexorder.util import json
|
||||
from dexorder.vault_blockdata import vault_owners
|
||||
|
||||
@@ -127,7 +128,7 @@ class Order:
|
||||
key = a if b is None else OrderKey(a, b)
|
||||
assert key not in Order.instances
|
||||
self.key = key
|
||||
self.status: ElaboratedSwapOrderStatus = Order.order_statuses[key].copy()
|
||||
self._status: ElaboratedSwapOrderStatus = Order.order_statuses[key].copy()
|
||||
self.pool_address: str = pool_address(self.status.order)
|
||||
self.tranche_keys = [TrancheKey(key.vault, key.order_index, i) for i in range(len(self.status.trancheStatus))]
|
||||
# flattenings of various static data
|
||||
@@ -138,6 +139,14 @@ class Order:
|
||||
self.tranche_amounts = [t.fraction_of(self.amount) for t in self.order.tranches]
|
||||
Order.instances[self.key] = self
|
||||
|
||||
@property
|
||||
def status(self):
|
||||
return self._status
|
||||
|
||||
@status.setter
|
||||
def status(self, v):
|
||||
self._status = Order.order_statuses[self.key] = v
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
return self.status.state
|
||||
@@ -279,6 +288,40 @@ class Order:
|
||||
Order.vault_recently_closed_orders.listremove(key.vault, key.order_index)
|
||||
|
||||
|
||||
def __str__(self):
|
||||
return str(self.key)
|
||||
|
||||
|
||||
async def pprint(self):
|
||||
amount_token = self.order.tokenIn if self.order.amountIsInput else self.order.tokenOut
|
||||
msg = f'''
|
||||
SwapOrder {self.key}
|
||||
status: {self.state.name}
|
||||
placed: {from_timestamp(self.status.startTime)}
|
||||
in: {self.order.tokenIn}
|
||||
out: {self.order.tokenOut}
|
||||
exchange: {self.order.route.exchange.name, self.order.route.fee}
|
||||
amount: {"input" if self.order.amountIsInput else "output"} {await adjust_decimals(amount_token, self.filled):f}/{await adjust_decimals(amount_token, self.amount):f}{" to owner" if self.order.outputDirectlyToOwner else ""}
|
||||
minFill: {await adjust_decimals(amount_token, self.min_fill_amount):f}
|
||||
inverted: {self.order.inverted}
|
||||
'''
|
||||
if self.order.gmx:
|
||||
msg += f'''
|
||||
gmx order: {"increase" if self.order.gmx.is_increase else "decrease"} {"long" if self.order.gmx.is_long else "short"}
|
||||
collateral: {self.order.gmx.reserve_amount}
|
||||
'''
|
||||
msg += '''
|
||||
tranches:
|
||||
'''
|
||||
for i in range(len(self.order.tranches)):
|
||||
tranche = self.order.tranches[i]
|
||||
msg += f' {tranche}'
|
||||
filled_amount = self.tranche_filled(i)
|
||||
if filled_amount:
|
||||
msg += f' filled {await adjust_decimals(amount_token, filled_amount)}'
|
||||
msg += '\n'
|
||||
return msg
|
||||
|
||||
# ORDER STATE
|
||||
# various blockstate fields hold different aspects of an order's state.
|
||||
|
||||
@@ -310,8 +353,6 @@ class Order:
|
||||
'of', db=True, redis=True, pub=pub_order_fills,
|
||||
str2key=OrderKey.str2key, value2str=lambda v: json.dumps(v.dump()), str2value=lambda s:OrderFilled.load(json.loads(s)))
|
||||
|
||||
def __str__(self):
|
||||
return str(self.order)
|
||||
|
||||
# "active" means the order wants to be executed now. this is not BlockData because it's cleared every block
|
||||
active_orders: dict[OrderKey,Order] = {}
|
||||
|
||||
@@ -1,21 +1,22 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from abc import abstractmethod
|
||||
from collections import defaultdict
|
||||
from datetime import timedelta
|
||||
from enum import Enum, auto
|
||||
from typing import Optional, Sequence
|
||||
from typing import Optional, Sequence, Union
|
||||
|
||||
import numpy as np
|
||||
from sortedcontainers import SortedList
|
||||
|
||||
from dexorder.base.orderlib import SwapOrderState, PriceProof, DISTANT_FUTURE, DISTANT_PAST, Line
|
||||
from dexorder.base.orderlib import SwapOrderState, PriceProof, DISTANT_FUTURE, DISTANT_PAST, Line, MIN_SLIPPAGE, \
|
||||
MIN_SLIPPAGE_EPSILON
|
||||
from dexorder.blockstate import BlockDict
|
||||
from .orderstate import Order
|
||||
from .. import dec, order_log, timestamp, from_timestamp, config
|
||||
from .. import dec, order_log, timestamp, config
|
||||
from ..base import OldPoolDict
|
||||
from ..base.chain import current_clock
|
||||
from ..base.order import OrderKey, TrancheKey
|
||||
from ..contract import ERC20
|
||||
from ..database.model.pool import OldPoolDict
|
||||
from ..pools import ensure_pool_price, pool_prices, get_pool
|
||||
from ..routing import pool_address
|
||||
from ..vault_blockdata import vault_balances, adjust_balance
|
||||
@@ -36,7 +37,7 @@ execution should be attempted on the tranche.
|
||||
"""
|
||||
|
||||
|
||||
# tranches which have passed all constraints and should be executed
|
||||
# tranches which have passed all constraints and should be executed. This set gets checked against already in-
|
||||
active_tranches: BlockDict[TrancheKey, Optional[PriceProof]] = BlockDict('at')
|
||||
|
||||
|
||||
@@ -55,13 +56,13 @@ class OrderTriggers:
|
||||
self.order = order
|
||||
self.triggers = triggers
|
||||
OrderTriggers.instances[order.key] = self
|
||||
log.debug(f'created OrderTriggers for {order.key}')
|
||||
# log.debug(f'created OrderTriggers for {order.key}')
|
||||
|
||||
def disable(self):
|
||||
for t in self.triggers:
|
||||
t.disable()
|
||||
del OrderTriggers.instances[self.order.key]
|
||||
log.debug(f'disabled OrderTriggers for {self.order.key}')
|
||||
# log.debug(f'disabled OrderTriggers for {self.order.key}')
|
||||
|
||||
@property
|
||||
def closed(self):
|
||||
@@ -71,6 +72,10 @@ class OrderTriggers:
|
||||
def open(self):
|
||||
return not self.closed
|
||||
|
||||
@property
|
||||
def error(self):
|
||||
return any(t.error for t in self.triggers)
|
||||
|
||||
def check_complete(self):
|
||||
if self.closed:
|
||||
final_state = SwapOrderState.Filled if self.order.remaining == 0 or self.order.remaining < self.order.min_fill_amount else SwapOrderState.Expired
|
||||
@@ -99,15 +104,16 @@ def start_trigger_updates():
|
||||
PriceLineTrigger.clear_data()
|
||||
|
||||
|
||||
async def update_balance_triggers(vault: str, token: str, balance: int):
|
||||
async def update_balance_triggers(vault: str, token: str):
|
||||
balance = vault_balances.get(vault, {}).get(token)
|
||||
updates = [bt.update(balance) for bt in BalanceTrigger.by_vault_token.get((vault, token), [])]
|
||||
await asyncio.gather(*updates)
|
||||
|
||||
|
||||
async def update_price_triggers(pool: OldPoolDict, price: dec):
|
||||
price = price * dec(10) ** dec(-pool['decimals']) # adjust for pool decimals to get onchain price
|
||||
def update_price_triggers(addr: str, price: dec, decimals: int):
|
||||
price = price * dec(10) ** dec(-decimals) # adjust for pool decimals to get onchain price
|
||||
price = float(price) # since we use SIMD operations to evaluate lines, we must convert to float
|
||||
for pt in PriceLineTrigger.by_pool.get(pool['address'], []):
|
||||
for pt in PriceLineTrigger.by_pool.get(addr, []):
|
||||
pt.update(price)
|
||||
|
||||
|
||||
@@ -171,6 +177,7 @@ class Trigger:
|
||||
Expiration = 2
|
||||
MinLine = 3
|
||||
MaxLine = 4
|
||||
GMXInFlight = 5
|
||||
|
||||
def __init__(self, trigger_type: TriggerType, tk: TrancheKey, value: bool):
|
||||
"""
|
||||
@@ -204,19 +211,17 @@ class Trigger:
|
||||
|
||||
def _value_changed(self): pass
|
||||
|
||||
|
||||
@abstractmethod
|
||||
def remove(self): ...
|
||||
def remove(self): pass
|
||||
|
||||
|
||||
async def has_funds(tk: TrancheKey):
|
||||
log.debug(f'has funds? {tk.vault}')
|
||||
# log.debug(f'has funds? {tk.vault}')
|
||||
order = Order.of(tk)
|
||||
balances = vault_balances.get(tk.vault, {})
|
||||
log.debug(f'balances {balances}')
|
||||
# log.debug(f'balances {balances}')
|
||||
token_addr = order.status.order.tokenIn
|
||||
token_balance = balances.get(token_addr)
|
||||
log.debug(f'amount of {token_addr} = {token_balance}')
|
||||
# log.debug(f'amount of {token_addr} = {token_balance}')
|
||||
if token_balance is None:
|
||||
# unknown balance
|
||||
token_balance = balances[token_addr] = await ERC20(token_addr).balanceOf(tk.vault)
|
||||
@@ -226,6 +231,7 @@ async def has_funds(tk: TrancheKey):
|
||||
|
||||
|
||||
async def input_amount_is_sufficient(order, token_balance):
|
||||
# todo modify for GMX
|
||||
# log.debug(f'input is sufficient? {order.min_fill_amount}')
|
||||
if order.amount_is_input:
|
||||
# log.debug(f'amount is input: {token_balance} >= {order.min_fill_amount}')
|
||||
@@ -256,11 +262,12 @@ class BalanceTrigger (Trigger):
|
||||
self.order = Order.of(self.tk)
|
||||
self.vault_token = self.tk.vault, self.order.status.order.tokenIn
|
||||
BalanceTrigger.by_vault_token[self.vault_token].add(self)
|
||||
self._value_changed()
|
||||
# log.debug(f'initializing Balance Trigger {id(self)} {tk} {value} {self.value}')
|
||||
|
||||
async def update(self, balance):
|
||||
self.value = await input_amount_is_sufficient(self.order, balance)
|
||||
# log.debug(f'update balance {balance} was sufficient? {self.value}')
|
||||
# log.debug(f'update balance {balance} was sufficient? {self.value} {self.order.key}')
|
||||
|
||||
def remove(self):
|
||||
try:
|
||||
@@ -268,6 +275,17 @@ class BalanceTrigger (Trigger):
|
||||
except (KeyError, ValueError):
|
||||
pass
|
||||
|
||||
def _value_changed(self):
|
||||
ok = self.value
|
||||
order = Order.of(self.tk)
|
||||
old_state = order.status.state
|
||||
if not ok and old_state == SwapOrderState.Open:
|
||||
order.status = order.status.copy()
|
||||
order.status.state = SwapOrderState.Underfunded
|
||||
elif ok and old_state == SwapOrderState.Underfunded:
|
||||
order.status = order.status.copy()
|
||||
order.status.state = SwapOrderState.Open
|
||||
|
||||
|
||||
class TimeTrigger (Trigger):
|
||||
|
||||
@@ -304,8 +322,8 @@ class TimeTrigger (Trigger):
|
||||
if time == self._time:
|
||||
return
|
||||
self._time = time
|
||||
self.remove()
|
||||
self.update_active(time_now)
|
||||
in_future = time_now >= time
|
||||
self.value = in_future is self.is_start
|
||||
|
||||
def update_active(self, time_now: int = None, time: int = None):
|
||||
if time_now is None:
|
||||
@@ -374,7 +392,7 @@ class PriceLineTrigger (Trigger):
|
||||
if inverted:
|
||||
price_now = 1/price_now
|
||||
activated = value_now < price_now if is_min else value_now > price_now
|
||||
log.debug(f'initial price line {value_now} {"<" if is_min else ">"} {price_now} {activated}')
|
||||
# log.debug(f'initial price line {value_now} {"<" if is_min else ">"} {price_now} {activated}')
|
||||
trigger_type = Trigger.TriggerType.MinLine if is_min else Trigger.TriggerType.MaxLine
|
||||
super().__init__(trigger_type, tk, activated)
|
||||
self.inverted = inverted
|
||||
@@ -489,7 +507,8 @@ async def activate_order(order: Order):
|
||||
triggers = await OrderTriggers.create(order)
|
||||
if triggers.closed:
|
||||
log.debug(f'order {order.key} was immediately closed')
|
||||
final_state = SwapOrderState.Filled if order.remaining == 0 or order.remaining < order.min_fill_amount \
|
||||
final_state = SwapOrderState.Error if triggers.error \
|
||||
else SwapOrderState.Filled if order.remaining == 0 or order.remaining < order.min_fill_amount \
|
||||
else SwapOrderState.Expired
|
||||
order.complete(final_state)
|
||||
|
||||
@@ -550,13 +569,14 @@ class TrancheTrigger:
|
||||
|
||||
tranche_remaining = tranche.fraction_of(order.amount) - order.tranche_filled(self.tk.tranche_index)
|
||||
self.status = \
|
||||
TrancheState.Error if self.market_order and self.slippage < MIN_SLIPPAGE - MIN_SLIPPAGE_EPSILON else \
|
||||
TrancheState.Filled if tranche_remaining == 0 or tranche_remaining < self.order.min_fill_amount else \
|
||||
TrancheState.Expired if self.expiration_trigger is not None and not self.expiration_trigger else \
|
||||
TrancheState.Early if self.activation_trigger is None and not self.activation_trigger else \
|
||||
TrancheState.Early if self.activation_trigger is not None and not self.activation_trigger else \
|
||||
TrancheState.Active
|
||||
_dirty.add(tk)
|
||||
TrancheTrigger.all[tk] = self
|
||||
log.debug(f'Tranche {tk} initial status {self.status} {self}')
|
||||
# log.debug(f'Tranche {tk} initial status {self.status} {self}')
|
||||
|
||||
|
||||
@property
|
||||
@@ -572,7 +592,7 @@ class TrancheTrigger:
|
||||
|
||||
|
||||
def fill(self, _amount_in, _amount_out, _next_activation_time ):
|
||||
if _next_activation_time != DISTANT_PAST:
|
||||
if _next_activation_time != 0:
|
||||
# rate limit
|
||||
if self.activation_trigger is None:
|
||||
self.activation_trigger = TimeTrigger(True, self.tk, _next_activation_time, timestamp())
|
||||
@@ -587,7 +607,8 @@ class TrancheTrigger:
|
||||
else:
|
||||
order_log.debug(f'tranche part-filled {self.tk} in:{_amount_in} out:{_amount_out} remaining:{remaining}')
|
||||
if self.market_order:
|
||||
self.expire()
|
||||
order_log.debug(f'tranche {self.tk} delayed {config.slippage_control_delay} seconds due to slippage control')
|
||||
self.deactivate(config.slippage_control_delay)
|
||||
self.slash_count = 0 # reset slash count
|
||||
|
||||
def touch(self):
|
||||
@@ -599,11 +620,11 @@ class TrancheTrigger:
|
||||
self.order_trigger.expire_tranche(self.tk.tranche_index)
|
||||
|
||||
def expire(self):
|
||||
self.disable()
|
||||
if self.closed:
|
||||
return
|
||||
order_log.debug(f'tranche expired {self.tk}')
|
||||
self.status = TrancheState.Expired
|
||||
self.disable()
|
||||
|
||||
def kill(self):
|
||||
order_log.warning(f'tranche KILLED {self.tk}')
|
||||
@@ -619,15 +640,26 @@ class TrancheTrigger:
|
||||
self.kill()
|
||||
else:
|
||||
delay = round(config.slash_delay_base * config.slash_delay_mul ** (self.slash_count-1))
|
||||
self.deactivate(timestamp()+delay)
|
||||
self.deactivate(delay)
|
||||
|
||||
def deactivate(self, until):
|
||||
def deactivate(self, interval: Union[timedelta, int, float]):
|
||||
# todo this timestamp should be consistent with the trigger time which is blockchain
|
||||
now = current_clock.get().timestamp
|
||||
self.deactivate_until(now + (interval.total_seconds() if isinstance(interval, timedelta) else interval))
|
||||
|
||||
def deactivate_until(self, until):
|
||||
# Temporarily deactivate the tranche due to a rate limit. Use disable() to permanently halt the trigger.
|
||||
log.debug(f'deactivating tranche {self.tk} until {from_timestamp(until)}')
|
||||
now = current_clock.get().timestamp
|
||||
if until < now:
|
||||
return
|
||||
if self.activation_trigger is None:
|
||||
self.activation_trigger = TimeTrigger.create(True, self.tk, until)
|
||||
else:
|
||||
self.activation_trigger.time = until
|
||||
self.activation_trigger.time = max(until, self.activation_trigger.time)
|
||||
try:
|
||||
del active_tranches[self.tk]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
def disable(self):
|
||||
# permanently stop this trigger and deconstruct
|
||||
@@ -665,6 +697,10 @@ class TrancheTrigger:
|
||||
def open(self):
|
||||
return not self.closed
|
||||
|
||||
@property
|
||||
def error(self):
|
||||
return self.status == TrancheState.Error
|
||||
|
||||
def __str__(self):
|
||||
trigs = []
|
||||
if self.balance_trigger is not None:
|
||||
|
||||
36
src/dexorder/periodic.py
Normal file
36
src/dexorder/periodic.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from datetime import timedelta
|
||||
import time
|
||||
import asyncio
|
||||
from functools import wraps
|
||||
|
||||
def periodic(period: timedelta|float):
|
||||
"""
|
||||
Decorator to allow only one execution of a function or coroutine per period.
|
||||
Works for both sync and async functions.
|
||||
"""
|
||||
def decorator(func):
|
||||
last_called = {'time': 0.}
|
||||
period_seconds = period.total_seconds() if isinstance(period, timedelta) else period
|
||||
|
||||
@wraps(func)
|
||||
def sync_wrapper(*args, **kwargs):
|
||||
now = time.monotonic()
|
||||
if now - last_called['time'] >= period_seconds:
|
||||
last_called['time'] = now
|
||||
return func(*args, **kwargs)
|
||||
return None
|
||||
|
||||
@wraps(func)
|
||||
async def async_wrapper(*args, **kwargs):
|
||||
now = time.monotonic()
|
||||
if now - last_called['time'] >= period_seconds:
|
||||
last_called['time'] = now
|
||||
return await func(*args, **kwargs)
|
||||
return None
|
||||
|
||||
if asyncio.iscoroutinefunction(func):
|
||||
return async_wrapper
|
||||
else:
|
||||
return sync_wrapper
|
||||
|
||||
return decorator
|
||||
@@ -4,18 +4,18 @@ from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from web3.exceptions import ContractLogicError
|
||||
from web3.exceptions import ContractLogicError, BadFunctionCallOutput
|
||||
from web3.types import EventData
|
||||
|
||||
from dexorder import dec, ADDRESS_0, from_timestamp, db, config, NATIVE_TOKEN
|
||||
from dexorder.addrmeta import address_metadata
|
||||
from dexorder.base import OldPoolDict
|
||||
from dexorder.base.chain import current_chain
|
||||
from dexorder.base.orderlib import Exchange
|
||||
from dexorder.blocks import get_block_timestamp
|
||||
from dexorder.blockstate import BlockDict
|
||||
from dexorder.blockstate.blockdata import K, V
|
||||
from dexorder.database.model import Pool
|
||||
from dexorder.database.model.pool import OldPoolDict
|
||||
from dexorder.tokens import get_token, adjust_decimals as adj_dec
|
||||
from dexorder.uniswap import UniswapV3Pool, uniswapV3_pool_address
|
||||
|
||||
@@ -64,7 +64,7 @@ async def load_pool(address: str, *, use_db=True) -> OldPoolDict:
|
||||
log.debug(f'new UniswapV3 pool {token0["symbol"]}/{token1["symbol"]} {fee/1_000_000:.2%} '
|
||||
f'{("."+str(decimals)) if decimals >= 0 else (str(-decimals)+".")} {address}')
|
||||
add_mark_pool(address, t0, t1, fee)
|
||||
except ContractLogicError:
|
||||
except (ContractLogicError, BadFunctionCallOutput):
|
||||
pass
|
||||
except ValueError as v:
|
||||
try:
|
||||
@@ -85,8 +85,9 @@ async def load_pool(address: str, *, use_db=True) -> OldPoolDict:
|
||||
|
||||
class PoolPrices (BlockDict[str, dec]):
|
||||
def __setitem__(self, item: K, value: V) -> None:
|
||||
super().__setitem__(item, value)
|
||||
new_pool_prices[item] = value
|
||||
old = self.setitem(item, value)
|
||||
if value != old:
|
||||
new_pool_prices[item] = value
|
||||
|
||||
|
||||
def pub_pool_price(_s,k,v):
|
||||
@@ -148,7 +149,7 @@ class MarkPool:
|
||||
|
||||
mark_pools: dict[str, MarkPool] = {}
|
||||
|
||||
quotes = [] # ordered list of preferred quote tokens
|
||||
quotes = [] # ordered list of preferred quote token addresses
|
||||
|
||||
|
||||
def add_mark_pool(addr: str, base: str, quote: str, fee: int):
|
||||
@@ -200,7 +201,7 @@ async def mark_to_market_adj_dec(token: str, amount: dec, adjust_decimals=True)
|
||||
return mark_to_market(token, amount)
|
||||
|
||||
|
||||
def mark_to_market(token: str, amount: dec) -> Optional[dec]:
|
||||
def mark_to_market(token: str, amount: dec = dec(1)) -> Optional[dec]:
|
||||
"""
|
||||
amount must already be adjusted for decimals
|
||||
"""
|
||||
|
||||
@@ -40,8 +40,8 @@ class BlockProgressor(metaclass=ABCMeta):
|
||||
def add_event_trigger(self,
|
||||
# callback takes either a single event if multi=False, or if multi=True then a list of all events in the processing range
|
||||
callback: Union[
|
||||
Callable[[EventData], Maywaitable[None]],
|
||||
Callable[[list[EventData]], Maywaitable[None]],
|
||||
Callable[[EventData|dict], Maywaitable[None]],
|
||||
Callable[[list[EventData|dict]], Maywaitable[None]],
|
||||
Callable[[], Maywaitable[None]],
|
||||
],
|
||||
event: ContractEvents = None,
|
||||
|
||||
@@ -5,7 +5,6 @@ from datetime import timedelta
|
||||
from typing import Any, Iterable, Callable, Optional
|
||||
|
||||
from eth_bloom import BloomFilter
|
||||
# noinspection PyPackageRequirements
|
||||
from websockets.exceptions import ConnectionClosedError
|
||||
|
||||
from dexorder import Blockchain, db, current_pub, async_yield, current_w3, config, now, timestamp, metric
|
||||
@@ -81,8 +80,7 @@ class BlockStateRunner(BlockProgressor):
|
||||
async with w3ws as w3ws:
|
||||
log.debug('connecting to ws provider')
|
||||
await w3ws.provider.connect()
|
||||
subscription = await w3ws.eth.subscribe('newHeads') # the return value of this call is not consistent between anvil/hardhat/rpc.
|
||||
# log.debug(f'subscribed to newHeads {subscription}')
|
||||
await w3ws.eth.subscribe('newHeads') # the return value of this call is not consistent between anvil/hardhat/rpc.
|
||||
while self.running:
|
||||
async for message in w3ws.ws.process_subscriptions():
|
||||
block = Block(chain_id, message['result'])
|
||||
@@ -94,11 +92,15 @@ class BlockStateRunner(BlockProgressor):
|
||||
if not self.running:
|
||||
break
|
||||
await async_yield()
|
||||
except (ConnectionClosedError, TimeoutError, asyncio.TimeoutError) as e:
|
||||
except (TimeoutError, asyncio.TimeoutError) as e:
|
||||
log.debug(f'runner timeout {e}')
|
||||
except ConnectionClosedError as e:
|
||||
log.info(f'websocket connection closed {e}')
|
||||
except ConnectionRefusedError:
|
||||
log.warning(f'Could not connect to websocket {config.ws_url}')
|
||||
await asyncio.sleep(1)
|
||||
except StopAsyncIteration:
|
||||
log.info(f'websocket stream ended')
|
||||
except Exception:
|
||||
log.exception(f'Unhandled exception during run_ws()')
|
||||
finally:
|
||||
@@ -314,7 +316,7 @@ class BlockStateRunner(BlockProgressor):
|
||||
else:
|
||||
lf = dict(log_filter)
|
||||
lf['blockHash'] = hexstr(block.hash)
|
||||
has_logs = any(bytes(hexbytes(topic)) in bloom for topic in lf['topics'])
|
||||
has_logs = 'topics' not in lf or any(bytes(hexbytes(topic)) in bloom for topic in lf['topics'])
|
||||
# log.debug(f'has {event.__class__.__name__}? {has_logs}')
|
||||
if not has_logs:
|
||||
get_logs = None
|
||||
@@ -397,21 +399,21 @@ class BlockStateRunner(BlockProgressor):
|
||||
# propragate to the DB or Redis.
|
||||
# TIME TICKS ARE DISABLED FOR THIS REASON
|
||||
return
|
||||
current_fork.set(fork)
|
||||
session = db.session
|
||||
session.begin()
|
||||
try:
|
||||
for callback, on_timer in self.callbacks:
|
||||
if on_timer:
|
||||
# noinspection PyCallingNonCallable
|
||||
await maywait(callback())
|
||||
except BaseException:
|
||||
session.rollback()
|
||||
raise
|
||||
else:
|
||||
session.commit()
|
||||
finally:
|
||||
db.close_session()
|
||||
# current_fork.set(fork)
|
||||
# session = db.session
|
||||
# session.begin()
|
||||
# try:
|
||||
# for callback, on_timer in self.callbacks:
|
||||
# if on_timer:
|
||||
# # noinspection PyCallingNonCallable
|
||||
# await maywait(callback())
|
||||
# except BaseException:
|
||||
# session.rollback()
|
||||
# raise
|
||||
# else:
|
||||
# session.commit()
|
||||
# finally:
|
||||
# db.close_session()
|
||||
|
||||
|
||||
async def do_state_init_cbs(self):
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
@@ -6,11 +7,11 @@ from web3.exceptions import BadFunctionCallOutput
|
||||
|
||||
from dexorder import ADDRESS_0, db, NATIVE_TOKEN, dec, current_w3
|
||||
from dexorder.addrmeta import address_metadata
|
||||
from dexorder.base import OldTokenDict
|
||||
from dexorder.base.chain import current_chain
|
||||
from dexorder.blocks import current_block
|
||||
from dexorder.contract import ERC20, ContractProxy, CONTRACT_ERRORS
|
||||
from dexorder.database.model import Token
|
||||
from dexorder.database.model.token import OldTokenDict
|
||||
from dexorder.metadata import get_metadata
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
@@ -45,18 +46,19 @@ async def get_native_balance(addr, *, adjust_decimals=True) -> dec:
|
||||
return value
|
||||
|
||||
|
||||
async def get_token(address) -> Optional[OldTokenDict]:
|
||||
async def get_token(address, *, squelch=False) -> Optional[OldTokenDict]:
|
||||
if address == ADDRESS_0:
|
||||
raise ValueError('No token at address 0')
|
||||
try:
|
||||
# noinspection PyTypeChecker
|
||||
return address_metadata[address]
|
||||
except KeyError:
|
||||
result = address_metadata[address] = await load_token(address)
|
||||
# noinspection PyTypeChecker
|
||||
result = address_metadata[address] = await load_token(address, squelch=squelch)
|
||||
return result
|
||||
|
||||
|
||||
async def load_token(address: str) -> Optional[OldTokenDict]:
|
||||
async def load_token(address: str, *, squelch=False) -> Optional[OldTokenDict]:
|
||||
contract = ERC20(address)
|
||||
chain_id = current_chain.get().id
|
||||
if db:
|
||||
@@ -74,7 +76,8 @@ async def load_token(address: str) -> Optional[OldTokenDict]:
|
||||
try:
|
||||
rb: bytes = await ContractProxy(address, 'ERC20.sb').symbol()
|
||||
except CONTRACT_ERRORS:
|
||||
log.warning(f'token {address} has broken {func_name}()')
|
||||
if not squelch:
|
||||
log.warning(f'token {address} has broken {func_name}()')
|
||||
return None
|
||||
end = rb.find(b'\x00')
|
||||
if end == -1:
|
||||
@@ -82,21 +85,20 @@ async def load_token(address: str) -> Optional[OldTokenDict]:
|
||||
try:
|
||||
return rb[:end].decode('utf8')
|
||||
except UnicodeDecodeError:
|
||||
log.warning(f'token {address} has an invalid {func_name}() {rb}')
|
||||
if not squelch:
|
||||
log.warning(f'token {address} has an invalid {func_name}() {rb}')
|
||||
return None
|
||||
|
||||
dec_prom = contract.decimals()
|
||||
symbol_prom = get_string_or_bytes32('symbol')
|
||||
name_prom = get_string_or_bytes32('name')
|
||||
try:
|
||||
decimals = await dec_prom
|
||||
except CONTRACT_ERRORS:
|
||||
log.warning(f'token {address} has no decimals()')
|
||||
decimals = 0
|
||||
if not squelch:
|
||||
log.info(f'token {address} has no decimals()')
|
||||
return None # we do not support coins that don't specify decimals.
|
||||
approved = False # never approve new coins
|
||||
chain_id = current_chain.get().id
|
||||
symbol = await symbol_prom
|
||||
name = await name_prom
|
||||
name, symbol = await asyncio.gather(get_string_or_bytes32('name'), get_string_or_bytes32('symbol'))
|
||||
td = OldTokenDict(type='Token', chain=chain_id, address=address,
|
||||
name=name, symbol=symbol, decimals=decimals, approved=approved)
|
||||
md = get_metadata(address, chain_id=chain_id)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from abc import abstractmethod
|
||||
from abc import abstractmethod, ABC
|
||||
from typing import Optional
|
||||
from uuid import uuid4
|
||||
|
||||
@@ -18,7 +18,7 @@ from dexorder.util import hexstr
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TransactionHandler:
|
||||
class TransactionHandler (ABC):
|
||||
instances: dict[str,'TransactionHandler'] = {}
|
||||
|
||||
@staticmethod
|
||||
|
||||
@@ -37,7 +37,7 @@ def dumps(obj):
|
||||
return dumpb(obj).decode('utf8')
|
||||
|
||||
def dumpb(obj):
|
||||
opts = orjson.OPT_PASSTHROUGH_SUBCLASS
|
||||
opts = orjson.OPT_PASSTHROUGH_SUBCLASS | orjson.OPT_SERIALIZE_DATACLASS
|
||||
return orjson.dumps(obj, default=_serialize, option=opts)
|
||||
|
||||
|
||||
|
||||
@@ -2,12 +2,12 @@ import asyncio
|
||||
import functools
|
||||
import logging
|
||||
|
||||
from dexorder import current_pub
|
||||
from dexorder import current_pub, dec
|
||||
from dexorder.base.chain import current_chain
|
||||
from dexorder.blockstate import BlockDict
|
||||
from dexorder.contract import ERC20, CONTRACT_ERRORS
|
||||
from dexorder.contract.dexorder import VaultContract, vault_address
|
||||
from dexorder.util import json
|
||||
from dexorder.util import json, align_decimal
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
@@ -102,3 +102,6 @@ async def refresh_vault_balances(vault, *tokens):
|
||||
result[t] = a
|
||||
return result
|
||||
vault_balances.modify(vault, functools.partial(_adjust, vault, tokens, amounts))
|
||||
|
||||
def pretty_balances(b: dict[str,dec], padding=8) -> str:
|
||||
return '\n'.join(f'{k:>} {align_decimal(v,padding)}' for k,v in b.items())
|
||||
|
||||
Reference in New Issue
Block a user