Compare commits
82 Commits: 67ba314d46...master

4936150c3b  88057607d5  36d0a863c6  89ce46793e  2bcf5d043c  71942d5b8f  ef44973646
ce55609297  a27300b5e4  f3faaa3dd6  0bb670b356  52b406ba17  3d0342d19d  dbf960bae9
d49f142fe3  34fa439b3c  41a1e2d9fe  66229e67bb  31b6ddd314  07c6423fd5  4740687167
a06eeeb10d  4492d23c47  1c0c2f0e63  f3bdfdf97b  be8c8bf019  ecf1d21d5f  b7ed91d1c0
646449e456  1bcf73de22  af0f35eba5  e868ea5a4b  c132f40164  eccf81c3c8  61ab34a9f7
aed6c36746  31a2edd0c6  6c76a9efd7  5ef92caa51  473e0ec516  979f31dfe0  afb1ee49a4
8b541bd76d  04d7686c30  603dd64dc4  920109ba27  67ab504a40  f2e7749c7b  c9245615cb
7d929db304  56e7c32705  14b19dcc78  f2eb4ea96b  58b17f21a6  d942666e16  44d1c4a920
9dbc7e0378  3c7d7f5d57  b18eeb5069  91973304e2  adba28db32  90d6440c5a  08e421712a
7f501222f8  5bcbae1d94  416cff80b0  b22c044028  d838412b2b  e95acda52d  3bec4b8611
cda2446c0e  adebbb833c  48fdfeeb3f  bc6a196bfa  3b2c58671b  b133999314  39be05adaa
51852c1250  40d8d44676  2a95dd26df  6844f73e4b  829ec58f8f
alembic/versions/509010f13e8b_accounting_vaultcreation_ofac.py (new file, 123 lines)
@@ -0,0 +1,123 @@
"""accounting, vaultcreation, ofac

Revision ID: 509010f13e8b
Revises: 86afa7b6415d
Create Date: 2025-01-03 19:11:22.073682

"""
from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql

import dexorder.database
import dexorder.database.column_types

# revision identifiers, used by Alembic.
revision: str = '509010f13e8b'
down_revision: Union[str, None] = '86afa7b6415d'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.execute("ALTER TYPE transactionjobstate ADD VALUE 'Declined'")
    op.create_table('accounting',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('time', sa.DateTime(), nullable=False),
        sa.Column('account', sa.String(), nullable=False),
        sa.Column('category', sa.Enum('Transfer', 'Income', 'Expense', 'Trade', 'Special', name='accountingcategory'), nullable=False),
        sa.Column('subcategory', sa.Enum('OrderFee', 'GasFee', 'FillFee', 'Admin', 'TransactionGas', 'VaultCreation', 'Execution', 'FeeAdjustment', 'InitialBalance', name='accountingsubcategory'), nullable=True),
        sa.Column('token', sa.String(), nullable=False),
        sa.Column('amount', dexorder.database.column_types.DecimalNumeric(), nullable=False),
        sa.Column('value', dexorder.database.column_types.DecimalNumeric(), nullable=True),
        sa.Column('chain_id', sa.Integer(), nullable=False),
        sa.Column('tx_id', sa.String(), nullable=True),
        sa.Column('note', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_accounting_category'), 'accounting', ['category'], unique=False)
    op.create_index(op.f('ix_accounting_subcategory'), 'accounting', ['subcategory'], unique=False)
    op.create_index(op.f('ix_accounting_time'), 'accounting', ['time'], unique=False)
    op.create_index(op.f('ix_accounting_token'), 'accounting', ['token'], unique=False)
    op.create_index(op.f('ix_accounting_account'), 'accounting', ['account'], unique=False)
    op.create_index(op.f('ix_accounting_chain_id'), 'accounting', ['chain_id'], unique=False)
    op.create_table('ofac',
        sa.Column('address', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('address')
    )
    op.create_table('ofacalerts',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('time', sa.DateTime(), nullable=False),
        sa.Column('address', sa.String(), nullable=False),
        sa.Column('ip', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('vaultcreationrequest',
        sa.Column('chain', dexorder.database.column_types.Blockchain(), nullable=False),
        sa.Column('owner', sa.String(), nullable=False),
        sa.Column('num', sa.Integer(), nullable=False),
        sa.Column('time', sa.DateTime(), nullable=False),
        sa.Column('ipaddr', postgresql.INET(), nullable=False),
        sa.Column('vault', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('chain', 'owner', 'num')
    )
    op.create_index('ix_vault_address_not_null', 'vaultcreationrequest', ['vault'], unique=False, postgresql_where='vault IS NOT NULL')
    op.create_table('account',
        sa.Column('chain', dexorder.database.column_types.Blockchain(), nullable=False),
        sa.Column('address', sa.String(), nullable=False),
        sa.Column('kind', sa.Enum('Admin', 'OrderFee', 'GasFee', 'FillFee', 'Execution', name='accountkind'), nullable=False),
        sa.Column('balances', dexorder.database.column_types.Balances(astext_type=sa.Text()), server_default='{}', nullable=False),
        sa.PrimaryKeyConstraint('chain', 'address')
    )
    op.create_index(op.f('ix_account_kind'), 'account', ['kind'], unique=False)

    op.create_table('reconciliation',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('chain', dexorder.database.column_types.Blockchain(), nullable=False),
        sa.Column('address', sa.String(), nullable=False),
        sa.Column('accounting_id', sa.Integer(), nullable=False),
        sa.Column('height', sa.Integer(), nullable=False),
        sa.Column('balances', dexorder.database.column_types.Balances(astext_type=sa.Text()), server_default='{}', nullable=False),
        sa.ForeignKeyConstraint(['chain', 'address'], ['account.chain', 'account.address'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_reconciliation_accounting_id'), 'reconciliation', ['accounting_id'], unique=False)
    op.create_index(op.f('ix_reconciliation_address'), 'reconciliation', ['address'], unique=False)
    op.create_index(op.f('ix_reconciliation_chain'), 'reconciliation', ['chain'], unique=False)
    op.create_index(op.f('ix_reconciliation_height'), 'reconciliation', ['height'], unique=False)

    op.create_table('tosacceptance',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('ipaddr', sa.String(), nullable=False),
        sa.Column('time', sa.DateTime(), nullable=False),
        sa.Column('version', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )


def downgrade() -> None:
    op.drop_table('tosacceptance')
    op.drop_index(op.f('ix_reconciliation_height'), table_name='reconciliation')
    op.drop_index(op.f('ix_reconciliation_chain'), table_name='reconciliation')
    op.drop_index(op.f('ix_reconciliation_address'), table_name='reconciliation')
    op.drop_index(op.f('ix_reconciliation_accounting_id'), table_name='reconciliation')
    op.drop_table('reconciliation')
    op.drop_index(op.f('ix_account_kind'), table_name='account')
    op.drop_table('account')
    op.drop_index('ix_vault_address_not_null', table_name='vaultcreationrequest', postgresql_where='vault IS NOT NULL')
    op.drop_table('vaultcreationrequest')
    op.drop_table('ofacalerts')
    op.drop_table('ofac')
    op.drop_index(op.f('ix_accounting_chain_id'), table_name='accounting')
    op.drop_index(op.f('ix_accounting_account'), table_name='accounting')
    op.drop_index(op.f('ix_accounting_token'), table_name='accounting')
    op.drop_index(op.f('ix_accounting_time'), table_name='accounting')
    op.drop_index(op.f('ix_accounting_subcategory'), table_name='accounting')
    op.drop_index(op.f('ix_accounting_category'), table_name='accounting')
    op.drop_table('accounting')
    op.execute('drop type accountkind') # enum type
    op.execute('drop type accountingcategory') # enum type
    op.execute('drop type accountingsubcategory') # enum type
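Note: a minimal sketch of what one row in the new accounting table looks like once this migration is applied. This is not part of the diff; the connection URL and the values are illustrative assumptions.

    from datetime import datetime, timezone
    import sqlalchemy as sa

    engine = sa.create_engine('postgresql+psycopg2://dexorder@localhost/dexorder')  # hypothetical URL
    meta = sa.MetaData()
    accounting = sa.Table('accounting', meta, autoload_with=engine)  # reflect the migrated table

    with engine.begin() as conn:
        conn.execute(accounting.insert().values(
            time=datetime.now(timezone.utc),
            account='0x9965507D1a55bcC2695C58ba16FB37d819B0A4dc',  # illustrative address
            category='Income',       # an accountingcategory enum value
            subcategory='OrderFee',  # an accountingsubcategory enum value
            token='0x0000000000000000000000000000000000000001',
            amount=1,
            chain_id=42161,
        ))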
alembic/versions/e47d1bca4b3d_sharedata.py (new file, 30 lines)
@@ -0,0 +1,30 @@
"""sharedata

Revision ID: e47d1bca4b3d
Revises: 509010f13e8b
Create Date: 2025-04-23 11:23:10.809341

"""
from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = 'e47d1bca4b3d'
down_revision: Union[str, None] = '509010f13e8b'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.create_table('sharedata',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('data', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )


def downgrade() -> None:
    op.drop_table('sharedata')
bin/examine (new executable file, 15 lines)
@@ -0,0 +1,15 @@
#!/bin/bash

kubectl port-forward postgres-0 5431:5432 &
PF_PID=$!

shutdown () {
    kill $PF_PID
    wait
}

trap shutdown INT TERM

PYTHONPATH=src python -m dexorder.bin.examine rpc_url=arbitrum_dxod db_url=postgres://dexorder@localhost:5431/dexorder "$@"

shutdown
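Note: the script wraps the examine tool in a kubectl port-forward and always tears the forward down. A rough Python equivalent of the same pattern, as an assumed workflow rather than code from this diff:

    import os
    import subprocess

    pf = subprocess.Popen(['kubectl', 'port-forward', 'postgres-0', '5431:5432'])
    try:
        subprocess.run(
            ['python', '-m', 'dexorder.bin.examine',
             'rpc_url=arbitrum_dxod',
             'db_url=postgres://dexorder@localhost:5431/dexorder'],
            env={**os.environ, 'PYTHONPATH': 'src'},
            check=True,
        )
    finally:
        pf.terminate()  # plays the role of the shell trap/shutdown
        pf.wait()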
@@ -1,5 +1,20 @@
+rpc_url = 'arbitrum_dxod'
+ws_url = 'arbitrum_dxod_ws'
+archive_url = 'arbitrum_alchemy'
+concurrent_rpc_connections=8
+metrics_port=9090
 metadata = '' # this setting approves no tokens
-account = '${accounts.gas}'
-rpc_url = '${rpc_urls.arbitrum_alchemy}'
-ws_url = '${rpc_urls.arbitrum_alchemy_ws}'
-concurrent_rpc_connections=16
+stablecoins = [
+    # in order of preference
+    '0xaf88d065e77c8cC2239327C5EDb3A432268e5831', # USDC
+    '0xFd086bC7CD5C481DCC9C85ebE478A1C0b69FCbb9', # USDT
+    '0xFF970A61A04b1cA14834A43f5dE4533eBDDB5CC8', # USDC.e
+]
+
+quotecoins = [
+    # in order of preference
+    '0x2f2a2543B76A4166549F7aaB2e75Bef0aefC5B0f', # WBTC
+    '0x82aF49447D8a07e3bd95BD0d56f35241523fBab1', # WETH
+]
+
+nativecoin = '0x82aF49447D8a07e3bd95BD0d56f35241523fBab1' # WETH
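Note: the stablecoins and quotecoins lists are ordered by preference. A sketch of the intended selection logic (an assumption about use, not code from this diff): pick the most-preferred quote token that actually appears in a pool's pair.

    def preferred_quote(pair_tokens: set[str], quotecoins: list[str]) -> str | None:
        # quotecoins is ordered most-preferred first
        for coin in quotecoins:
            if coin in pair_tokens:
                return coin
        return None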
File diff suppressed because one or more lines are too long
@@ -1,6 +1,8 @@
-rpc_url = '${rpc_urls.finaldata}'
+rpc_url = 'arbitrum_dxod'
+archive_url = 'finaldata'
 ws_url = ''
 redis_url = ''
 ohlc_dir = '/ohlc'
 walker_flush_interval=25
 concurrent_rpc_connections=9999
+pagerduty='' # disable
@@ -10,9 +10,15 @@ level='DEBUG'
 [handlers.console]
 class='logging.StreamHandler'
-formatter='notime'
+formatter='default'
 stream='ext://sys.stdout'
+
+[formatters.default]
+# https://docs.python.org/3/library/logging.html#logrecord-attributes
+format='%(asctime)s %(levelname)s %(name)s %(message)s'
+# https://docs.python.org/3/library/time.html#time.strftime
+datefmt='%Y-%m-%d %H:%M:%S'
 
 [formatters.notime]
 # https://docs.python.org/3/library/logging.html#logrecord-attributes
 format='%(levelname)s %(name)s %(message)s'
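Note: a sketch under the assumption that this TOML is parsed and handed to logging.config.dictConfig; the repository's actual loader is not shown in this diff, and the file name here is hypothetical.

    import logging.config
    import tomllib  # Python 3.11+

    with open('logging.toml', 'rb') as f:  # hypothetical file name
        cfg = tomllib.load(f)
    cfg.setdefault('version', 1)  # dictConfig requires a schema version key
    logging.config.dictConfig(cfg)
    logging.getLogger('dexorder').debug('rendered by [formatters.default] with asctime')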
@@ -1,5 +1,8 @@
 metadata='metadata.json' # the Dockerfile will move metadata-finaldata.json into positon
-account = '${accounts.admin}' # todo switch back to accounts.gas
+accounts = [
+    # dev account #4
+    '0x7c852118294e51e653712a81e05800f419141751be58f605c371e15141b007a6', # 0x90F79bf6EB2c4f870365E785982E1f101E93b906
+]
 rpc_url = '${rpc_urls.arbsep_alchemy}'
 mirror_source_rpc_url='${rpc_urls.arbsep_alchemy}'
@@ -10,9 +10,15 @@ level='DEBUG'
 [handlers.console]
 class='logging.StreamHandler'
-formatter='notime'
+formatter='default'
 stream='ext://sys.stdout'
+
+[formatters.default]
+# https://docs.python.org/3/library/logging.html#logrecord-attributes
+format='%(asctime)s %(levelname)s %(name)s %(message)s'
+# https://docs.python.org/3/library/time.html#time.strftime
+datefmt='%Y-%m-%d %H:%M:%S'
 
 [formatters.notime]
 # https://docs.python.org/3/library/logging.html#logrecord-attributes
 format='%(levelname)s %(name)s %(message)s'
@@ -1,6 +1,18 @@
-[accounts]
 
 [rpc_urls]
 local='http://localhost:8545'
 local_ws='ws://localhost:8545'
 arbitrum_alchemy='https://arb-mainnet.g.alchemy.com/v2/opbIf1mSo9GMXLhA1a4nhwtEzscgGdhW'
+
+accounts = [
+    # these are the first ten dev accounts in anvil/hardhat
+    # '0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80', # 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266
+    # '0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d', # 0x70997970C51812dc3A010C7d01b50e0d17dc79C8
+    # '0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a', # 0x3C44CdDdB6a900fa2b585dd299e03d12FA4293BC
+    # '0x7c852118294e51e653712a81e05800f419141751be58f605c371e15141b007a6', # 0x90F79bf6EB2c4f870365E785982E1f101E93b906
+    # '0x47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a', # 0x15d34AAf54267DB7D7c367839AAf71A00a2C6A65
+    '0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba', # 0x9965507D1a55bcC2695C58ba16FB37d819B0A4dc
+    '0x92db14e403b83dfe3df233f83dfa3a0d7096f21ca9b0d6d6b8d88b2b4ec1564e', # 0x976EA74026E726554dB657fA54763abd0C3a0aa9
+    # '0x4bbbf85ce3377467afe5d46f804f221813b2bb87f24d81f60f1fcdbf7cbf4356', # 0x14dC79964da2C08b23698B3D3cc7Ca32193d9955
+    # '0xdbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97', # 0x23618e81E3f5cdF7f54C3d65f7FBc0aBf5B21E8f
+    # '0x2a871d0798f97d79848a013d4936a73bf4cc922c825d33c1cf7073dff6d409c6', # 0xa0Ee7A142d267C1f36714E4a8F75612F20a79720
+]
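Note: the address in each inline comment can be recomputed from its private key with eth_account, the same library the codebase uses in base/account.py below.

    import eth_account

    key = '0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba'
    print(eth_account.Account.from_key(key).address)
    # -> 0x9965507D1a55bcC2695C58ba16FB37d819B0A4dc (matches the inline comment)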
@@ -1,3 +1,2 @@
-account='test1'
 rpc_url='local'
 ws_url='local_ws'
@@ -1,20 +1,24 @@
-aiohttp==3.9.5
+aiohappyeyeballs==2.4.3
+aiohttp==3.11.13
 aiosignal==1.3.1
-alembic==1.13.3
+alembic==1.15.1
 annotated-types==0.7.0
 antlr4-python3-runtime==4.9.3
 asn1crypto==1.5.1
 async-lru==2.0.4
 attrs==23.2.0
 bip-utils==2.9.3
-bitarray==2.9.2
+bitarray==3.1.1
-cachetools==5.5.0
+cachetools==5.5.2
+cattrs==24.1.2
 cbor2==5.6.4
 certifi==2024.2.2
 cffi==1.16.0
-charset-normalizer==3.3.2
+charset-normalizer==3.4.1
 ckzg==1.0.2
+click==8.1.8
 coincurve==20.0.0
+coremltools==8.2
 crcmod==1.7
 cytoolz==0.12.3
 defaultlist==1.0.0
@@ -22,64 +26,115 @@ durationpy==0.9
 ecdsa==0.19.0
 ed25519-blake2b==1.4.1
 eth-account==0.11.3
-eth-bloom==3.0.1
+eth-bloom==3.1.0
 eth-hash==0.7.0
 eth-keyfile==0.8.1
-eth-keys==0.5.1
+eth-keys==0.6.1
 eth-rlp==1.0.1
 eth-typing==4.4.0
 eth-utils==4.1.1
-eth_abi==5.1.0
+eth_abi==5.2.0
+filelock==3.17.0
 frozenlist==1.4.1
+fsspec==2025.2.0
 google-auth==2.35.0
 greenlet==3.0.3
 hexbytes==0.3.1
 hiredis==3.0.0
 idna==3.7
+imageio==2.37.0
+importlib_resources==6.5.2
+Jinja2==3.1.6
+joblib==1.4.2
 jsonschema==4.21.1
 jsonschema-specifications==2023.12.1
+kraken==5.3.0
 kubernetes==31.0.0
+lazy_loader==0.4
+lightning==2.4.0
+lightning-utilities==0.14.0
 lru-dict==1.2.0
+lxml==5.3.1
 Mako==1.3.3
+markdown-it-py==3.0.0
 MarkupSafe==2.1.5
+mdurl==0.1.2
+mpmath==1.3.0
 msgpack-python==0.5.6
 multidict==6.0.5
-numpy==2.1.2
+networkx==3.4.2
+numpy==2.0.2
+nvidia-cublas-cu12==12.1.3.1
+nvidia-cuda-cupti-cu12==12.1.105
+nvidia-cuda-nvrtc-cu12==12.1.105
+nvidia-cuda-runtime-cu12==12.1.105
+nvidia-cudnn-cu12==9.1.0.70
+nvidia-cufft-cu12==11.0.2.54
+nvidia-curand-cu12==10.3.2.106
+nvidia-cusolver-cu12==11.4.5.107
+nvidia-cusparse-cu12==12.1.0.106
+nvidia-nccl-cu12==2.20.5
+nvidia-nvjitlink-cu12==12.8.93
+nvidia-nvtx-cu12==12.1.105
 oauthlib==3.2.2
 omegaconf==2.3.0
-orjson==3.10.7
+orjson==3.10.15
+packaging==24.2
+pagerduty==1.0.0
 parsimonious==0.10.0
-pdpyras==5.3.0
+pillow==11.1.0
+prometheus_client==0.21.1
+propcache==0.2.0
 protobuf==5.26.1
-psycopg2-binary==2.9.9
+psycopg2-binary==2.9.10
 py-sr25519-bindings==0.2.0
+pyaml==25.1.0
+pyarrow==19.0.1
 pyasn1==0.6.1
 pyasn1_modules==0.4.1
 pycparser==2.22
 pycryptodome==3.20.0
 pydantic==2.9.2
 pydantic_core==2.23.4
+Pygments==2.19.1
 PyNaCl==1.5.0
+python-bidi==0.6.6
 python-dateutil==2.9.0.post0
+pytorch-lightning==2.5.0.post0
+pytz==2025.1
 pyunormalize==15.1.0
 PyYAML==6.0.1
-redis==5.1.1
+redis==5.2.1
 referencing==0.35.0
 regex==2024.4.28
-requests==2.31.0
+requests==2.32.3
 requests-oauthlib==2.0.0
+rich==13.9.4
 rlp==4.0.1
 rpds-py==0.18.0
 rsa==4.9
+scikit-image==0.24.0
+scikit-learn==1.5.2
+scipy==1.13.1
+setuptools==75.8.2
+shapely==2.0.7
 six==1.16.0
 socket.io-emitter==0.1.5.1
 sortedcontainers==2.4.0
-SQLAlchemy==2.0.35
+SQLAlchemy==2.0.38
+sympy==1.13.3
+threadpoolctl==3.5.0
+tifffile==2025.2.18
 toolz==0.12.1
+torch==2.4.1
+torchmetrics==1.6.2
+torchvision==0.19.1
+tqdm==4.67.1
+triton==3.0.0
 types-requests==2.32.0.20240914
-typing_extensions==4.11.0
+typing_extensions==4.12.2
 urllib3==2.2.1
-web3==6.20.3
+web3==6.20.4
 websocket-client==1.8.0
 websockets==13.1
-yarl==1.9.4
+yarl==1.17.2
@@ -16,5 +16,18 @@ eth-bloom
 python-dateutil
 eth_abi
 eth_utils
-pdpyras # pagerduty
+eth_typing
+eth-keys
+eth-account
+eth-utils
+eth-typing
+pagerduty
 numpy
+bitarray
+typing_extensions
+requests
+aiohttp
+charset-normalizer
+pytz
+prometheus_client
+krakenex
@@ -3,13 +3,14 @@ import logging
 from contextvars import ContextVar
 from datetime import datetime, timezone
 from decimal import Decimal
-from typing import Callable, Any
+from typing import Callable, Any, Union, Optional
 
 from web3 import AsyncWeb3
+
 order_log = logging.getLogger('dexorder.order.log')
 
 dec = Decimal
 
 def now():
     return datetime.now(timezone.utc)
@@ -34,13 +35,16 @@ class _Token:
     def __repr__(self): return self.__token_name
     def __str__(self): return self.__token_name
 
-class _FalseToken (_Token):
+class _FalseyToken (_Token):
     def __bool__(self): return False
 
-NARG = _FalseToken('NARG')
-DELETE = _FalseToken('DELETE') # used as a value token to indicate removal of the key
+NARG = _FalseyToken('NARG')
+DELETE = _FalseyToken('DELETE') # used as a value token to indicate removal of the key
 ADDRESS_0 = '0x0000000000000000000000000000000000000000'
+NATIVE_TOKEN = '0x0000000000000000000000000000000000000001' # We use 0x01 to indicate the use of native ETH wherever a token address is normally required
+USD_FIAT = '0x0000000000000000000000000000000000000055' # We use 0x55 (ASCII 'U') to indicate the use of fiat USD
+CHAIN_ID_OFFCHAIN = -1
 WEI = 1
 GWEI = 1_000_000_000
 ETH = 1_000_000_000_000_000_000
@@ -55,7 +59,7 @@ _cwd() # do this first so that config has the right current working directory
 
 # ordering here is important!
 from .base.chain import Blockchain # the singletons are loaded into the dexorder.blockchain.* namespace
-from .util import async_yield
+from .util import async_yield, json
 from .base.fixed import Fixed2, FixedDecimals, Dec18
 from .configuration import config
 from .base.account import Account
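Note: a minimal sketch (an assumption about intended use, not code from this diff) of how the new sentinel addresses are meant to be checked before a value is treated as an ERC-20 contract address.

    NATIVE_TOKEN = '0x0000000000000000000000000000000000000001'
    USD_FIAT = '0x0000000000000000000000000000000000000055'

    def describe_token(addr: str) -> str:
        if addr == NATIVE_TOKEN:
            return 'native ETH'
        if addr == USD_FIAT:
            return 'fiat USD (off-chain)'
        return f'ERC-20 at {addr}'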
src/dexorder/accounting/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
from .accounting import *
src/dexorder/accounting/accounting.py (new file, 321 lines)
@@ -0,0 +1,321 @@
import asyncio
import logging
from typing import Union

from sqlalchemy import select, func, text
from typing_extensions import Optional
from web3.exceptions import ContractLogicError
from web3.types import EventData

from dexorder import db, dec, NATIVE_TOKEN, from_timestamp, config, ADDRESS_0, now, Account, metric
from dexorder.base import TransactionReceiptDict
from dexorder.base.chain import current_chain
from dexorder.blocks import get_block_timestamp, get_block, current_block
from dexorder.contract import ContractProxy
from dexorder.contract.dexorder import get_factory_contract, get_mirrorenv, get_mockenv
from dexorder.database.model.accounting import AccountingSubcategory, Accounting, AccountingCategory, AccountKind, \
    DbAccount, Reconciliation
from dexorder.feemanager import FeeManager
from dexorder.pools import mark_to_market, pool_prices, get_pool, add_mark_pool, quotes
from dexorder.tokens import adjust_decimals as adj_dec, get_token, get_balance
from dexorder.util import hexstr

log = logging.getLogger(__name__)

accounting_initialized = False

_tracked_addrs = set()

def is_tracked_address(addr: str) -> bool:
    return addr in _tracked_addrs


class ReconciliationException(Exception):
    pass


def initialize_accounting():
    global accounting_initialized
    if not accounting_initialized:
        load_accounts_cache()
        accounting_initialized = True


async def initialize_accounting_runner():
    global accounting_initialized
    if not accounting_initialized:
        await _initialize_mark_to_market() # set up mark-to-market first, so accounts can value their initial balances
        await _initialize_accounts()
        load_accounts_cache()
        accounting_initialized = True
        log.info(f'accounting initialized\n\tstablecoins: {config.stablecoins}\n\tquotecoins: {config.quotecoins}\n\tnativecoin: {config.nativecoin}')


async def _initialize_accounts():
    # Since this is called by top-level main functions outside the Runner, we trigger an explicit db commit/rollback
    try:
        # noinspection PyStatementEffect
        await _initialize_accounts_2()
        db.session.commit()
    except:
        db.session.rollback()
        raise


async def _initialize_accounts_2():
    fm = await FeeManager.get()
    of_account = ensure_account(fm.order_fee_account_addr, AccountKind.OrderFee)
    gf_account = ensure_account(fm.gas_fee_account_addr, AccountKind.GasFee)
    ff_account = ensure_account(fm.fill_fee_account_addr, AccountKind.FillFee)
    exe_accounts = [ensure_account(account.address, AccountKind.Execution) for account in Account.all()]
    if current_chain.get().id in [1337, 31337]:
        log.debug('adjusting debug account balances')
        await asyncio.gather(
            *map(adjust_balance, (of_account, gf_account, ff_account, *exe_accounts))
        )


def load_accounts_cache(*, chain=None):
    if chain is None:
        chain = current_chain.get()
    for db_account in db.session.execute(select(DbAccount).where(DbAccount.chain==chain)).scalars():
        _tracked_addrs.add(db_account.address)
        log.info(f'tracking account {db_account.chain.id} {db_account.address}')


async def _initialize_mark_to_market():
    quotes.clear()
    quotes.extend(config.stablecoins)
    quotes.extend(config.quotecoins)
    if not quotes and current_chain.get().id in [1337, 31337]:
        weth = meh = usdc = usxd = None
        mirror = get_mirrorenv()
        if mirror is not None:
            num_tokens = await mirror.numTokens()
            for i in range(num_tokens):
                try:
                    token_key = await mirror.tokenKeys(i)
                except ContractLogicError:
                    break
                else:
                    mirror_token = await mirror.tokens(token_key)
                    token = await get_token(mirror_token)
                    log.info(f'found mirror token {token["symbol"]} {mirror_token}')
                    if token['symbol'] == 'WETH':
                        weth = mirror_token
                    elif token['symbol'] == 'USDC':
                        usdc = mirror_token

        mock = get_mockenv()
        if mock is not None:
            meh = await mock.COIN()
            usxd = await mock.USD()

        config.stablecoins = [t for t in (usdc, usxd) if t is not None]
        config.quotecoins = [t for t in (weth, meh) if t is not None]
        if not config.nativecoin:
            config.nativecoin = weth if weth is not None else meh if meh is not None else None
    elif not config.nativecoin:
        factory = get_factory_contract()
        impl_addr = await factory.implementation()
        wrapper = await ContractProxy(impl_addr, 'Vault').wrapper()
        if wrapper != ADDRESS_0:
            log.info(f'Detected native coin wrapper {wrapper}')
            config.nativecoin = wrapper
    quotes.clear()
    quotes.extend(config.stablecoins)
    quotes.extend(config.quotecoins)
    for addr in pool_prices.keys():
        pool = await get_pool(addr)
        add_mark_pool(addr, pool['base'], pool['quote'], pool['fee'])


def ensure_account(addr: str, kind: AccountKind, *, chain=None) -> DbAccount:
    if chain is None:
        chain = current_chain.get()
    found = db.session.get(DbAccount, (chain, addr))
    if found:
        if found.kind != kind:
            log.warning(f'Account {addr} has wrong kind {found.kind} != {kind}')
            # found.kind = kind
            db.session.add(found)
        _tracked_addrs.add(found.address)
    else:
        found = DbAccount(chain=chain, address=addr, kind=kind, balances={})
        db.session.add(found)
        _tracked_addrs.add(found.address)
    return found


async def handle_feeaccountschanged(fee_accounts: EventData):
    try:
        order_fee_account_addr = fee_accounts['args']['orderFeeAccount']
        gas_fee_account_addr = fee_accounts['args']['gasFeeAccount']
        fill_fee_account_addr = fee_accounts['args']['fillFeeAccount']
    except KeyError:
        log.warning(f'Could not parse FeeAccountsChanged {fee_accounts}')
        return
    fm = await FeeManager.get()
    fm.order_fee_account_addr = order_fee_account_addr
    fm.gas_fee_account_addr = gas_fee_account_addr
    fm.fill_fee_account_addr = fill_fee_account_addr
    await _initialize_accounts_2()


async def accounting_transfer(receipt: TransactionReceiptDict, token: str,
                              sender: str, receiver: str, amount: Union[dec,int], adjust_decimals=True):
    block_hash = hexstr(receipt['blockHash'])
    tx_id = hexstr(receipt['transactionHash'])
    await asyncio.gather(
        accounting_transaction_gas(receipt),
        add_accounting_entry_m2m(sender, block_hash, tx_id, AccountingCategory.Transfer, None,
                                 token, -amount, receiver, adjust_decimals=adjust_decimals),
        add_accounting_entry_m2m(receiver, block_hash, tx_id, AccountingCategory.Transfer, None,
                                 token, amount, sender, adjust_decimals=adjust_decimals),
    )


async def accounting_transaction_gas(receipt: TransactionReceiptDict, subcategory: AccountingSubcategory = AccountingSubcategory.TransactionGas):
    """ Accounts for the gas spent on the given transaction """
    amount = dec(receipt['gasUsed']) * dec(receipt['effectiveGasPrice'])
    await add_accounting_entry_m2m(receipt['from'],
                                   hexstr(receipt['blockHash']), hexstr(receipt['transactionHash']),
                                   AccountingCategory.Expense, subcategory, NATIVE_TOKEN, -amount
                                   )


async def accounting_placement(order_placed: EventData):
    block_hash = hexstr(order_placed['blockHash'])
    tx_id = hexstr(order_placed['transactionHash'])
    try:
        order_fee = int(order_placed['args']['orderFee'])
        gas_fee = int(order_placed['args']['gasFee'])
    except KeyError:
        log.warning(f'Rogue DexorderPlacedEvent in tx {hexstr(tx_id)}')
        return
    fm = await FeeManager.get()
    await add_accounting_entry_m2m(fm.order_fee_account_addr, block_hash, tx_id, AccountingCategory.Income,
                                   AccountingSubcategory.OrderFee, NATIVE_TOKEN, order_fee)
    await add_accounting_entry_m2m(fm.gas_fee_account_addr, block_hash, tx_id, AccountingCategory.Income,
                                   AccountingSubcategory.GasFee, NATIVE_TOKEN, gas_fee)


async def accounting_fill(fill: EventData, out_token: str) -> dec:
    """
    Returns the mark-to-market USD value of the transaction.
    """
    block_hash = hexstr(fill['blockHash'])
    tx_id = hexstr(fill['transactionHash'])
    fee = int(fill['args']['fillFee'])
    fm = await FeeManager.get()
    return await add_accounting_entry_m2m(fm.fill_fee_account_addr, block_hash, tx_id, AccountingCategory.Income,
                                          AccountingSubcategory.FillFee, out_token, fee)


async def add_accounting_entry_m2m(account: str, block_hash: Optional[str], tx_id: Optional[str], category, subcategory, token, amount, note=None,
                                   *, adjust_decimals=True) -> dec:
    """
    Returns the mark-to-market USD value of the entry.
    """
    if amount == 0:
        return dec(0)
    # Adjust database account if it exists
    if not is_tracked_address(account):
        return dec(0)
    if adjust_decimals:
        amount = await adj_dec(token, amount)
    # noinspection PyTypeChecker
    time = now() if block_hash is None else from_timestamp(await get_block_timestamp(block_hash))
    value = mark_to_market(token, amount)
    log.debug(f'accounting row {time} {account} {category} {subcategory} {token} {amount} ${value}')
    chain_id = current_chain.get().id
    add_accounting_entry(chain_id, account, time, category, subcategory, token, amount, value, tx_id, note)
    return value


def add_accounting_entry(chain_id, account, time, category, subcategory, token, amount, value=None, tx_id=None, note=None):
    if not is_tracked_address(account):
        return
    db.session.add(Accounting(account=account,
                              time=time, category=category, subcategory=subcategory,
                              token=token, amount=amount, value=value, note=note,
                              chain_id=chain_id, tx_id=tx_id,
                              ))
    account_db = db.session.get(DbAccount, (current_chain.get(), account))
    new_amount = account_db.balances.get(token, dec(0)) + amount
    if new_amount < 0:
        log.error(
            f'negative balance for account {account} when applying accounting row {time} {category} {subcategory} {token} {amount} ${value}')
    account_db.balances[token] = new_amount
    db.session.add(account_db) # deep changes would not be detected by the ORM
    db.session.flush()


async def adjust_balance(account: DbAccount, token=NATIVE_TOKEN, subcategory=AccountingSubcategory.InitialBalance, note=None):
    true_balance = await get_balance(account.address, token)
    amount = true_balance - account.balances.get(token, dec(0))
    await add_accounting_entry_m2m(account.address, None, None, AccountingCategory.Special, subcategory, NATIVE_TOKEN, amount, note, adjust_decimals=False)


async def accounting_reconcile(account: DbAccount, block_id: Optional[str] = None, last_accounting_row_id: Optional[int] = None):
    # Fetch the latest reconciliation for the account
    latest_recon = db.session.execute(
        select(Reconciliation).where(
            Reconciliation.account == account
        ).order_by(Reconciliation.accounting_id.desc()).limit(1)
    ).scalar_one_or_none()
    first_accounting_row_id = latest_recon.accounting_id+1 if latest_recon else None

    # Retrieve the end height corresponding to the block_id
    block = await get_block(block_id) if block_id else current_block.get()
    chain_id = current_chain.get().id

    balances = dict(latest_recon.balances) if latest_recon else {}

    # Retrieve all accounting rows for this account within the reconciliation period
    accounting_query = select(Accounting).where(
        Accounting.chain_id == chain_id,
        Accounting.account == account.address,
    )
    if first_accounting_row_id is not None:
        accounting_query = accounting_query.where(Accounting.id >= first_accounting_row_id)
    if last_accounting_row_id is not None:
        accounting_query = accounting_query.where(Accounting.id <= last_accounting_row_id)
    accounting_query = accounting_query.order_by(Accounting.id)
    accounting_rows = db.session.execute(accounting_query).scalars().all()

    if last_accounting_row_id is None:
        last_accounting_row_id = db.session.execute(select(func.max(Accounting.id))).scalar_one_or_none()
        if last_accounting_row_id is None:
            log.warning("No records found in the Accounting table")
            return

    # Update balances using accounting rows
    for row in accounting_rows:
        balances[row.token] = balances.get(row.token, dec(0)) + row.amount

    # Verify balances with the stored DbAccount balances
    for token, balance in balances.items():
        db_balance = account.balances.get(token, dec(0))
        if balance != db_balance:
            raise ReconciliationException(
                f"DB mismatch in balances for account {account.address} token {token}: accounting={balance} db={db_balance}"
            )
        on_chain_balance = await get_balance(account.address, token)
        if balance != on_chain_balance:
            raise ReconciliationException(
                f"Blockchain mismatch for account {account.address} token {token}: accounting={balances[token]} on-chain={on_chain_balance}"
            )

    # Create a new reconciliation record
    new_recon = Reconciliation(
        chain = current_chain.get(),
        address=account.address,
        accounting_id=last_accounting_row_id,
        height=block.height,
        balances=balances,
    )
    db.session.add(new_recon)
    db.session.commit()
    log.info(f'reconciled account {account.address} at height {block.height}')
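Note: a worked sketch, with illustrative numbers only, of what accounting_transfer posts per the code above: gas is expensed to the sender, and the transfer itself produces two mirrored rows so that per-account balances stay consistent.

    from decimal import Decimal as dec

    amount = dec('1.5')
    rows = [
        ('sender',   'Transfer', -amount),  # paired row, note=receiver
        ('receiver', 'Transfer', +amount),  # paired row, note=sender
        ('sender',   'Expense',  -dec(21000) * dec(2_000_000_000)),  # gasUsed * effectiveGasPrice, in wei
    ]
    assert rows[0][2] + rows[1][2] == 0  # the transfer itself nets to zero across accounts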
src/dexorder/accounting/kraken.py (new file, 65 lines)
@@ -0,0 +1,65 @@
import logging
import tempfile
from dataclasses import dataclass
from typing import Optional

import krakenex

from dexorder import timestamp
from dexorder.bin.executable import execute

log = logging.getLogger(__name__)

kraken_api_key=r'HqPHnGsAHunFtaP8YZTFsyh+LauVrcgFHi/US+RseR/4DiT+NG/JpONV'
kraken_api_secret=r'4hvdMdaN5TlNlyk2PShdRCsOE/T4sFzeBrR7ZjC+LUGuAXhBehY8vvWDZSUSyna2OFeOJ9GntPvyXOhrpx70Bg=='

kraken = krakenex.API()


# start and end should be timestamps or datetimes. inclusiveness is [start,end) as usual
def kraken_get_ledger(start=None, end=None):
    entries = []
    offset=1 # 1-based ffs
    if start:
        start = timestamp(start) - 1 # kraken start is EXCLUSIVE for some reason
    if end:
        end = timestamp(end) - 1 # kraken end is INCLUSIVE. :/
    while True:
        kl = kraken.query_private('Ledgers', {'start':start, 'end':end, 'ofs':offset})
        print(repr(kl))
        break
        if kl.empty:
            break
        for t in kl.itertuples():
            print(t)
        # noinspection PyShadowingBuiltins
        offset += len(kl)
    return entries


@dataclass
class KrakenConfig:
    kraken_api_key: Optional[str] = None
    kraken_api_secret: Optional[str] = None
    kraken_start: Optional[str] = None # timestamp or date
    kraken_end: Optional[str] = None # timestamp or date


async def main(kconfig: KrakenConfig):
    load_kraken_key(kconfig)
    kraken_get_ledger()


def load_kraken_key(kconfig):
    temp = tempfile.NamedTemporaryFile()
    if not kconfig.kraken_api_key or not kconfig.kraken_api_secret:
        log.error("Must set kraken_api_key= and kraken_api_secret= on the command line")
        exit(1)
    temp.write(kconfig.kraken_api_key.encode())
    temp.write(b'\n')
    temp.write(kconfig.kraken_api_secret.encode())
    temp.write(b'\n')
    kraken.load_key(temp.name)


if __name__ == '__main__':
    execute(main, parse_args=KrakenConfig)
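Note: a worked example of the boundary adjustments in kraken_get_ledger above. To query the usual half-open window [start, end) against Kraken's exclusive-start/inclusive-end semantics, both bounds are shifted down by one second:

    start, end = 1_700_000_000, 1_700_003_600  # desired half-open window [start, end)
    kraken_start = start - 1  # Kraken treats 'start' as exclusive
    kraken_end = end - 1      # Kraken treats 'end' as inclusive
    # rows returned then satisfy start <= t < end, matching the usual convention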
@@ -19,7 +19,9 @@ class AddressMetadata (TypedDict):


 def save_addrmeta(address: str, meta: AddressMetadata):
-    if meta['type'] == 'Token':
+    if meta is None:
+        pass
+    elif meta['type'] == 'Token':
         meta: OldTokenDict
         updated = Token.load(meta)
         token = db.session.get(Token, (current_chain.get().id, address))
@@ -1,30 +1,33 @@
 import logging
 import socket
 
-import pdpyras
+import pagerduty
 
 from dexorder import NARG, config
 
 log = logging.getLogger(__name__)
 
 
-def alert(title, message, dedup_key=NARG, log_level=logging.ERROR, do_log=True):
+def alert(title, message, dedup_key=NARG, log_level=logging.ERROR, do_log=True, severity='critical'):
     if dedup_key is NARG:
-        dedup_key = str(hash(title + '|' + message))
+        dedup_key = str(hash(title))
     if do_log:
         msg = f'{title}: {message}'
         log.log(log_level, msg) # if log_level=CRITICAL for example, make sure this does not re-alert!
-    alert_pagerduty(title, message, dedup_key, log_level)
+    alert_pagerduty(title, message, dedup_key, severity)
 
 
 def warningAlert(title, message, dedup_key=NARG, log_level=logging.WARNING):
-    return alert(title, message, dedup_key, log_level)
+    return alert(title, message, dedup_key, log_level, severity='warning')
+
+
+def infoAlert(title, message, dedup_key=NARG, log_level=logging.INFO):
+    return alert(title, message, dedup_key, log_level, severity='info')
 
 
 pagerduty_session = None
 hostname = None
 
-def alert_pagerduty(title, message, dedup_key, log_level):
+def alert_pagerduty(title, message, dedup_key, severity):
     if not config.pagerduty:
         return
     # noinspection PyBroadException
@@ -32,10 +35,9 @@ def alert_pagerduty(title, message, dedup_key, log_level):
         global pagerduty_session
         global hostname
         if pagerduty_session is None:
-            pagerduty_session = pdpyras.EventsAPISession(config.pagerduty)
+            pagerduty_session = pagerduty.EventsApiV2Client(config.pagerduty)
             hostname = socket.gethostname()
-        sev = 'error' if log_level >= logging.ERROR else 'warning'
-        pagerduty_session.trigger(title, hostname, severity=sev, custom_details={'message': message}, dedup_key=dedup_key)
+        pagerduty_session.trigger(title, hostname, severity=severity, custom_details={'message': message}, dedup_key=dedup_key, payload=dict(severity=severity))
     except Exception:
         log.warning('Could not notify PagerDuty!', exc_info=True)
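Note: a usage sketch for the updated alert API in this diff, where severity now flows through to PagerDuty instead of being derived from the log level. The import path is assumed, not shown in the diff.

    from dexorder.alert import alert, warningAlert, infoAlert  # module path assumed

    alert('db down', 'cannot reach postgres')                  # severity='critical'
    warningAlert('slow rpc', 'latency over 2s for 5 minutes')  # severity='warning'
    infoAlert('rebalance', 'moved funds to gas account')       # severity='info'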
@@ -1,5 +1,6 @@
+from abc import abstractmethod
 from dataclasses import dataclass
-from typing import TypedDict, Union, Type
+from typing import TypedDict, Union, Type, Any, Callable
 
 Address = str
 Quantity = Union[str,int]
@@ -13,8 +14,16 @@ class TransactionRequest:
     """
     type: str
 
+    def __init__(self, type: str, key: Any):
+        """
+        key is used to deduplicate requests
+        """
+        self.type = type
+        self.key = key
+
 
 # subclasses of TransactionRequest must register their type code here so the appropriate dataclass may be constructed
-transaction_request_registry: dict[str, Type[TransactionRequest]] = {}
+transaction_request_deserializers: dict[str, Callable[[...],TransactionRequest]] = {}
 
 
 TransactionDict = TypedDict( 'TransactionDict', {
@@ -1,81 +1,114 @@
|
|||||||
from contextvars import ContextVar
|
import asyncio
|
||||||
from typing import Union, Optional
|
import logging
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
import eth_account
|
import eth_account
|
||||||
from eth_account.signers.local import LocalAccount
|
from eth_account.signers.local import LocalAccount
|
||||||
from web3.middleware import construct_sign_and_send_raw_middleware
|
from web3.middleware import construct_sign_and_send_raw_middleware
|
||||||
|
|
||||||
from dexorder import NARG, config, current_w3
|
from dexorder import config, current_w3, metric
|
||||||
|
from dexorder.base.chain import current_chain
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
# this is just here for typing the extra .name. the __new__() function returns an eth_account...LocalAccount
|
# this is just here for typing the extra .name. the __new__() function returns an eth_account...LocalAccount
|
||||||
# we do it this way because web3py expects a LocalAccount object but we cannot construct one directly with a super()
|
# we do it this way because web3py expects a LocalAccount object but we cannot construct one directly with a super()
|
||||||
# call but must instead use a factory :(
|
# call but must instead use a factory :(
|
||||||
class Account (LocalAccount):
|
class Account (LocalAccount):
|
||||||
|
_main_account = None
|
||||||
|
_pool = None
|
||||||
|
_all = []
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_named(account_name: str) -> Optional['Account']:
|
def all():
|
||||||
account = config.accounts.get(account_name)
|
Account._init_pool()
|
||||||
return Account.get(account) if account else Account.get()
|
return Account._all
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
# noinspection PyInitNewSignature
|
def get():
|
||||||
def get(account:[Union,str]=NARG) -> Optional['Account']:
|
"""
|
||||||
if account is NARG:
|
Always returns the main account, even if it's busy.
|
||||||
account = config.account
|
"""
|
||||||
if type(account) is not str:
|
Account._init_pool()
|
||||||
return account
|
return Account._main_account
|
||||||
|
|
||||||
key_str = config.accounts.get(account, account)
|
@staticmethod
|
||||||
|
async def acquire():
|
||||||
|
"""
|
||||||
|
MUST call account.release() after the transaction has completed, to return this Account to the available pool.
|
||||||
|
"""
|
||||||
|
Account._init_pool()
|
||||||
|
# log.debug(f'available accounts: {Account._pool.qsize()}')
|
||||||
try:
|
try:
|
||||||
local_account = eth_account.Account.from_key(key_str)
|
async with asyncio.timeout(1):
|
||||||
return Account(local_account, key_str, account)
|
result: "Account" = await Account._pool.get()
|
||||||
except ValueError:
|
except asyncio.TimeoutError:
|
||||||
try:
|
log.error('waiting for an available account')
|
||||||
# was the key missing a leading '0x'?
|
result = await Account._pool.get()
|
||||||
fixed = '0x' + key_str
|
# mark as out of pool
|
||||||
local_account = eth_account.Account.from_key(fixed)
|
result._in_pool = False
|
||||||
print(f'WARNING: account "{account}" is missing a leading "0x"')
|
metric.account_available.set(Account._pool.qsize())
|
||||||
return Account(local_account, fixed, account)
|
return result
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
try:
|
|
||||||
# was the key an integer posing as a string?
|
|
||||||
converted = f'{int(key_str):#0{66}x}'
|
|
||||||
local_account = eth_account.Account.from_key(converted)
|
|
||||||
print(f'WARNING: account "{account}" is set as an integer instead of a string. Converted to: {converted}')
|
|
||||||
return Account(local_account, converted, account)
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
        raise ValueError(f'Could not construct account for name "{account}"')

-    def __init__(self, local_account: LocalAccount, key_str, name: str):  # todo chain_id?
-        super().__init__(local_account._key_obj, local_account._publicapi)  # from digging into the source code
-        self.name = name
-        self.key_str = key_str
+    @staticmethod
+    def _init_pool():
+        if Account._pool is None:
+            Account._pool = asyncio.Queue()
+            for key in config.accounts:
+                local_account = eth_account.Account.from_key(key)
+                account = Account(local_account)
+                if Account._main_account is None:
+                    Account._main_account = account
+                Account._pool.put_nowait(account)
+                account._in_pool = True  # this account is now in the pool
+                Account._all.append(account)
+            metric.account_available.set(Account._pool.qsize())
+            metric.account_total.set(len(Account._all))
+            log.info(f'Account pool {[a.address for a in Account._all]}')
+
+    def __init__(self, local_account: LocalAccount):  # todo chain_id?
+        super().__init__(local_account._key_obj, local_account._publicapi)  # from digging into the source code
+        self.chain_id = current_chain.get().id
         self.signing_middleware = construct_sign_and_send_raw_middleware(self)
         self._nonce: Optional[int] = None
+        self.tx_id: Optional[str] = None  # current transaction id
+        # release() idempotency tracking
+        self._in_pool: bool = False

     async def next_nonce(self):
         if self._nonce is None:
             self._nonce = await current_w3.get().eth.get_transaction_count(self.address, 'pending')
+            log.debug(f'queried nonce for account {self.address}: {self._nonce}')
         else:
             self._nonce += 1
         return self._nonce

-    def attach(self, w3):
-        w3.eth.default_account = self.address
-        try:
-            w3.middleware_onion.remove('account_signer')
-        except ValueError:
-            pass
-        w3.middleware_onion.add(self.signing_middleware, 'account_signer')
+    def reset_nonce(self):
+        self._nonce = None

     def balance(self):
         return current_w3.get().eth.get_balance(self.address)

+    def release(self):
+        """
+        Return this Account to the pool.
+
+        Idempotent: calling release() multiple times without a new acquire()
+        will only enqueue the account once.
+        """
+        # If we're already in the pool, do nothing.
+        if self._in_pool:
+            # Optional debug log; comment out if too noisy.
+            # log.debug(f'Account {self.address} already in pool; ignoring extra release()')
+            return
+
+        Account._pool.put_nowait(self)
+        self._in_pool = True
+        metric.account_available.set(Account._pool.qsize())

     def __str__(self):
-        return self.name
+        return self.address

-current_account: ContextVar[Optional[Account]] = ContextVar('current_account', default=Account.get())
+    def __hash__(self):
+        return
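The pool introduced above replaces the old named single account: workers check an Account out of Account._pool, use it for a transaction, and hand it back with release(), which is now idempotent via the _in_pool flag. A minimal checkout/return sketch, assuming the checkout side is a plain asyncio.Queue get that clears _in_pool (the acquire path is not shown in this diff, so that detail is an assumption):

    # Hypothetical usage; only _init_pool() and release() appear in the diff above.
    async def with_pooled_account(send):
        Account._init_pool()                 # idempotent: builds the pool once from config.accounts
        account = await Account._pool.get()  # assumed checkout: a plain asyncio.Queue get
        account._in_pool = False             # assumption: the real checkout path clears this flag
        try:
            return await send(account)
        finally:
            account.release()                # safe to call repeatedly: enqueues at most once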
@@ -1,6 +1,5 @@
-import logging
 from contextvars import ContextVar
-from datetime import datetime, timezone
+from datetime import datetime

 import dexorder
@@ -36,6 +35,9 @@ class Blockchain:
     def __str__(self):
         return self.name

+    def __hash__(self):
+        return self.id
+
     _instances_by_id = {}
     _instances_by_name = {}

@@ -4,7 +4,7 @@ from dataclasses import dataclass
 from enum import Enum
 from typing import Optional

-from dexorder import timestamp
+from dexorder import timestamp, from_timestamp
 from dexorder.util import hexbytes
 from dexorder.util.convert import decode_IEEE754
@@ -84,6 +84,7 @@ class SwapOrder:
     minFillAmount: int
     amountIsInput: bool
     outputDirectlyToOwner: bool
+    inverted: bool
     conditionalOrder: int
     tranches: list['Tranche']
@@ -93,17 +94,19 @@ class SwapOrder:

     @staticmethod
     def load(obj):
-        return SwapOrder(obj[0], obj[1], Route.load(obj[2]), int(obj[3]), int(obj[4]), obj[5], obj[6], obj[7],
-                         [Tranche.load(t) for t in obj[8]])
+        return SwapOrder(obj[0], obj[1], Route.load(obj[2]), int(obj[3]), int(obj[4]), obj[5], obj[6], obj[7], obj[8],
+                         [Tranche.load(t) for t in obj[9]])

     @staticmethod
     def load_from_chain(obj):
-        return SwapOrder(obj[0], obj[1], Route.load(obj[2]), int(obj[3]), int(obj[4]), obj[5], obj[6], obj[7],
-                         [Tranche.load_from_chain(t) for t in obj[8]])
+        return SwapOrder(obj[0], obj[1], Route.load(obj[2]), int(obj[3]), int(obj[4]), obj[5], obj[6], obj[7], obj[8],
+                         [Tranche.load_from_chain(t) for t in obj[9]])

     def dump(self):
-        return (self.tokenIn, self.tokenOut, self.route.dump(), str(self.amount), str(self.minFillAmount), self.amountIsInput,
-                self.outputDirectlyToOwner, self.conditionalOrder, [t.dump() for t in self.tranches])
+        return (self.tokenIn, self.tokenOut, self.route.dump(),
+                str(self.amount), str(self.minFillAmount), self.amountIsInput,
+                self.outputDirectlyToOwner, self.inverted, self.conditionalOrder,
+                [t.dump() for t in self.tranches])

     def __str__(self):
         msg = f'''
@@ -113,6 +116,7 @@ SwapOrder
 exchange: {self.route.exchange.name, self.route.fee}
 amount: {"input" if self.amountIsInput else "output"} {self.amount}{" to owner" if self.outputDirectlyToOwner else ""}
 minFill: {self.minFillAmount}
+inverted: {self.inverted}
 tranches:
 '''
         for tranche in self.tranches:
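With the new inverted field, the dump()/load() tuple gains one slot and the tranche list moves from index 8 to index 9. The positional layout implied by dump() above, as a sketch:

    # 0 tokenIn, 1 tokenOut, 2 route, 3 amount, 4 minFillAmount,
    # 5 amountIsInput, 6 outputDirectlyToOwner, 7 inverted, 8 conditionalOrder, 9 tranches
    obj = order.dump()
    assert obj[7] in (True, False)   # inverted now occupies slot 7
    assert isinstance(obj[9], list)  # tranches shifted from obj[8] to obj[9]
    restored = SwapOrder.load(obj)   # round-trips under the new layout

Serialized orders written before this change are one element shorter, so readers and writers of this tuple must move together.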
@@ -246,6 +250,26 @@ class ElaboratedSwapOrderStatus:
     def copy(self):
         return copy.deepcopy(self)

+    def __str__(self):
+        msg = f'''
+SwapOrder
+status: {self.state.name}
+in: {self.order.tokenIn}
+out: {self.order.tokenOut}
+exchange: {self.order.route.exchange.name, self.order.route.fee}
+amount: {"input" if self.order.amountIsInput else "output"} {self.filledIn if self.order.amountIsInput else self.filledOut}/{self.order.amount}{" to owner" if self.order.outputDirectlyToOwner else ""}
+minFill: {self.order.minFillAmount}
+inverted: {self.order.inverted}
+tranches:
+'''
+        for i in range(len(self.trancheStatus)):
+            tranche = self.order.tranches[i]
+            ts = self.trancheStatus[i]
+            msg += f' {tranche}\n'
+            for fill in ts.fills:
+                msg += f' {fill}\n'
+        return msg
+

 NO_OCO = 18446744073709551615  # max uint64

@@ -259,6 +283,9 @@ DISTANT_FUTURE = 4294967295 # max uint32

 MAX_FRACTION = 65535  # max uint16

+MIN_SLIPPAGE = 0.0001  # one bip
+MIN_SLIPPAGE_EPSILON = 0.000000000003


 @dataclass
 class Tranche:
@@ -340,7 +367,7 @@ class Tranche:
         )

     def __str__(self):
-        msg = f'{self.fraction/MAX_FRACTION:.1%} {"start+" if self.startTimeIsRelative else ""}{self.startTime} to {"start+" if self.startTimeIsRelative else ""}{self.endTime}'
+        msg = f'{self.fraction/MAX_FRACTION:.1%} {"start+" if self.startTimeIsRelative else ""}{from_timestamp(self.startTime)} to {"start+" if self.startTimeIsRelative else ""}{from_timestamp(self.endTime)}'
         if self.marketOrder:
             # for marketOrders, minLine.intercept is the slippage
             msg += f' market order slippage {self.minLine.intercept:.2%}'
@@ -348,11 +375,11 @@ class Tranche:
         if self.minLine.intercept or self.minLine.slope:
             msg += f' >{self.minLine.intercept:.5g}'
             if self.minLine.slope:
-                msg += f'{self.minLine.slope:+.5g}/s({self.minLine.value():5g})'
+                msg += f'{self.minLine.slope:+.5g}/s={self.minLine.value():5g}'
         if self.maxLine.intercept or self.maxLine.slope:
             msg += f' <{self.maxLine.intercept:.5g}'
             if self.maxLine.slope:
-                msg += f'{self.maxLine.slope:+.5g}/s({self.maxLine.value():5g})'
+                msg += f'{self.maxLine.slope:+.5g}/s={self.maxLine.value():5g}'
         if self.rateLimitPeriod:
             msg += f' {self.rateLimitFraction/MAX_FRACTION:.1%} every {self.rateLimitPeriod/60:.0} minutes'
         return msg
@@ -74,4 +74,4 @@ async def main():


 if __name__ == '__main__':
-    execute(main())
+    execute(main)
@@ -37,4 +37,4 @@ if __name__ == '__main__':
     time = parse_date(sys.argv[1], ignoretz=True).replace(tzinfo=timezone.utc)
     seconds_per_block = float(sys.argv[2])
     sys.argv = [sys.argv[0], *sys.argv[3:]]
-    execute(main())
+    execute(main)
95
src/dexorder/bin/examine.py
Normal file
@@ -0,0 +1,95 @@
+import argparse
+import logging
+
+from dexorder import db, blockchain
+from dexorder.base.order import OrderKey
+from dexorder.blocks import current_block, get_block
+from dexorder.blockstate import current_blockstate
+from dexorder.blockstate.blockdata import BlockData
+from dexorder.blockstate.db_state import DbState
+from dexorder.blockstate.fork import current_fork
+from dexorder.contract.dexorder import VaultContract
+from dexorder.order.orderstate import Order
+from dexorder.tokens import adjust_decimals
+from dexorder.util import json
+from dexorder.vault_blockdata import vault_balances, pretty_balances
+from dexorder.bin.executable import execute
+
+log = logging.getLogger(__name__)
+
+
+async def dump_orders(orders, args):
+    if args.json:
+        print(json.dumps([order.status.dump() for order in orders]))
+    else:
+        first = True
+        for order in orders:
+            if first:
+                first = False
+            else:
+                print()
+            print(await order.pprint())
+
+
+def command_vault_argparse(subparsers):
+    parser = subparsers.add_parser('vault', help='show the vault\'s balances and orders')
+    parser.add_argument('address', help='address of the vault')
+    parser.add_argument('--all', help='show all orders including closed ones', action='store_true')
+    parser.add_argument('--json', help='output in JSON format', action='store_true')
+
+
+async def command_vault(args):
+    balances = vault_balances.get(args.address, {})
+    print(f'Vault {args.address} v{await VaultContract(args.address).version()}')
+    print(f'Balances:')
+    print(pretty_balances({k: (await adjust_decimals(k, v)) for k, v in balances.items()}))
+    print(f'Orders:')
+    i = 0
+    orders = []
+    while True:
+        key = OrderKey(args.address, i)
+        try:
+            order = Order.of(key)
+        except KeyError:
+            break
+        if args.all or order.is_open:
+            orders.append(order)
+        i += 1
+    await dump_orders(orders, args)
+
+
+def command_open_argparse(subparsers):
+    parser = subparsers.add_parser('open', help='show all open orders')
+    parser.add_argument('--json', help='output in JSON format', action='store_true')
+
+
+async def command_open(args):
+    await dump_orders([Order.of(key) for key in Order.open_orders], args)
+
+
+async def main(args: list):
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--chain-id', default=None)
+    subparsers = parser.add_subparsers(dest='command')
+    for name in globals():
+        if name.startswith('command_') and name.endswith('_argparse'):
+            globals()[name](subparsers)
+    parsed = parser.parse_args(args)
+    print(parsed)
+    try:
+        subcommand = globals()[f'command_{parsed.command}']
+    except KeyError:
+        parser.print_help()
+        exit(1)
+    await blockchain.connect()
+    db.connect()
+    db_state = DbState(BlockData.by_opt('db'))
+    with db.transaction():
+        state = await db_state.load()
+        # state.readonly = True
+        current_blockstate.set(state)
+        block = await get_block(state.root_hash)
+        current_block.set(block)
+        current_fork.set(state.root_fork)
+        await subcommand(parsed)
+
+
+if __name__ == '__main__':
+    execute(main, parse_args=True)
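examine.py discovers its subcommands by scanning module globals for command_<name>_argparse registrars and then dispatching to the matching command_<name> coroutine once the block state is loaded. Hypothetical invocations (the module path is inferred from the file location):

    # python -m dexorder.bin.examine vault 0xVAULT --all --json
    # python -m dexorder.bin.examine open --json
    # The dispatch is equivalent to:
    import argparse
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest='command')
    command_vault_argparse(subparsers)  # registers 'vault'
    command_open_argparse(subparsers)   # registers 'open'
    parsed = parser.parse_args(['vault', '0xVAULT', '--json'])
    assert parsed.command == 'vault' and parsed.json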
@@ -1,17 +1,20 @@
+import asyncio
 import inspect
 import logging
 import logging.config
+import sys
 import tomllib
 from asyncio import CancelledError
-from traceback import print_exception
-import asyncio
 from signal import Signals
-from typing import Coroutine
+from traceback import print_exception
+from typing import Coroutine, Callable, Union, Any

-import sys
+from omegaconf import OmegaConf

-from dexorder import configuration
+from dexorder import configuration, config
 from dexorder.alert import init_alerts
+from dexorder.configuration.schema import Config
+from dexorder.metric.metric_startup import start_metrics_server

 if __name__ == '__main__':
     raise Exception('this file is meant to be imported not executed')
@@ -25,7 +28,27 @@ async def _shutdown_coro(_sig, _loop):
         if task is not this_task:
             task.cancel()

-def execute(main:Coroutine, shutdown=None, *, parse_logging=True, parse_args=True):
+
+def split_args():
+    omegaconf_args = []
+    regular_args = []
+    for arg in sys.argv[1:]:
+        if '=' in arg and not arg.startswith('--'):
+            key, value = arg.split('=', 1)
+            if hasattr(Config, key):
+                omegaconf_args.append(arg)
+                continue
+        regular_args.append(arg)
+    return omegaconf_args, regular_args
+
+
+def execute(main:Callable[...,Coroutine[Any,Any,Any]], shutdown=None, *, parse_logging=True,
+            parse_args: Union[Callable[[list[str]],Any], type, bool]=True):
+    """
+    if parse_args is a function, then the command-line arguments are given to OmegaConf first, and any args parsed by
+    OmegaConf are stripped from the args list. The remaining args are then passed to parse_args(args)
+    if parse_args is a type, then the type is used to parse the extra command-line arguments using OmegaConf.
+    """
     # config
     configured = False
     if parse_logging:
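split_args() sends a bare key=value token to OmegaConf only when the key is an attribute of the structured Config schema; flags, positionals, and unknown keys stay in the regular list. A sketch of the partition, assuming rpc_url is a Config field and foo is not:

    import sys
    sys.argv = ['prog', 'rpc_url=test', 'vault', '--json', 'foo=bar']
    omegaconf_args, regular_args = split_args()
    assert omegaconf_args == ['rpc_url=test']               # known Config key
    assert regular_args == ['vault', '--json', 'foo=bar']   # everything else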
@@ -42,10 +65,29 @@ def execute(main:Coroutine, shutdown=None, *, parse_logging=True, parse_args=Tru
         logging.basicConfig(level=logging.INFO, stream=sys.stdout)
         log.setLevel(logging.DEBUG)
         log.info('Logging configured to default')
+    xconf = None
     if parse_args:
-        configuration.parse_args()
+        # NOTE: there is special command-line argument handling in config/load.py to get a config filename.
+        # The -c/--config flag MUST BE FIRST if present.
+        # The rest of the arguments are split by format into key=value for omegaconf and anything else is "regular args"
+        omegaconf_args, regular_args = split_args()
+        configuration.parse_args(omegaconf_args)
+        # must check for `type` before `callable`, because types are also callables
+        if isinstance(parse_args, type):
+            # noinspection PyUnboundLocalVariable
+            xconf = OmegaConf.merge(OmegaConf.structured(parse_args), OmegaConf.from_cli(regular_args))
+        elif callable(parse_args):
+            # noinspection PyUnboundLocalVariable
+            xconf = parse_args(regular_args)
+        else:
+            # just pass the regular args to main
+            xconf = regular_args

     init_alerts()
+
+    if config.metrics_port:
+        start_metrics_server()
+
     # loop setup
     loop = asyncio.get_event_loop()
     signals = Signals.SIGQUIT, Signals.SIGTERM, Signals.SIGINT
@@ -53,7 +95,14 @@ def execute(main:Coroutine, shutdown=None, *, parse_logging=True, parse_args=Tru
         loop.add_signal_handler(s, lambda sig=s: asyncio.create_task(_shutdown_coro(sig, loop), name=f'{s.name} handler'))

     # main
-    task = loop.create_task(main, name='main')
+    num_args = len(inspect.signature(main).parameters)
+    if num_args == 0:
+        coro = main()
+    elif num_args == 1:
+        coro = main(xconf)
+    else:
+        raise Exception(f'main() must accept 0 or 1 arguments, not {num_args}')
+    task = loop.create_task(coro, name='main')
     try:
         loop.run_until_complete(task)
     except CancelledError:
@@ -62,4 +62,4 @@ async def main():


 if __name__ == '__main__':
-    execute(main())
+    execute(main)
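execute() now receives the main callable instead of an already-created coroutine, so it can inspect the signature and decide whether to pass the parsed extras; this is why every __main__ block in this changeset flips from execute(main()) to execute(main). The parse_args modes, sketched from the docstring and dispatch above:

    async def main0(): ...         # 0 parameters: xconf is never passed
    async def main1(args): ...     # 1 parameter: receives xconf

    execute(main0)                 # fine: main0 takes no arguments
    execute(main1)                 # parse_args=True: main1 gets the leftover argv strings
    execute(main1, parse_args=lambda args: {'args': args})  # callable: main1 gets its return value
    # parse_args=<dataclass type> merges OmegaConf.structured(type) with the CLI,
    # as refill.py does later in this changeset with RefillConfig.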
@@ -2,6 +2,8 @@ import logging
 from asyncio import CancelledError

 from dexorder import db, blockchain
+from dexorder.accounting import initialize_accounting_runner
+from dexorder.alert import infoAlert
 from dexorder.base.chain import current_chain
 from dexorder.bin.executable import execute
 from dexorder.blockstate import current_blockstate
@@ -12,13 +14,15 @@ from dexorder.contract import get_contract_event
 from dexorder.contract.dexorder import get_dexorder_contract
 from dexorder.event_handler import (init, dump_log, handle_vault_created, handle_order_placed,
                                     handle_transfer, handle_swap_filled, handle_order_canceled, handle_order_cancel_all,
-                                    handle_uniswap_swaps, handle_vault_impl_changed)
+                                    handle_uniswap_swaps, handle_vault_impl_changed, update_metrics)
+from dexorder.marks import publish_marks
 from dexorder.memcache import memcache
 from dexorder.memcache.memcache_state import RedisState, publish_all
 from dexorder.order.executionhandler import handle_dexorderexecutions, execute_tranches
 from dexorder.order.triggers import activate_orders, end_trigger_updates
 from dexorder.runner import BlockStateRunner
-from dexorder.transactions import handle_transaction_receipts, finalize_transactions
+from dexorder.transactions import handle_transaction_receipts, cleanup_jobs
+from dexorder.vaultcreationhandler import handle_vault_creation_requests

 log = logging.getLogger('dexorder')
 LOG_ALL_EVENTS = False  # for debug todo config
@@ -57,6 +61,8 @@ def setup_logevent_triggers(runner):

     runner.add_callback(check_activate_orders)
     runner.add_callback(init)

+    runner.add_event_trigger(handle_transaction_receipts)
     runner.add_event_trigger(handle_vault_created, get_contract_event('Vault', 'VaultCreated'))
     runner.add_event_trigger(handle_vault_impl_changed, get_contract_event('Vault', 'VaultImplChanged'))
     runner.add_event_trigger(handle_order_placed, get_contract_event('VaultImpl', 'DexorderSwapPlaced'))
@@ -66,16 +72,26 @@ def setup_logevent_triggers(runner):
     runner.add_event_trigger(handle_order_canceled, get_contract_event('VaultImpl', 'DexorderSwapCanceled'))
     runner.add_event_trigger(handle_order_cancel_all, get_contract_event('VaultImpl', 'DexorderCancelAll'))

-    runner.add_event_trigger(handle_transaction_receipts)  # todo handle only the transactions that were posted to this block
     runner.add_event_trigger(handle_dexorderexecutions, executions)
+    runner.add_event_trigger(handle_vault_creation_requests)

     runner.add_callback(end_trigger_updates)
     runner.add_callback(execute_tranches)

+    # fee adjustments are handled offline by batch jobs
+    # runner.add_event_trigger(handle_fee_limits_changed, get_contract_event('IFeeManager', 'FeeLimitsChanged'))
+    # runner.add_event_trigger(handle_fees_changed, get_contract_event('IFeeManager', 'FeesChanged'))
+    # runner.add_callback(adjust_gas)
+
+    runner.add_callback(cleanup_jobs)
+    runner.add_callback(publish_marks)
+    runner.add_callback(update_metrics)


 # noinspection DuplicatedCode
 async def main():
-    await blockchain.connect()
+    infoAlert('Started', 'backend has started', log_level=logging.INFO)
+    await blockchain.connect(autosign=False)  # the transaction manager checks out accounts and releases them.
     redis_state = None
     state = None
     if memcache:
@@ -88,16 +104,23 @@ async def main():
     db_state = DbState(BlockData.by_opt('db'))
     with db.transaction():
         state = await db_state.load()
     if state is None:
         log.info('no state in database')
         if redis_state:
             await redis_state.clear()
     else:
         current_blockstate.set(state)
         current_fork.set(state.root_fork)
         global activate_orders_needed
         activate_orders_needed = True
         log.info(f'loaded state from db for root block {state.root_branch.height}')
+        if redis_state:
+            # load initial state
+            log.info('initializing redis with root state')
+            await redis_state.init(state, state.root_fork)
+
+    await initialize_accounting_runner()
+
     runner = BlockStateRunner(state, publish_all=publish_all if redis_state else None)
     setup_logevent_triggers(runner)
     # OHLC printing hard-disabled for main. Use the finaldata process.
@@ -108,7 +131,6 @@ async def main():
     runner.on_promotion.append(db_state.finalize)
     if redis_state:
         runner.on_head_update.append(redis_state.save)
-    runner.on_promotion.append(finalize_transactions)

     try:
         await runner.run()
@@ -118,4 +140,4 @@ async def main():


 if __name__ == '__main__':
-    execute(main())
+    execute(main)
@@ -95,7 +95,7 @@ async def write_metadata( pools, mirror_pools ):
     pool_dicts = [get_pool(addr) for (addr,_inverted) in mirror_pools]
     pool_dicts = await asyncio.gather(*pool_dicts)
     for data, addr, (_,inverted) in zip(pool_dicts, pools, mirror_pools):
-        data['x'] = dict(data=dict(uri=f'https://beta.dexorder.trade/ohlc/', chain=42161, symbol=addr, inverted=inverted))
+        data['x'] = dict(data=dict(uri=f'https://app.dexorder.com/ohlc/', chain=42161, symbol=addr, inverted=inverted))
     tokens = set(p['base'] for p in pool_dicts)
     tokens.update(p['quote'] for p in pool_dicts)
     tokens = await asyncio.gather(*[get_token(t) for t in tokens])
@@ -119,14 +119,14 @@ async def main():
     delay = max(0.010, config.polling)
     update_once = config.polling <= 0
     global source_w3
-    source_w3 = await create_w3(config.mirror_source_rpc_url, name='source', autosign=False)
+    source_w3 = await create_w3(config.mirror_source_rpc_url, name='source', autosign=False, archive_url=[])
     pools = (config.mirror_pools or [])
     if not pools:
         log.error('must configure mirror_pools')
         return
-    if config.account is None:
-        # Dev Account #5
-        config.account = '0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba'
+    if not config.accounts:
+        # Dev Account #6 0x976EA74026E726554dB657fA54763abd0C3a0aa9
+        config.accounts = ['0x92db14e403b83dfe3df233f83dfa3a0d7096f21ca9b0d6d6b8d88b2b4ec1564e']
     await blockchain.connect(name='target')

     mirror_addr = config.mirror_env
@@ -144,35 +144,28 @@ async def main():
     tokens = set(i[1] for i in pool_infos).union(i[2] for i in pool_infos)

     log.debug(f'Mirroring tokens')
-    txs = []
     for t in tokens:
         # noinspection PyBroadException
         try:
             info = await get_token_info(t)
             # anvil had trouble estimating the gas, so we hardcode it.
             tx = await mirrorenv.transact.mirrorToken(info, gas=1_000_000)
-            txs.append(tx.wait())
+            await tx.wait()
         except Exception:
             log.exception(f'Failed to mirror token {t}')
             exit(1)
-    results = await asyncio.gather(*txs)
-    if any(result['status'] != 1 for result in results):
-        log.error('Mirroring a token reverted.')
-        exit(1)
     log.info(f'Tokens deployed')

     log.debug(f'Mirroring pools {", ".join(pools)}')
-    txs = []
     for pool, info in zip(pools, pool_infos):
         # noinspection PyBroadException
         try:
             # anvil had trouble estimating the gas, so we hardcode it.
             tx = await mirrorenv.transact.mirrorPool(info, gas=5_500_000)
+            await tx.wait()
         except Exception:
             log.exception(f'Failed to mirror pool {pool}')
             exit(1)
-        txs.append(tx.wait())
-    await asyncio.gather(*txs)
     log.info('Pools deployed')

     mirror_pool_list = []
@@ -197,6 +190,7 @@ async def main():
     while True:
         wake_up = now() + delay
         # log.debug(f'querying {pool}')
+        tx = None
         try:
             price = await get_pool_price(pool)
             if price != last_prices.get(pool):
@@ -207,7 +201,10 @@ async def main():
                 addr, inverted = mirror_pools[pool]
                 log.debug(f'Mirrored {addr} {price}')
         except Exception as x:
-            log.debug(f'Could not update {pool}: {x}')
+            log.debug(f'Could not update {pool}: {x} {tx}')
+            if tx is not None:
+                tx.account.reset_nonce()
+                tx.account.release()
             continue
         try:
             pool = next(pool_iter)
@@ -223,4 +220,4 @@ async def main():


 if __name__ == '__main__':
-    execute(main())
+    execute(main)
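The tx = None / reset_nonce() additions give the mirror loop a recovery path: a failed mirror transaction may leave the account's cached nonce stale, so the nonce is cleared (forcing a re-query on next use) and the account is returned to the pool. The pattern in isolation, with send_price_update as a hypothetical stand-in for the transact call:

    tx = None
    try:
        tx = await send_price_update(pool, price)  # hypothetical transact wrapper
        await tx.wait()
    except Exception as x:
        log.debug(f'Could not update {pool}: {x} {tx}')
        if tx is not None:
            tx.account.reset_nonce()  # next next_nonce() re-queries the chain
            tx.account.release()      # idempotent return to the pool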
31
src/dexorder/bin/reconcile.py
Normal file
@@ -0,0 +1,31 @@
+import logging
+
+from sqlalchemy import select, text
+
+from dexorder import db, blockchain
+from dexorder.accounting import accounting_reconcile
+from dexorder.bin.executable import execute
+from dexorder.blocks import fetch_latest_block, current_block
+from dexorder.database.model import DbAccount
+
+log = logging.getLogger(__name__)
+
+
+async def main():
+    await blockchain.connect()
+    db.connect()
+    block = await fetch_latest_block()
+    current_block.set(block)
+    db.session.execute(text("LOCK TABLE account, accounting, reconciliation IN EXCLUSIVE MODE"))
+    try:
+        accounts = db.session.execute(select(DbAccount)).scalars().all()
+        for account in accounts:
+            await accounting_reconcile(account)
+        db.session.commit()
+        log.info('Reconciliation complete')
+    except:
+        db.session.rollback()
+        raise
+
+
+if __name__ == '__main__':
+    execute(main)
23
src/dexorder/bin/refill.py
Normal file
@@ -0,0 +1,23 @@
+import logging
+from dataclasses import dataclass
+
+from dexorder import blockchain, db, dec
+from dexorder.bin.executable import execute
+
+log = logging.getLogger(__name__)
+
+
+@dataclass
+class RefillConfig:
+    refill_level: dec
+    refill_accounts: list[str]
+
+
+async def main(refill_config: RefillConfig):
+    # await blockchain.connect()
+    # db.connect()
+    log.info(f'Refilling to {refill_config.refill_level:.18f} ETH')
+    log.info(f'Refilling accounts: {refill_config.refill_accounts}')
+
+
+if __name__ == '__main__':
+    execute(main, parse_args=RefillConfig)
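refill.py is the first user of the dataclass form of parse_args: OmegaConf builds a RefillConfig from key=value arguments and execute() passes it straight to main(). A hypothetical invocation and its rough equivalent (OmegaConf CLI list syntax assumed):

    # python -m dexorder.bin.refill refill_level=0.05 'refill_accounts=[0xaaa,0xbbb]'
    from omegaconf import OmegaConf
    xconf = OmegaConf.merge(OmegaConf.structured(RefillConfig),
                            OmegaConf.from_cli(['refill_level=0.05',
                                                'refill_accounts=[0xaaa,0xbbb]']))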
@@ -1,58 +1,75 @@
 import asyncio
+import itertools
 import logging
 from random import random
 from typing import Any, Optional, Union

 # noinspection PyPackageRequirements
-from aiohttp import ClientResponseError
+from aiohttp import ClientResponseError, ClientSession, ClientTimeout, TCPConnector
 from eth_typing import URI
 from hexbytes import HexBytes
 from web3 import WebsocketProviderV2, AsyncWeb3, AsyncHTTPProvider
 from web3.middleware.signing import async_construct_sign_and_send_raw_middleware
+from web3.providers.async_base import AsyncJSONBaseProvider
 from web3.types import RPCEndpoint, RPCResponse

 from .. import current_w3, Blockchain, config, Account, NARG
 from ..base.chain import current_chain
-from ..configuration import resolve_rpc_url
-from ..configuration.resolve import resolve_ws_url
 from ..contract import get_contract_data

+log = logging.getLogger(__name__)

-async def connect(rpc_url=None, account=NARG, autosign=True, name='default'):
+
+async def connect(rpc_url: Union[str,list[str]]=None, account=NARG, autosign=True, name='default', *, archive_url: Union[str,list[str]] = None):
     """
     connects to the rpc_url and configures context vars
     """
-    w3 = await create_w3(rpc_url, account, autosign, name)
+    w3 = await create_w3(rpc_url, account, autosign, name, archive_url=archive_url)
     current_w3.set(w3)
-    current_chain.set(Blockchain.get(await w3.eth.chain_id))
+    chain = Blockchain.get(await w3.eth.chain_id)
+    current_chain.set(chain)
     return w3


-async def create_w3(rpc_url=None, account=NARG, autosign=True, name='default'):
-    # todo create a proxy w3 that rotates among rpc urls
-    # self.w3s = tuple(await create_w3(url) for url in rpc_url_or_tag)
-    # chain_id = self.w3s[0].eth.chain_id
-    # assert all(w3.eth.chain_id == chain_id for w3 in self.w3s)  # all rpc urls must be the same blockchain
-    # self.w3iter = itertools.cycle(self.w3s)
-    url = resolve_rpc_url(rpc_url)
-    w3 = AsyncWeb3(RetryHTTPProvider(url))
-    # w3.middleware_onion.inject(geth_poa_middleware, layer=0)  # todo is this line needed?
-    # w3.middleware_onion.add(simple_cache_middleware)
-    # log.debug(f'middleware {list(w3.middleware_onion.middlewares)}')
+async def create_w3(rpc_url: Union[str,list[str]]=None, account=NARG, autosign=True, name='default', *, archive_url: Union[str,list[str]] = None) -> AsyncWeb3:
+    if rpc_url is None:
+        rpc_url = config.rpc_url
+    if isinstance(rpc_url, str):
+        rpc_urls = [resolve_rpc_url(s) for url in rpc_url.split(',') if (s:=url.strip()) != '']
+    elif isinstance(rpc_url, list):
+        rpc_urls = [resolve_rpc_url(s) for url in rpc_url if (s:=url.strip()) != '']
+    else:
+        raise ValueError("rpc_url must be a string or list of strings")
+
+    if archive_url is None:
+        archive_url = config.archive_url
+    if archive_url is None:
+        archive_urls = []
+    elif isinstance(archive_url, str):
+        archive_urls = [resolve_rpc_url(s) for url in archive_url.split(',') if (s:=url.strip()) != '']
+    elif isinstance(archive_url, list):
+        archive_urls = [resolve_rpc_url(s) for url in archive_url if (s:=url.strip()) != '']
+    else:
+        raise ValueError("archive_url must be a string or list of strings")
+
+    if not rpc_urls:
+        raise ValueError("No rpc_url configured")
+
+    provider = await RoundRobinHTTPProvider.construct(rpc_urls, archive_urls) if len(rpc_urls) > 1 or archive_urls else await RetryHTTPProvider.construct(rpc_urls[0])
+    w3 = AsyncWeb3(provider)
+    if archive_urls:
+        w3.middleware_onion.add(archive_intercept_middleware, 'block_number_intercept_middleware')
     w3.middleware_onion.remove('attrdict')
     w3.middleware_onion.add(clean_input_async, 'clean_input')
     w3.eth.Contract = _make_contract(w3.eth)
-    has_account = False
     if autosign:
-        a = Account.get(account)
-        if a is not None:
+        if account is NARG:
+            account = Account.get()
+        if account is not None:
             # noinspection PyTypeChecker
-            w3.middleware_onion.add(await async_construct_sign_and_send_raw_middleware(a))
-            w3.eth.default_account = a.address
-            has_account = True
-            log.info(f'{name} w3 configured to autosign as {a.address}')
-    if not has_account:
-        log.info(f'No account set for {name} w3')
+            w3.middleware_onion.add(await async_construct_sign_and_send_raw_middleware(account))
+            w3.eth.default_account = account.address
     return w3
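create_w3() now accepts a single URL, a comma-separated string, or a list, plus an optional archive list; more than one live URL (or any archive URL) selects the round-robin provider defined further down, otherwise the single retrying provider is used. A sketch with placeholder endpoints:

    w3 = await create_w3('https://rpc.example.org')  # single RPC: one RetryHTTPProvider

    w3 = await create_w3(['https://rpc1.example.org', 'https://rpc2.example.org'],
                         archive_url='https://archive.example.org')  # rotation + archive fallback

    # archive_url=[] (as the pool-mirroring script passes) suppresses archive fallback
    # even when config.archive_url is set, since only None falls through to the config.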
@@ -74,6 +91,30 @@ async def create_w3_ws(ws_url=None) -> AsyncWeb3:
     return w3


+def resolve_rpc_url(rpc_url=None):
+    if rpc_url is None:
+        rpc_url = config.rpc_url
+    if rpc_url == 'test':
+        return 'http://localhost:8545'
+    try:
+        return config.rpc_urls[rpc_url]  # look up aliases
+    except KeyError:
+        pass
+    return rpc_url
+
+
+def resolve_ws_url(ws_url=None):
+    if ws_url is None:
+        ws_url = config.ws_url
+    if ws_url == 'test':
+        return 'ws://localhost:8545'
+    try:
+        return config.rpc_urls[ws_url]  # look up aliases
+    except KeyError:
+        pass
+    return ws_url
+
+
 def _clean(obj):
     if type(obj) is HexBytes:
         return bytes(obj)
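The resolver helpers moved into this module and accept either a literal endpoint, the special value 'test', or an alias defined in the config.rpc_urls table. Sketch, assuming a config table with an 'arbitrum' entry:

    # assuming config.rpc_urls == {'arbitrum': 'https://arb1.arbitrum.io/rpc'}
    assert resolve_rpc_url('test') == 'http://localhost:8545'
    assert resolve_rpc_url('arbitrum') == 'https://arb1.arbitrum.io/rpc'
    assert resolve_rpc_url('https://rpc.example.org') == 'https://rpc.example.org'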
@@ -117,24 +158,88 @@ def _make_contract(w3_eth):
     return f


-log = logging.getLogger(__name__)
+#
+# ARCHIVE NODE MANAGEMENT
+#
+# Regular RPC nodes do not necessarily have the full state history available. The code below tracks the block heights
+# of historical data lookup failures on rpc nodes and automatically retries failed history requests on an archive_rpc
+# node. Archive RPC nodes are otherwise not used unless they are also listed in the rpc_url config.
+
+# Define methods that may carry a `block_identifier` parameter,
+# along with the required number of arguments to include it.
+ARCHIVE_METHODS = {
+    # Examples:
+    "eth_getBalance": 2,      # e.g., get_balance(address, block_identifier)
+    "eth_call": 2,            # e.g., contract.call(params, block_identifier)
+    "eth_getStorageAt": 3,    # e.g., get_storage_at(address, position, block_identifier)
+    # Add more methods as needed
+}
+
+ARCHIVE_ERRORS = {
+    'state recreation l2 gas depth limit exceeded',
+}
+
+
+def is_archive_method(method, params):
+    expected_args = ARCHIVE_METHODS.get(method, -1)
+    return len(params) == expected_args
+
+
+async def archive_intercept_middleware(make_request, w3):
+    """
+    Middleware to intercept any call with `block_number` and manage marking archive_fault_height
+    """
+    async def middleware(method, params):
+        # Only intercept relevant methods
+        is_archival = is_archive_method(method, params)
+        block_height = None
+        resp = await make_request(method, params)
+        if is_archival and 'error' in resp and resp['error']['message'] in ARCHIVE_ERRORS:
+            block_identifier = params[-1]
+            if block_identifier not in ('latest', 'pending',):
+                block_height = int(block_identifier, 16) if type(block_identifier) is str else int(params[-1])
+                raise ArchiveException(method, block_height)
+            else:
+                # noinspection PyUnboundLocalVariable
+                raise Exception(f'Got an archive fault using a block_identifier of {block_identifier}: {w3.provider.endpoint_uri} {method} {params}\n{resp}')
+        return resp
+
+    return middleware
+
+
+class ArchiveException (Exception):
+    def __init__(self, method, block_number):
+        super().__init__(f"Archive fault for method {method} at block {block_number}", block_number)
+        self.method = method
+        self.block_number = block_number
+
+
 class RetryHTTPProvider (AsyncHTTPProvider):

-    def __init__(self, endpoint_uri: Optional[Union[URI, str]] = None, request_kwargs: Optional[Any] = None) -> None:
+    @staticmethod
+    async def construct(endpoint_uri: Optional[Union[URI, str]] = None, request_kwargs: Optional[Any] = None, retries: int = 10):
+        result = RetryHTTPProvider(endpoint_uri, request_kwargs, retries)
+        connector = TCPConnector(limit=config.concurrent_rpc_connections)
+        session = ClientSession(connector=connector, timeout=ClientTimeout(config.rpc_timeout))
+        await result.cache_async_session(session)
+        return result
+
+    def __init__(self, endpoint_uri: Optional[Union[URI, str]] = None, request_kwargs: Optional[Any] = None, retries: int = 10) -> None:
         super().__init__(endpoint_uri, request_kwargs)
+
         self.in_flight = asyncio.Semaphore(config.concurrent_rpc_connections)
         self.rate_allowed = asyncio.Event()
         self.rate_allowed.set()
+        self.retries = retries

     async def make_request(self, method: RPCEndpoint, params: Any) -> RPCResponse:
         wait = 0
-        while True:
+        for _ in range(self.retries):
             try:
                 async with self.in_flight:
                     await self.rate_allowed.wait()
-                    # log.debug(f'Requesting RPC call {method}')
                     return await super().make_request(method, params)
             except ClientResponseError as e:
                 if e.status != 429:
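is_archive_method() treats a request as historical only when its parameter list is exactly long enough to include the block identifier; the middleware then escalates only for error messages listed in ARCHIVE_ERRORS and for concrete heights rather than 'latest'/'pending'. Behavior implied by the table above:

    assert is_archive_method('eth_getBalance', ['0xabc', '0x10'])  # address + block id
    assert not is_archive_method('eth_getBalance', ['0xabc'])      # no block id present
    assert not is_archive_method('eth_blockNumber', [])            # method not in the table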
@@ -150,5 +255,63 @@ class RetryHTTPProvider (AsyncHTTPProvider):
                 await asyncio.sleep(wait)
             finally:
                 self.rate_allowed.set()
-        # finally:
-        #     log.debug(f'Ended request of RPC call {method}')
+        raise IOError(f'Could not query rpc server after {self.retries} tries: {method} {params}')
+
+
+class RoundRobinHTTPProvider (AsyncJSONBaseProvider):
+
+    @staticmethod
+    async def construct(endpoint_uris: list[str], archive_uris: list[str]):
+        providers = [RetryHTTPProvider.construct(uri, retries=1) for uri in endpoint_uris]
+        archive_providers = [RetryHTTPProvider.construct(uri, retries=1) for uri in archive_uris]
+        providers = await asyncio.gather(*providers)
+        archive_providers = await asyncio.gather(*archive_providers)
+
+        # Ensure all instances share the same chain ID
+        chain_ids = await asyncio.gather(*(AsyncWeb3(provider).eth.chain_id for provider in itertools.chain(providers, archive_providers)))
+        if len(set(chain_ids)) != 1:
+            raise RuntimeError("All RPC URLs must belong to the same blockchain")
+
+        return RoundRobinHTTPProvider(providers, archive_providers)
+
+    def __init__(self, providers: list[RetryHTTPProvider], archive_providers: list[RetryHTTPProvider]):
+        super().__init__()
+        self.providers = providers
+        self.archive_providers = archive_providers
+        for provider in self.providers:
+            provider.archive_fault_height = 0
+        self.index = 0
+        self.archive_index = 0
+
+    async def make_request(self, method: RPCEndpoint, params: Any) -> RPCResponse:
+        provider = self._current()
+        is_archival = is_archive_method(method, params)
+        try:
+            if is_archival:
+                block_identifier = params[-1]
+                if block_identifier not in ('latest', 'pending',):
+                    block_height = int(block_identifier, 16) if type(block_identifier) is str else int(params[-1])
+                    if block_height <= provider.archive_fault_height:
+                        # this block is at least as old as another block that already failed to fetch history from this RPC
+                        raise ArchiveException(method, block_height)
+            return await provider.make_request(method, params)
+        except ArchiveException as e:
+            provider.archive_fault_height = max(provider.archive_fault_height, e.block_number)
+            if not self.archive_providers:
+                raise
+            return await self._current_archive().make_request(method, params)
+
+    def _current(self) -> RetryHTTPProvider:
+        if self.index >= len(self.providers):
+            self.index = 0
+        current_provider = self.providers[self.index]
+        self.index += 1
+        return current_provider
+
+    def _current_archive(self) -> RetryHTTPProvider:
+        if self.archive_index >= len(self.archive_providers):
+            self.archive_index = 0
+        current_provider = self.archive_providers[self.archive_index]
+        self.archive_index += 1
+        return current_provider
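RoundRobinHTTPProvider rotates ordinary requests across the live providers and remembers, per provider, the highest block height that ever produced an archive fault; any later historical request at or below that height skips straight to an archive node instead of failing again. A sketch with assumed endpoints:

    provider = await RoundRobinHTTPProvider.construct(
        ['https://rpc1.example.org', 'https://rpc2.example.org'],
        ['https://archive.example.org'])
    w3 = AsyncWeb3(provider)
    # Requests alternate rpc1, rpc2, rpc1, ... (each attempt uses retries=1).
    # If rpc1 faults on an eth_call at block 0x10, rpc1.archive_fault_height
    # becomes 16 and any later call it draws at height <= 16 is re-issued
    # on archive.example.org.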
@@ -17,10 +17,16 @@ from dexorder import current_w3, config, db, Blockchain
 from dexorder.base.block import Block, BlockInfo, latest_block
 from dexorder.base.chain import current_chain
 from dexorder.database.model import DbBlock
+from dexorder.util import hexbytes

 log = logging.getLogger(__name__)


+def blocktime():
+    """ timestamp of the most recent block seen in real-time, NOT the current block being worked on """
+    return latest_block[current_chain.get().id].timestamp
+
+
 async def get_block_timestamp(block_id: Union[bytes,int]) -> int:
     block = await get_block(block_id)
     if block is None:
@@ -35,6 +41,11 @@ class FetchLock:
         self.exception = None


+async def fetch_latest_block() -> Block:
+    blockdata = await current_w3.get().eth.get_block('latest')
+    return Block(current_chain.get(), blockdata)
+
+
 async def _fetch(fetch: FetchLock, chain_id: int, block_id: Union[int,bytes]) -> Optional[Block]:
     # try database first
     if config.cache_blocks_in_db and db:
@@ -75,8 +86,10 @@ def cache_block(block: Block, confirmed=False):
                             confirmed=confirmed, data=block.data))


-async def get_block(block_id: Union[bytes,int], *, chain_id=None) -> Block:
+async def get_block(block_id: Union[bytes,str,int], *, chain_id=None) -> Block:
     # log.debug(f'get_block {block_id}')
+    if type(block_id) is str:
+        block_id = hexbytes(block_id)
     if chain_id is None:
         chain_id = current_chain.get().id

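get_block() now also takes a hex-string hash, normalizing it with hexbytes() before the cache lookup, so callers can pass RPC-formatted hashes directly. Sketch with a placeholder hash:

    h = 'ab' * 32                                # placeholder 32-byte hash
    block_a = await get_block(bytes.fromhex(h))  # raw bytes, as before
    block_b = await get_block('0x' + h)          # hex string, normalized via hexbytes()
    block_c = await get_block(19_000_000)        # integer height, unchanged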
@@ -136,7 +149,11 @@ async def fetch_block(blockhash, *, chain_id=None) -> Optional[Block]:
         chain_id = current_chain.get().id
     response = await current_w3.get().provider.make_request('eth_getBlockByHash', [blockhash, False])
     # log.debug(f'fetch_block response {blockhash} {chain_id} {response}')
-    blockdict: BlockInfo = response['result']
+    try:
+        blockdict: BlockInfo = response['result']
+    except KeyError:
+        log.error(f'fetch_block got strange response {response}')
+        return None
     if blockdict is None:
         log.debug(f'block {blockhash} not found')
         return None
@@ -64,7 +64,7 @@ class BlockData (Generic[T]):
         if self.lazy_getitem:
             lazy = self.lazy_getitem(self, item)
             if lazy is not NARG:
-                state.set(state.root_fork, self.series, item, lazy)
+                state.set(state.root_fork, self.series, item, lazy, readonly_override=True)
                 result = lazy
         if result is NARG:
             raise KeyError
@@ -117,6 +117,14 @@ class BlockData (Generic[T]):
         fork = current_fork.get()
         state.delete_series(fork, self.series)

+    def upper_len(self):
+        """
+        Since record values may be marked DELETE there is not an efficient way to know the exact length of a series.
+        We could track it per-branch but instead we just return the number of keys, which is an upper bound on the
+        series length.
+        """
+        state = current_blockstate.get()
+        return state.upper_len(self.series)
+

 class BlockSet(Generic[T], Iterable[T], BlockData[T]):
     def __init__(self, series: Any, **tags):
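upper_len() trades accuracy for speed: it counts every key that has ever carried a diff in the series, including keys whose current value is DELETE, so it can only overestimate. Where an exact count is needed, walking the series remains the fallback (orders_by_key is a hypothetical BlockDict, and whether iteritems() skips DELETE markers is an assumption here):

    approx = orders_by_key.upper_len()  # cheap upper bound on a BlockDict's size
    exact = sum(1 for _ in state.iteritems(fork, orders_by_key.series))  # assumed-exact walk
    assert exact <= approx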
@@ -91,22 +91,25 @@ class DbState(SeriesCollection):
         root_fork = state.init_root_block(root_block)
         for series, data in self.datas.items():
             if data.opts.get('db') != 'lazy':
-                log.debug(f'loading series {series}')
                 t = data.type
+                count = 0
                 if t == DataType.SET:
                     # noinspection PyTypeChecker
                     var: BlockSet = BlockData.registry[series]
                     for row in db.session.query(SeriesSet).where(SeriesSet.chain == chain_id, SeriesSet.series == data.series2str(series)):
                         key = data.str2key(row.key)
-                        log.debug(f'load {series} {key}')
+                        # log.debug(f'load {series} {key}')
                         state.set(root_fork, var.series, key, None, overwrite=False)
+                        count += 1
                 elif t == DataType.DICT:
                     # noinspection PyTypeChecker
                     var: BlockDict = BlockData.registry[series]
                     for row in db.session.query(SeriesDict).where(SeriesDict.chain == chain_id, SeriesDict.series == data.series2str(series)):
                         key = data.str2key(row.key)
                         value = data.str2value(row.value)
-                        log.debug(f'load {series} {key} {value}')
+                        # log.debug(f'load {series} {key} {value}')
                         state.set(root_fork, var.series, key, value, overwrite=True)
+                        count += 1
+                log.debug(f'loaded {count} rows from db series {series}')
         log.debug(f'loaded db state from block {root_block}')
         return state
@@ -53,7 +53,10 @@ class BlockState:
     with a diff height of the root branch or older is always part of the finalized blockchain.
     """

+    class ReadOnlyError(Exception): ...
+
     def __init__(self):
+        self.readonly = False
         self._root_branch: Optional[Branch] = None
         self._root_fork: Optional[Fork] = None
         self.height: int = 0  # highest branch seen
@@ -80,6 +83,8 @@ class BlockState:

     @root_branch.setter
     def root_branch(self, value: Branch):
+        if self.readonly:
+            raise self.ReadOnlyError()
         self._root_branch = value
         self._root_fork = Fork([value])
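The readonly flag turns a loaded BlockState into a guard against accidental mutation: each mutating entry point below gains the same check, while set() (later in this hunk series) accepts readonly_override=True as a deliberate escape hatch for the lazy-load caching used by BlockData.__getitem__ earlier. The intended pattern, mirroring the commented-out line in examine.py:

    state = await db_state.load()
    state.readonly = True  # freeze: inspection tooling must not mutate the state
    try:
        state.set(state.root_fork, series, key, value)  # any ordinary write...
    except BlockState.ReadOnlyError:
        pass                                            # ...is rejected
    # lazy-loaded values may still be cached into the root fork:
    state.set(state.root_fork, series, key, value, readonly_override=True)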
@@ -92,6 +97,8 @@ class BlockState:
         return self._root_branch.head

     def init_root_block(self, root_block: Block) -> Fork:
+        if self.readonly:
+            raise self.ReadOnlyError()
         assert self.root_branch is None
         return self.add_branch(Branch.from_block(root_block))
@@ -113,6 +120,8 @@ class BlockState:
         should only be set to False when it is assured that the branch may be joined by height alone, because
         the branch join is known to be at a live-blockchain-finalized height.
         """
+        if self.readonly:
+            raise self.ReadOnlyError()
         assert branch.id not in self.branches_by_id

         if self.root_branch is None:
@@ -150,11 +159,13 @@ class BlockState:
         self.branches_by_height[branch.height].append(branch)
         self.branches_by_id[branch.id] = branch
         self.height = max(self.height, branch.height)
-        state_log.info(f'added branch {fork}')
+        # state_log.debug(f'added branch {fork}')
         return fork


     def remove_branch(self, branch: Branch, *, remove_series_diffs=True):
+        if self.readonly:
+            raise self.ReadOnlyError()
         if branch.height == self.height and len(self.branches_by_height[branch.height]) == 1:
             # this is the only branch at this height: compute the new lower height
             other_heights = [b.height for b in self.branches_by_id.values() if b is not branch]
@@ -179,6 +190,8 @@ class BlockState:
             difflist.remove(diff.entry)
         state_log.info(('removed' if remove_series_diffs else 'promoted')+f' branch {branch}')

+    def upper_len(self, series):
+        return len(self.diffs_by_series.get(series, {}))
+
     def get(self, fork: Fork, series, key, default=NARG):
         series_diffs = self.diffs_by_series.get(series)
@@ -208,7 +221,9 @@ class BlockState:
             return DELETE


-    def set(self, fork: Fork, series, key, value, overwrite=True):
+    def set(self, fork: Fork, series, key, value, overwrite=True, *, readonly_override=False):
+        if not readonly_override and self.readonly:
+            raise self.ReadOnlyError()
         # first look for an existing value
         branch = fork.branch
         diffs = self.diffs_by_series.get(series,{}).get(key)
@@ -234,6 +249,8 @@ class BlockState:
         return old_value

     def unload(self, fork: Optional[Fork], series, key):
+        if self.readonly:
+            raise self.ReadOnlyError()
         self.unloads[fork.branch_id].append((series, key))

     def iteritems(self, fork: Optional[Fork], series):
@@ -283,6 +300,8 @@ class BlockState:

         Returns the set of diffs for the promoted fork.
         """
|
||||||
|
if self.readonly:
|
||||||
|
raise self.ReadOnlyError()
|
||||||
found_root = False
|
found_root = False
|
||||||
promotion_branches = []
|
promotion_branches = []
|
||||||
for branch in reversed(fork.branches):
|
for branch in reversed(fork.branches):
|
||||||
@@ -348,6 +367,7 @@ class FinalizedBlockState:
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
|
self.readonly = False
|
||||||
self.data = {}
|
self.data = {}
|
||||||
self.by_hash = {}
|
self.by_hash = {}
|
||||||
|
|
||||||
@@ -359,6 +379,8 @@ class FinalizedBlockState:
|
|||||||
|
|
||||||
def set(self, _fork: Optional[Fork], series, key, value, overwrite=True):
|
def set(self, _fork: Optional[Fork], series, key, value, overwrite=True):
|
||||||
assert overwrite
|
assert overwrite
|
||||||
|
if self.readonly:
|
||||||
|
raise BlockState.ReadOnlyError()
|
||||||
self.data.setdefault(series, {})[key] = value
|
self.data.setdefault(series, {})[key] = value
|
||||||
|
|
||||||
def iteritems(self, _fork: Optional[Fork], series):
|
def iteritems(self, _fork: Optional[Fork], series):
|
||||||
@@ -371,6 +393,8 @@ class FinalizedBlockState:
|
|||||||
return self.data.get(series,{}).values()
|
return self.data.get(series,{}).values()
|
||||||
|
|
||||||
def delete_series(self, _fork: Optional[Fork], series: str):
|
def delete_series(self, _fork: Optional[Fork], series: str):
|
||||||
|
if self.readonly:
|
||||||
|
raise BlockState.ReadOnlyError()
|
||||||
del self.data[series]
|
del self.data[series]
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
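Taken together, these hunks thread a single `readonly` flag through every mutator on `BlockState` and `FinalizedBlockState`, with `readonly_override` as an explicit escape hatch on `set()`. A minimal sketch of the resulting behavior, assuming the API shown above (`fork`, `series`, and `key` are supplied by the caller):

```python
# Sketch of the new read-only guard (not code from the diff).
def demo_readonly(state: 'BlockState', fork, series, key):
    state.readonly = True
    try:
        state.set(fork, series, key, 42)        # any mutator now raises
    except BlockState.ReadOnlyError:
        pass                                    # expected: writers are locked out
    # privileged code can still bypass the flag explicitly:
    state.set(fork, series, key, 42, readonly_override=True)
```
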
@@ -1,3 +1,2 @@
 from .standard_accounts import test_accounts
 from .load import config, parse_args
-from .resolve import resolve_rpc_url
@@ -2,14 +2,15 @@ import os
 import tomllib
 from tomllib import TOMLDecodeError

+import sys
 from omegaconf import OmegaConf, DictConfig
 from omegaconf.errors import OmegaConfBaseException

 from .schema import Config
-from .standard_accounts import default_accounts_config

-schema = OmegaConf.structured(Config())
+schema = OmegaConf.structured(Config(), flags={'struct': False})

+_config_file = 'dexorder.toml'

 class ConfigException (Exception):
     pass
@@ -19,24 +20,14 @@ def load_config():
     # noinspection PyTypeChecker
     result:ConfigDict = OmegaConf.merge(
         schema,
-        load_accounts(),
         from_toml('.secret.toml'),
-        from_toml('dexorder.toml'),
+        from_toml(_config_file),
         from_toml('config.toml'),
         from_env()
     )
     return result


-def load_accounts():
-    accounts_conf = OmegaConf.create({'accounts': default_accounts_config})
-    try:
-        OmegaConf.merge(schema, accounts_conf)
-        return accounts_conf
-    except OmegaConfBaseException as _x:
-        raise ConfigException(f'Error while processing default accounts:\n{_x}')


 def from_env(prefix='DEXORDER_'):
     merge = {}
     for key, value in os.environ.items():
@@ -77,5 +68,12 @@ def parse_args(args=None):
 class ConfigDict (Config, DictConfig): # give type hints from Config plus methods from DictConfig
     pass

+# Special command-line argument handling to get a config file. The -c/--config flag MUST BE FIRST.
+if len(sys.argv) > 1 and (sys.argv[1] == '-c' or sys.argv[1] == '--config'):
+    if len(sys.argv) < 3:
+        raise ConfigException('Missing config file argument')
+    else:
+        _config_file = sys.argv[2]
+        sys.argv = [sys.argv[0], *sys.argv[3:]]

 config = load_config()
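The loader now merges sources in a fixed order and lets `-c/--config` swap the main TOML file at import time. A sketch of the resulting precedence and flag handling (the invocation is hypothetical; the file names are the ones hard-coded above):

```python
# Precedence for load_config(): later sources override earlier ones.
#   schema defaults < .secret.toml < _config_file < config.toml < environment
#
# The -c/--config pair must be the FIRST argv entries and is consumed before
# normal argument parsing, e.g. (hypothetical command line):
#   python -m dexorder -c /etc/dexorder/prod.toml --other-args ...
# afterwards _config_file == '/etc/dexorder/prod.toml' and sys.argv no longer
# contains the flag or its value.
```
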
@@ -1,25 +1,3 @@
 from .load import config


-def resolve_rpc_url(rpc_url=None):
-    if rpc_url is None:
-        rpc_url = config.rpc_url
-    if rpc_url == 'test':
-        return 'http://localhost:8545'
-    try:
-        return config.rpc_urls[rpc_url] # look up aliases
-    except KeyError:
-        pass
-    return rpc_url
-
-
-def resolve_ws_url(ws_url=None):
-    if ws_url is None:
-        ws_url = config.ws_url
-    if ws_url == 'test':
-        return 'ws://localhost:8545'
-    try:
-        return config.rpc_urls[ws_url] # look up aliases
-    except KeyError:
-        pass
-    return ws_url
@@ -11,13 +11,17 @@ from typing import Optional
 class Config:
     confirms: Optional[int] = None  # number of blocks before data is considered finalized. if None then the chain's default setting is used
     batch_size: Optional[int] = None  # max number of blocks to query in a single backfill rpc request
-    rpc_url: str = 'http://localhost:8545'
+    rpc_url: str = 'http://localhost:8545'  # may be a comma-separated list. may include names of entries in rpc_urls.
+    archive_url: str = ''  # these rpc URLs are not used unless a query uses an old block number that is prior to what the currently-assigned rpc_url can provide
     ws_url: Optional[str] = 'ws://localhost:8545'
     rpc_urls: Optional[dict[str,str]] = field(default_factory=dict)
     db_url: Optional[str] = 'postgresql://dexorder:redroxed@localhost/dexorder'
+    db_readonly: bool = False
     dump_sql: bool = False
     redis_url: Optional[str] = 'redis://localhost:6379'

+    metrics_port: Optional[int] = None

     cache_blocks_in_db: bool = False
     metadata: Optional[str] = None
     ohlc_dir: Optional[str] = None  # if empty string or None, then OHLC's are not saved to disk
@@ -25,19 +29,30 @@ class Config:

     concurrent_rpc_connections: int = 4
     parallel_logevent_queries: bool = True
+    rpc_timeout: float = 3
     polling: float = 0  # seconds between queries for a new block. 0 disables polling and uses a websocket subscription on ws_url instead
     backfill: int = 0  # if not 0, then runner will initialize an empty database by backfilling from the given block height. Use negative numbers to indicate a number of blocks before the present.

-    account: Optional[str] = None  # may be a private key or an account alias
-    accounts: Optional[dict[str,str]] = field(default_factory=dict)  # account aliases
+    accounts: list[str] = field(default_factory=list)  # the pool of accounts is used round-robin
+    adjuster: Optional[str] = None  # special account allowed to adjust fees. must NOT be listed in accounts.
+    order_gas: int = 425000  # cost to place a conditional order
+    execution_gas: int = 275000  # cost to perform a successful execution
+    order_gas_multiplier: float = 2.0  # multiply the gas amount by this to get the fee
+    exeution_gas_multiplier: float = 2.0  # multiply the gas amount by this to get the fee
+    fee_leeway = 0.1  # do not adjust fees if they are within this proportion
     min_gas: str = '0'

+    mark_publish_seconds: float = 60  # publish mark prices every this number of seconds

     # Order slashing
     slash_kill_count: int = 5
     slash_delay_base: float = 60  # one minute
     slash_delay_mul: float = 2  # double the delay each time
     slash_delay_max: int = 15 * 60

+    # Tranches are paused for this long after they trigger a slippage control
+    slippage_control_delay: float = 10  # matches the 10-second TWAP used by our uniswap router

     walker_name: str = 'default'
     walker_flush_interval: float = 300
     walker_stop: Optional[int] = None  # block number of the last block the walker should process
@@ -47,3 +62,7 @@ class Config:
     mirror_env: Optional[str] = None

     pagerduty: Optional[str] = None

+    stablecoins: list[str] = field(default_factory=list)  # primary stablecoins which are marked to $1
+    quotecoins: list[str] = field(default_factory=list)  # quote tokens like WETH that have stablecoin markets
+    nativecoin: Optional[str] = None  # used for accounting of native values. e.g. address of WETH
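The signing-account model changes shape here: a single `account` plus an alias map becomes a round-robin pool, with a dedicated `adjuster` key kept outside the pool. A placeholder sketch of the intended settings (the attribute names are the real `Config` fields above; real values belong in `.secret.toml`, not code):

```python
# Placeholder sketch only; not code from the diff.
config.accounts = ['<private key 1>', '<private key 2>']   # signer pool, used round-robin
config.adjuster = '<adjuster private key>'                 # fee adjuster; must NOT be in accounts
```
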
@@ -7,7 +7,7 @@ from web3.exceptions import Web3Exception
 from web3.types import TxReceipt, TxData

 from dexorder import current_w3, Account
-from dexorder.blockstate.fork import current_fork
+from dexorder.blocks import current_block
 from dexorder.util import hexstr

 log = logging.getLogger(__name__)
@@ -18,22 +18,23 @@ class ContractTransaction:
         # This is the standard RPC transaction dictionary
         self.tx = tx

-        # These three fields are populated only after signing
+        # These fields are populated only after signing
         self.id_bytes: Optional[bytes] = None
         self.id: Optional[str] = None
         self.data: Optional[bytes] = None
+        self.account: Optional[Account] = None

         # This field is populated only after the transaction has been mined
         self.receipt: Optional[TxReceipt] = None  # todo could be multiple receipts for different branches!

     def __repr__(self):
-        # todo this is from an old status system
-        receipt_status = 'IN_FLIGHT' if self.receipt is None else 'REVERTED' if self.receipt.status == 0 else self.receipt.blockNumber
-        return f'Transaction({self.id},{receipt_status})'
+        return f'tx-{self.id}'

     async def wait(self) -> TxReceipt:
         if self.receipt is None:
             self.receipt = await current_w3.get().eth.wait_for_transaction_receipt(self.id)
+            if self.account is not None:
+                self.account.release()
         return self.receipt

     async def sign(self, account: Account):
@@ -43,6 +44,7 @@ class ContractTransaction:
         self.data = signed['rawTransaction']
         self.id_bytes = signed['hash']
         self.id = hexstr(self.id_bytes)
+        self.account = account


 class DeployTransaction (ContractTransaction):
@@ -59,13 +61,14 @@ class DeployTransaction (ContractTransaction):


 def call_wrapper(addr, name, func):
-    async def f(*args, **kwargs):
+    async def f(*args, block_identifier=None, **kwargs):
+        if block_identifier is None:
+            try:
+                block_identifier = current_block.get().height
+            except (LookupError, AttributeError):
+                block_identifier = 'latest'
         try:
-            blockid = current_fork.get().head_identifier
-        except (LookupError, AttributeError):
-            blockid = 'latest'
-        try:
-            return await func(*args).call(block_identifier=blockid, **kwargs)
+            return await func(*args).call(block_identifier=block_identifier, **kwargs)
         except Web3Exception as e:
             e.args += addr, name
             raise e
@@ -74,19 +77,22 @@ def call_wrapper(addr, name, func):

 def transact_wrapper(addr, name, func):
     async def f(*args, **kwargs):
+        tx = await func(*args).build_transaction(kwargs)
+        ct = ContractTransaction(tx)
+        account = await Account.acquire()
+        if account is None:
+            raise ValueError(f'No account to sign transaction {addr}.{name}()')
         try:
-            tx = await func(*args).build_transaction(kwargs)
-            ct = ContractTransaction(tx)
-            account = Account.get()
-            if account is None:
-                raise ValueError(f'No account to sign transaction {addr}.{name}()')
             await ct.sign(account)
-            tx_id = await current_w3.get().eth.send_raw_transaction(ct.data)
-            assert tx_id == ct.id_bytes
-            return ct
-        except Web3Exception as e:
-            e.args += addr, name
-            raise e
+            try:
+                tx_id = await current_w3.get().eth.send_raw_transaction(ct.data)
+                assert tx_id == ct.id_bytes
+                return ct
+            except Web3Exception as e:
+                e.args += addr, name
+                raise e
+        finally:
+            account.release()
     return f


@@ -148,10 +154,14 @@ class ContractProxy:
     def __getattr__(self, item):
         if item == 'constructor':
             found = self.contract.constructor
-        elif item in self.contract.functions:
-            found = self.contract.functions[item]
         else:
-            raise AttributeError(item)
+            funcs = self.contract.functions
+            # In web3.py v6+, contract functions are exposed as attributes, not via __getitem__.
+            # Using getattr ensures we obtain the callable factory for the function; indexing may return None.
+            # Additionally, guard against unexpected None to fail fast with a clear error.
+            found = getattr(funcs, item, None)
+            if not callable(found):
+                raise AttributeError(f"Function '{item}' not found on contract {self._interface_name} at {self.address}")
         return self._wrapper(self.address, item, found)

     def __repr__(self):
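Two behavioral changes land here: contract calls can be pinned to an explicit block, and transactions acquire a signer from the account pool, releasing it in `finally` on failure or after the receipt arrives via `wait()`. A usage sketch for the call side (the token address and `'ERC20'` interface name are hypothetical; `balanceOf` is a standard ERC-20 view):

```python
# Sketch of the new per-call block pinning (placeholders, not code from the diff).
async def read_balances(vault_addr: str):
    token = ContractProxy('0xTokenAddress', 'ERC20')
    latest = await token.balanceOf(vault_addr)                          # pinned to current_block, else 'latest'
    older = await token.balanceOf(vault_addr, block_identifier=123456)  # explicit historical read
    return latest, older
```
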
@@ -1,24 +0,0 @@
-import logging
-
-from dexorder import db
-from dexorder.contract import ERC20, CONTRACT_ERRORS
-
-log = logging.getLogger(__name__)
-
-
-async def token_decimals(addr):
-    key = f'td|{addr}'
-    try:
-        return db.kv[key]
-    except KeyError:
-        # noinspection PyBroadException
-        try:
-            decimals = await ERC20(addr).decimals()
-        except CONTRACT_ERRORS:
-            log.warning(f'token {addr} has no decimals()')
-            decimals = 0
-        except Exception:
-            log.debug(f'could not get token decimals for {addr}')
-            return None
-        db.kv[key] = decimals
-        return decimals
@@ -3,6 +3,7 @@ import logging

 from eth_abi.packed import encode_packed
 from eth_utils import keccak, to_bytes, to_checksum_address
+from typing_extensions import Optional

 from dexorder.base.chain import current_chain
 from dexorder.contract import ContractProxy
@@ -20,11 +21,11 @@ log.info(f'Version: {version}')

 chain_info = version['chainInfo']

-for chain_id, info in chain_info.items():
-    chain_id = int(chain_id)
-    _factory[chain_id] = ContractProxy(info['factory'], 'VaultFactory')
-    _dexorder[chain_id] = ContractProxy(info['dexorder'], 'Dexorder')
-    _vault_init_code_hash[chain_id] = to_bytes(hexstr=info['vaultInitCodeHash'])
+for _chain_id, info in chain_info.items():
+    _chain_id = int(_chain_id)
+    _factory[_chain_id] = ContractProxy(info['factory'], 'VaultFactory')
+    _dexorder[_chain_id] = ContractProxy(info['dexorder'], 'Dexorder')
+    _vault_init_code_hash[_chain_id] = to_bytes(hexstr=info['vaultInitCodeHash'])

 def get_by_chain(d):
     return d[current_chain.get().id]
@@ -38,6 +39,14 @@ def get_dexorder_contract() -> ContractProxy:
 def get_vault_init_code_hash() -> bytes:
     return get_by_chain(_vault_init_code_hash)

+def get_mockenv() -> Optional[ContractProxy]:
+    addr = chain_info.get(str(current_chain.get().id),{}).get('mockenv')
+    return ContractProxy(addr, 'MockEnv') if addr is not None else None
+
+def get_mirrorenv() -> Optional[ContractProxy]:
+    addr = chain_info.get(str(current_chain.get().id),{}).get('mirrorenv')
+    return ContractProxy(addr, 'MirrorEnv') if addr is not None else None
+
 def vault_address(owner, num):
     salt = keccak(encode_packed(['address','uint8'],[owner,num]))
     contract_address = keccak(
@@ -57,3 +66,11 @@ def VaultContract(addr):

 def DexorderContract(addr):
     return ContractProxy(addr, 'Dexorder')
+
+
+async def get_fee_manager_contract():
+    factory_contract = get_factory_contract()
+    implementation_address = await factory_contract.implementation()
+    fee_manager_address = await ContractProxy(implementation_address, 'IVaultImpl').feeManager()
+    fee_manager = ContractProxy(fee_manager_address, 'IFeeManager')
+    return fee_manager
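`vault_address` derives a deterministic per-owner vault address from a CREATE2 salt of `(owner, num)`. For reference, the standard EIP-1014 derivation it applies, with the factory address and `vaultInitCodeHash` coming from `chainInfo` (a sketch, not code from the diff):

```python
# Standard CREATE2 address derivation (EIP-1014).
from eth_utils import keccak, to_checksum_address

def create2_address(factory: bytes, salt: bytes, init_code_hash: bytes) -> str:
    digest = keccak(b'\xff' + factory + salt + init_code_hash)
    return to_checksum_address(digest[12:])  # low 20 bytes of the hash
```
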
@@ -3,7 +3,7 @@ import logging
 from contextvars import ContextVar

 import sqlalchemy
-from sqlalchemy import Engine
+from sqlalchemy import Engine, event
 from sqlalchemy.orm import Session, SessionTransaction

 from .migrate import migrate_database
@@ -99,7 +99,7 @@ class Db:
         _session.set(None)

     # noinspection PyShadowingNames
-    def connect(self, url=None, migrate=True, reconnect=False, dump_sql=None):
+    def connect(self, url=None, migrate=True, reconnect=False, dump_sql=None, readonly:bool=None):
         if _engine.get() is not None and not reconnect:
             return None
         if url is None:
@@ -114,6 +114,19 @@ class Db:
         if dump_sql is None:
             dump_sql = config.dump_sql
         engine = sqlalchemy.create_engine(url, echo=dump_sql, json_serializer=json.dumps, json_deserializer=json.loads)
+
+        if readonly is None:
+            readonly = config.db_readonly
+        if readonly:
+            @event.listens_for(engine, "connect")
+            def set_readonly(dbapi_connection, _connection_record):
+                cursor = dbapi_connection.cursor()
+                try:
+                    cursor.execute("SET default_transaction_read_only = on;")
+                    log.info('database connection set to READ ONLY')
+                finally:
+                    cursor.close()
+
         if migrate:
             migrate_database(url)
         with engine.connect() as connection:
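The new `readonly` path enforces read-only behavior at the PostgreSQL session level rather than in application code: every pooled DBAPI connection runs `SET default_transaction_read_only = on` as it is created, so writes fail in the database itself with a read-only-transaction error. A usage sketch (the URL is a placeholder):

```python
# Sketch: open a read-only engine via the new flag, or set db_readonly = true
# in configuration. Any INSERT/UPDATE then errors inside PostgreSQL.
db.connect(url='postgresql://dexorder:***@localhost/dexorder', readonly=True)
```
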
@@ -1,12 +1,14 @@
 import dataclasses
+import json
 import math
-from typing import Union
+from typing import Union, Optional, Any

-from sqlalchemy import TypeDecorator, BIGINT
-from sqlalchemy.dialects.postgresql import BYTEA, JSONB
+from sqlalchemy import TypeDecorator, BIGINT, Dialect
+from sqlalchemy.dialects.postgresql import BYTEA, JSONB, NUMERIC
+from sqlalchemy.sql.type_api import _T
 from web3 import Web3

-from dexorder import Fixed2 as NativeFixed, Blockchain as NativeBlockchain
+from dexorder import Fixed2 as NativeFixed, Blockchain as NativeBlockchain, dec
 from dexorder.util import hexstr, hexbytes


@@ -74,6 +76,31 @@ def Fixed(bits, dbits, signed=False):
     return result


+class DecimalNumeric (TypeDecorator):
+    impl = NUMERIC
+
+    def process_bind_param(self, value, dialect):
+        return value
+
+    def process_result_value(self, value, dialect):
+        return None if value is None else dec(value)
+
+
+class Balances (TypeDecorator):
+    """
+    Dictionary of decimals keyed by strings
+    """
+    impl = JSONB
+
+    def process_bind_param(self, value: Optional[_T], dialect: Dialect) -> Any:
+        return json.dumps({k: str(v) for k,v in value.items()})
+
+    def process_result_value(
+        self, value: Optional[Any], dialect: Dialect
+    ) -> Optional[_T]:
+        return {k: dec(v) for k,v in json.loads(value).items()}
+
+
 class DataclassDictBase(TypeDecorator):
     impl = JSONB
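`Balances` stores token balances as JSON with string-encoded amounts and revives them as `Decimal`s on read, so no precision is lost to floats. The round-trip it implements, as a standalone sketch outside the ORM:

```python
# Standalone illustration of the Balances round-trip above.
import json
from decimal import Decimal as dec

balances = {'0xToken': dec('1.25')}
stored = json.dumps({k: str(v) for k, v in balances.items()})   # bind side
revived = {k: dec(v) for k, v in json.loads(stored).items()}    # result side
assert revived == balances
```
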
@@ -6,3 +6,8 @@ from .dbblock import DbBlock
 from .orderindex import OrderIndex
 from .pool import Pool
 from .token import Token
+from .ofac import OFAC, OFACAlerts
+from .accounting import Accounting, DbAccount
+from .vaultcreationrequest import VaultCreationRequest
+from .tos import TOSAcceptance
+from .sharedata import ShareData
src/dexorder/database/model/accounting.py (new file, 104 lines)
@@ -0,0 +1,104 @@
+import logging
+from datetime import datetime
+from decimal import Decimal as dec
+from enum import Enum, auto
+
+from sqlalchemy import ForeignKeyConstraint
+from sqlalchemy.ext.mutable import MutableDict
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+from typing_extensions import Optional
+
+from dexorder import now
+from dexorder.database.column import Blockchain
+from dexorder.database.column_types import DecimalNumeric, Balances
+from dexorder.database.model import Base
+
+log = logging.getLogger(__name__)
+
+
+class AccountingCategory (Enum):
+    Transfer = auto()
+    Income = auto()
+    Expense = auto()
+    Trade = auto()
+    Special = auto()
+
+class AccountingSubcategory (Enum):
+    # Income
+    OrderFee = auto()
+    GasFee = auto()
+    FillFee = auto()
+
+    # Expense
+    Admin = auto()  # contract deployments and upgrades, changing adjuster address, etc.
+    TransactionGas = auto()
+    VaultCreation = auto()
+    Execution = auto()
+    FeeAdjustment = auto()  # includes adjusting fee limits
+
+    # Transfer
+    # Transfers have no subcategories, but the note field will be the address of the other account. Both a debit and a
+    # credit entry will be created, one for each account participating in the transfer.
+
+    # Special Codes
+    InitialBalance = auto()
+
+
+class Accounting (Base):
+    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
+    time: Mapped[datetime] = mapped_column(default=now(), index=True)
+    chain_id: Mapped[int] = mapped_column(index=True)  # chain_id
+    account: Mapped[str] = mapped_column(index=True)
+    category: Mapped[AccountingCategory] = mapped_column(index=True)
+    subcategory: Mapped[Optional[AccountingSubcategory]] = mapped_column(index=True)
+    token: Mapped[str] = mapped_column(index=True)
+    amount: Mapped[dec] = mapped_column(DecimalNumeric)
+    value: Mapped[Optional[dec]] = mapped_column(DecimalNumeric)  # USD value of the amount. If NULL then accounting has not been done for this row.
+
+    tx_id: Mapped[Optional[str]]
+    note: Mapped[Optional[str]]  # format depends on the type of entry
+
+class AccountKind (Enum):
+    Admin = 0  # administrative (contract deployments etc)
+    OrderFee = 1  # receives order placement fees
+    GasFee = 2  # receives gas fees
+    FillFee = 3  # receives fill fees
+    Execution = 4  # spends gas
+
+
+class DbAccount(Base):
+    __tablename__ = "account"
+    chain: Mapped[Blockchain] = mapped_column(primary_key=True)
+    address: Mapped[str] = mapped_column(primary_key=True)
+    kind: Mapped[AccountKind] = mapped_column(index=True)
+    balances: Mapped[dict[str, dec]] = mapped_column(MutableDict.as_mutable(Balances), default=dict, server_default="{}")
+
+    reconciliations: Mapped[list["Reconciliation"]] = relationship(
+        "Reconciliation",
+        back_populates="account",
+        cascade="all, delete-orphan",
+    )
+
+# records balance snapshots that have been verified by matching all three sources of balance records:
+# 1. the on-chain balance as of the given block height (must be finalized)
+# 2. the DbAccount row balance (as saved in the single DbAccount row)
+# 3. the sum of all accounting rows for this address (check the summation of itemizations)
+class Reconciliation (Base):
+    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
+    chain: Mapped[Blockchain] = mapped_column(index=True)
+    address: Mapped[str] = mapped_column(index=True)  # address of the Account we are reconciling
+    accounting_id: Mapped[int] = mapped_column(index=True)  # ID of the last accounting row to be processed for this Reconciliation
+    height: Mapped[int] = mapped_column(index=True)  # blockchain height
+    balances: Mapped[dict[str, dec]] = mapped_column(Balances, default=dict, server_default="{}")
+
+    account: Mapped[DbAccount] = relationship(
+        "DbAccount",
+        back_populates="reconciliations",
+        foreign_keys=[chain, address],
+    )
+
+    __table_args__ = (
+        ForeignKeyConstraint(
+            ["chain", "address"], ["account.chain", "account.address"], ondelete="CASCADE"
+        ),
+    )
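The `Transfer` comment above implies a double-entry convention: each transfer produces one debit row and one credit row, each carrying the counterparty address in `note`. A hedged sketch of what writing such a pair could look like (the constructor-kwarg style and all values are assumptions, not code from the diff):

```python
# Hypothetical double-entry helper for Transfer rows; session handling elided.
def transfer_rows(chain_id: int, sender: str, receiver: str, token: str, amount):
    debit = Accounting(chain_id=chain_id, account=sender,
                       category=AccountingCategory.Transfer,
                       token=token, amount=-amount, note=receiver)
    credit = Accounting(chain_id=chain_id, account=receiver,
                        category=AccountingCategory.Transfer,
                        token=token, amount=amount, note=sender)
    return debit, credit
```
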
src/dexorder/database/model/ofac.py (new file, 26 lines)
@@ -0,0 +1,26 @@
+import logging
+from datetime import datetime
+
+from sqlalchemy.orm import Mapped
+from sqlalchemy.testing.schema import mapped_column
+from typing_extensions import Optional
+
+from dexorder import now
+from dexorder.database.model import Base
+
+log = logging.getLogger(__name__)
+
+# todo check broad country restrictions
+
+class OFAC (Base):
+    address: Mapped[str] = mapped_column(primary_key=True)
+
+class OFACAlerts (Base):
+    """
+    This table records any time when a banned address tries to use our service.
+    """
+    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
+    time: Mapped[datetime] = mapped_column(default=now())
+    address: Mapped[str]
+    ip: Mapped[Optional[str]]
src/dexorder/database/model/sharedata.py (new file, 12 lines)
@@ -0,0 +1,12 @@
+import logging
+
+from sqlalchemy.dialects.postgresql import JSONB
+from sqlalchemy.orm import Mapped, mapped_column
+
+from dexorder.database.model import Base
+
+log = logging.getLogger(__name__)
+
+class ShareData (Base):
+    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
+    data: Mapped[dict] = mapped_column(JSONB)
@@ -39,13 +39,13 @@ class Token (Base):

     chain: Mapped[Blockchain] = mapped_column(primary_key=True)
     address: Mapped[Address] = mapped_column(primary_key=True)
-    name: Mapped[str]
+    name: Mapped[str]  # indexed below
     symbol: Mapped[str] = mapped_column(index=True)
     decimals: Mapped[Uint8]
     approved: Mapped[bool] = mapped_column(index=True)

     __table_args__ = (
-        Index('idx_name', 'name', postgresql_using='gist'),  # full text search on name
+        Index('ix_token_name', 'name', postgresql_using='gist'),  # full text search on name
     )

src/dexorder/database/model/tos.py (new file, 13 lines)
@@ -0,0 +1,13 @@
+from datetime import datetime
+
+from sqlalchemy.orm import Mapped, mapped_column
+
+from dexorder.database.model import Base
+
+
+# We do not index this table since it is warehouse information and rarely if ever queried
+class TOSAcceptance (Base):
+    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
+    ipaddr: Mapped[str]
+    time: Mapped[datetime]
+    version: Mapped[datetime]
@@ -5,7 +5,7 @@ from typing import Optional
 import sqlalchemy as sa
 from sqlalchemy.orm import mapped_column, Mapped

-from dexorder.base import TransactionRequest, transaction_request_registry
+from dexorder.base import TransactionRequest, transaction_request_deserializers
 from dexorder.database.column import Dict, Bytes, UUID_PK, Blockchain
 from dexorder.database.column_types import DataclassDict
 from dexorder.database.model import Base
@@ -18,6 +18,7 @@ class TransactionJobState (Enum):
     Sent = 's'  # tx has been delivered to a node
     Mined = 'z'  # mined on at least one fork, whether reverted or not. todo handle forks that didnt confirm: receipts are per-fork!
     Error = 'x'  # an exception has prevented this job from sending a transaction
+    Declined = 'd'  # the transaction builder successfully returned None


 # noinspection PyProtectedMember
@@ -27,7 +28,7 @@ TransactionJobStateColumnType = sa.Enum(TransactionJobState)

 def deserialize_transaction_request(**d):
     t = d['type']
-    Class = transaction_request_registry.get(t)
+    Class = transaction_request_deserializers.get(t)
     if Class is None:
         raise ValueError(f'No TransactionRequest for type "{t}"')
     # noinspection PyArgumentList
src/dexorder/database/model/vaultcreationrequest.py (new file, 25 lines)
@@ -0,0 +1,25 @@
+import logging
+from datetime import datetime
+from typing import Optional
+
+from sqlalchemy.dialects.postgresql import INET
+from sqlalchemy.orm import mapped_column, Mapped
+from sqlalchemy.schema import Index
+
+from dexorder.database.column import Blockchain
+from dexorder.database.model import Base
+
+log = logging.getLogger(__name__)
+
+class VaultCreationRequest (Base):
+    chain: Mapped[Blockchain] = mapped_column(primary_key=True)
+    owner: Mapped[str] = mapped_column(primary_key=True)
+    num: Mapped[int] = mapped_column(primary_key=True)
+    time: Mapped[datetime]
+    ipaddr: Mapped[str] = mapped_column(INET)
+    vault: Mapped[Optional[str]] = mapped_column(default=None, server_default=None)  # filled in after the vault is indeed created
+
+    __table_args__ = (
+        Index("ix_vault_address_not_null", "vault", postgresql_where="vault IS NOT NULL"),
+    )
@@ -3,20 +3,23 @@ import logging

 from web3.types import EventData

-from dexorder import current_pub, minutely
+from dexorder import db, metric, current_w3, timestamp
+from dexorder.accounting import accounting_fill, accounting_placement
 from dexorder.base.chain import current_chain
 from dexorder.base.order import TrancheKey, OrderKey
 from dexorder.base.orderlib import SwapOrderState
 from dexorder.blocks import get_block_timestamp
-from dexorder.contract.dexorder import vault_address, VaultContract, get_factory_contract
+from dexorder.blockstate import current_blockstate
+from dexorder.contract.dexorder import VaultContract, get_factory_contract
+from dexorder.database.model import VaultCreationRequest
 from dexorder.impls import get_impl_version
 from dexorder.ohlc import ohlcs
 from dexorder.order.orderstate import Order
 from dexorder.order.triggers import (OrderTriggers, activate_order, update_balance_triggers, start_trigger_updates,
-                                     update_price_triggers)
+                                     update_price_triggers, TimeTrigger, PriceLineTrigger)
 from dexorder.pools import new_pool_prices, pool_prices, get_uniswap_data
 from dexorder.util import hexstr
-from dexorder.vault_blockdata import vault_owners, adjust_balance, MAX_VAULTS, verify_vault
+from dexorder.vault_blockdata import vault_owners, adjust_balance, verify_vault, publish_vaults

 log = logging.getLogger(__name__)
@@ -31,17 +34,20 @@ def init():


 async def handle_order_placed(event: EventData):
-    # event DexorderPlaced (uint64 startOrderIndex, uint8 numOrders);
+    # event DexorderSwapPlaced (uint64 startOrderIndex, uint8 numOrders, uint);
     addr = event['address']
-    start_index = int(event['args']['startOrderIndex'])
-    num_orders = int(event['args']['numOrders'])
-    # todo accounting
-    order_fee = int(event['args']['orderFee'])
-    gas_fee = int(event['args']['gasFee'])
-    log.debug(f'DexorderPlaced {addr} {start_index} {num_orders}')
+    try:
+        start_index = int(event['args']['startOrderIndex'])
+        num_orders = int(event['args']['numOrders'])
+    except KeyError:
+        log.warning(f'Rogue DexorderSwapPlaced in tx {hexstr(event["transactionHash"])}')
+        return
+    log.debug(f'DexorderSwapPlaced {addr} {start_index} {num_orders}')
     if not await verify_vault(addr):
         log.warning(f'Discarding order from rogue vault {addr}.')
         return
+    await accounting_placement(event)
+    metric.orders.inc()
     contract = None
     for index in range(start_index, start_index+num_orders):
         key = OrderKey(addr, index)
@@ -52,7 +58,7 @@ async def handle_order_placed(event: EventData):
         log.debug(f'raw order status {obj}')
         order = Order.create(addr, index, event['transactionHash'], obj)
         await activate_order(order)
-        log.debug(f'new order{order}')
+        log.debug(f'new order {order.key} {await order.pprint()}')


 async def handle_swap_filled(event: EventData):
@@ -60,17 +66,24 @@ async def handle_swap_filled(event: EventData):
     log.debug(f'DexorderSwapFilled {event}')
     args = event['args']
     vault = event['address']
-    order_index = args['orderIndex']
-    tranche_index = args['trancheIndex']
-    amount_in = args['amountIn']
-    amount_out = args['amountOut']
-    fill_fee = args['fillFee']
-    next_execution_time = args['nextExecutionTime']
+    try:
+        order_index = args['orderIndex']
+        tranche_index = args['trancheIndex']
+        amount_in = args['amountIn']
+        amount_out = args['amountOut']
+        fill_fee = args['fillFee']
+        next_execution_time = args['nextExecutionTime']
+    except KeyError:
+        log.warning(f'Rogue DexorderSwapFilled in tx {hexstr(event["transactionHash"])}')
+        return
     try:
         order: Order = Order.of(vault, order_index)
     except KeyError:
         log.warning(f'DexorderSwapFilled IGNORED due to missing order {vault} {order_index}')
         return
+    value = await accounting_fill(event, order.order.tokenOut)
+    if value is not None:
+        metric.volume.inc(float(value))
     order.status.trancheStatus[tranche_index].activationTime = next_execution_time  # update rate limit
     try:
         triggers = OrderTriggers.instances[order.key]
@@ -109,13 +122,11 @@ async def handle_order_cancel_all(event: EventData):


 async def handle_transfer(transfer: EventData):
-    # todo handle native transfers incl gas for token transfers
     # log.debug(f'Transfer {transfer}')
     from_address = transfer['args']['from']
     to_address = transfer['args']['to']
-    if to_address == from_address:
-        return
     amount = int(transfer['args']['value'])
+    token_address = transfer['address']
     if to_address in vault_owners:
         log.debug(f'deposit {to_address} {amount}')
         vault = to_address
@@ -125,10 +136,12 @@ async def handle_transfer(transfer: EventData):
     else:
         vault = None
     if vault is not None:
-        token_address = transfer['address']
         await adjust_balance(vault, token_address, amount)
-        await update_balance_triggers(vault, token_address, amount)
+        await update_balance_triggers(vault, token_address)
+    # This would double-count fill fees. Instead, we book the transfer when sending money to the account as part of a refill.
+    # if is_tracked_address(to_address):
+    #     # noinspection PyTypeChecker
+    #     await accounting_transfer(transfer, token_address, from_address, to_address, amount, adjust_decimals=True)

 async def handle_uniswap_swaps(swaps: list[EventData]):
     # asynchronously prefetch the block timestamps we'll need
@@ -159,20 +172,26 @@ async def handle_vault_created(created: EventData):
     except KeyError:
         log.debug('couldnt parse event data for VaultCreated', created)
         return

+    # stop trying to create the vault
+    chain_id = current_chain.get().id
+    db_req = db.session.get(VaultCreationRequest, (chain_id, owner, num))
+    if db_req is None:
+        log.warning(f'could not find vault creation request {chain_id}|{owner}|{num}')
+    else:
+        db_req.vault = addr
+
     # Verify the authenticity of the vault. We are permissive on Mockchain due to irregular restarts of various components
     if not await verify_vault(addr, owner, num):
         log.warning(f'Discarding rogue vault {addr}')
         return
     vault_owners[addr] = owner
     log.debug(f'VaultCreated {owner} #{num} => {addr}')
-    vaults = []
-    for num in range(MAX_VAULTS):
-        addr = vault_address(owner, num)
-        if addr in vault_owners:
-            vaults.append(addr)
-        else:
-            break
-    current_pub.get()(f'{current_chain.get().id}|{owner}', 'vaults', vaults)
+    publish_vaults(chain_id, owner)
+    # BlockData doesn't have an easy way to calculate exact sizes because some keys could hold DELETE values, so
+    # this is actually an upper limit on the size.
+    approx_size = len(current_blockstate.get().diffs_by_series)
+    metric.vaults.set(approx_size)


 async def handle_vault_impl_changed(upgrade: EventData):
@@ -193,3 +212,22 @@ async def handle_vault_impl_changed(upgrade: EventData):
     version = await get_impl_version(impl)
     log.debug(f'Vault {addr} upgraded to impl version {version}')

+
+slow_metric_update = 0
+async def update_metrics():
+
+    # called at the end of the runloop in the worker context
+    metric.vaults.set(vault_owners.upper_len())
+    metric.open_orders.set(Order.open_orders.upper_len())
+    metric.triggers_time.set(len(TimeTrigger.all))
+    metric.triggers_line.set(sum(len(s) for s in PriceLineTrigger.by_pool.values()))
+
+    # slow updates
+    global slow_metric_update
+    now = timestamp()
+    if now - slow_metric_update >= 60:
+        slow_metric_update = now
+
+        # put slow updates here
+        price = await current_w3.get().eth.gas_price
+        metric.gas_price.observe(price)
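`update_metrics()` is a new hook meant to run at the end of each worker runloop iteration, refreshing cheap gauges on every pass and slower ones (gas price) at most once a minute. A hypothetical wiring sketch; only the end-of-iteration call is implied by the diff:

```python
# Hypothetical worker wiring; process_next_block is a placeholder name.
async def runloop():
    while True:
        await process_next_block()   # placeholder for the real per-block work
        await update_metrics()       # cheap gauges each pass; slow ones every 60s
```
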
src/dexorder/feemanager.py (new file, 31 lines)
@@ -0,0 +1,31 @@
+import asyncio
+import logging
+
+from dexorder.contract import ContractProxy
+from dexorder.contract.dexorder import get_factory_contract, get_fee_manager_contract
+
+log = logging.getLogger(__name__)
+
+class FeeManager (ContractProxy):
+    _instance: 'FeeManager' = None
+
+    @staticmethod
+    async def get():
+        if FeeManager._instance is None:
+            fee_manager = await get_fee_manager_contract()
+            order_fee_account_addr, gas_fee_account_addr, fill_fee_account_addr = await asyncio.gather(
+                fee_manager.orderFeeAccount(),
+                fee_manager.gasFeeAccount(),
+                fee_manager.fillFeeAccount()
+            )
+            FeeManager._instance = FeeManager(fee_manager.address, order_fee_account_addr, gas_fee_account_addr, fill_fee_account_addr)
+        return FeeManager._instance
+
+    def __init__(self, address, order_fee_account_addr, gas_fee_account_addr, fill_fee_account_addr):
+        super().__init__(address, 'IFeeManager')
+        self.order_fee_account_addr = order_fee_account_addr
+        self.gas_fee_account_addr = gas_fee_account_addr
+        self.fill_fee_account_addr = fill_fee_account_addr
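`FeeManager.get()` is an async lazy singleton: the first call resolves the fee-manager contract (factory, then implementation, then `feeManager()`), fetches its three fee accounts concurrently, and caches the wrapper. A usage sketch:

```python
# Usage sketch for the lazy FeeManager singleton defined above.
async def show_fee_accounts():
    fm = await FeeManager.get()      # first call resolves and caches
    log.info(f'order fees -> {fm.order_fee_account_addr}')
    log.info(f'gas fees   -> {fm.gas_fee_account_addr}')
    log.info(f'fill fees  -> {fm.fill_fee_account_addr}')
```
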
@@ -146,8 +146,11 @@ class OHLCFile:
             t, o, c = self.cur
             self.cur = t, o, max(o,c,price), min(o,c,price), price
         else:
-            t, o, h, line, c = self.cur
-            self.cur = t, o, max(h,line,price), min(h,line,price), price
+            try:
+                t, o, h, line, c = self.cur
+                self.cur = t, o, max(h,line,price), min(h,line,price), price
+            except ValueError:
+                log.error(f'Could not unpack cur {self.cur}')

     @staticmethod
     def row_bytes(row):
src/dexorder/gas_fees.py (new file, 163 lines; the excerpt below is truncated)
@@ -0,0 +1,163 @@
+import logging
+import math
+from dataclasses import dataclass
+from typing import Optional
+
+import eth_account
+from web3.types import EventData
+
+from dexorder import current_w3, config, Account
+from dexorder.accounting import accounting_transaction_gas
+from dexorder.alert import warningAlert
+from dexorder.base import TransactionReceiptDict, TransactionRequest
+from dexorder.contract.contract_proxy import ContractTransaction
+from dexorder.contract.dexorder import get_fee_manager_contract
+from dexorder.database.model import TransactionJob
+from dexorder.database.model.accounting import AccountingSubcategory
+from dexorder.transactions import TransactionHandler, submit_transaction_request
+from dexorder.util.convert import to_base_exp
+
+log = logging.getLogger(__name__)
+
+order_fee: Optional[int] = None
+gas_fee: Optional[int] = None
+fill_fee_half_bps: Optional[int] = None
+
+order_fee_limit: Optional[int] = None
+gas_fee_limit: Optional[int] = None
+fill_fee_half_bps_limit: Optional[int] = None
+
+adjuster_account: Optional[Account] = None
+adjuster_locked = False
+
+
+@dataclass
+class AdjustFeeTransactionRequest (TransactionRequest):
+    TYPE = 'adjust'
+
+    order_fee: int
+    order_exp: int
+    gas_fee: int
+    gas_exp: int
+    fill_fee_half_bps: int
+
+    # noinspection PyShadowingNames
+    def __init__(self, order_fee: int, order_exp: int, gas_fee: int, gas_exp: int, fill_fee_half_bps: int):
+        super().__init__(AdjustFeeTransactionRequest.TYPE, (order_fee, order_exp, gas_fee, gas_exp, fill_fee_half_bps))
+        self.order_fee = order_fee
+        self.order_exp = order_exp
+        self.gas_fee = gas_fee
+        self.gas_exp = gas_exp
+        self.fill_fee_half_bps = fill_fee_half_bps
+
+    @property
+    def schedule(self):
+        return self.order_fee, self.order_exp, self.gas_fee, self.gas_exp, self.fill_fee_half_bps
+
+
+class AdjustFeeTransactionHandler (TransactionHandler):
+
+    async def build_transaction(self, job_id: int, tr: TransactionRequest) -> Optional[ContractTransaction]:
+        tr: AdjustFeeTransactionRequest
+        fee_manager = await get_fee_manager_contract()
+        return await fee_manager.build.setFees(tr.schedule)
+
+    async def complete_transaction(self, job: TransactionJob, receipt: TransactionReceiptDict) -> None:
+        await accounting_transaction_gas(receipt, AccountingSubcategory.FeeAdjustment)  # vault creation gas
+
+    async def transaction_exception(self, job: TransactionJob, e: Exception) -> None:
+        pass
+
+    async def acquire_account(self) -> Optional[Account]:
+        global adjuster_account, adjuster_locked
+        if adjuster_locked:
+            return None
+        if config.adjuster is None:
+            return None
+        if adjuster_account is None:
+            local_account = eth_account.Account.from_key(config.adjuster)
+            adjuster_account = Account(local_account)
+        adjuster_locked = True
+        return adjuster_account
+
+    async def release_account(self, account: Account):
+        global adjuster_locked
+        adjuster_locked = False
+
+
+async def ensure_gas_fee_data():
+    global order_fee, gas_fee, fill_fee_half_bps, order_fee_limit, gas_fee_limit, fill_fee_half_bps_limit
+    if order_fee is None or gas_fee is None or order_fee_limit is None or gas_fee_limit is None:
+        fee_manager = await get_fee_manager_contract()
+        order_fee_base, order_fee_exp, gas_fee_base, gas_fee_exp, fill_fee_half_bps = await fee_manager.fees()
+        order_fee = order_fee_base << order_fee_exp
+        gas_fee = gas_fee_base << gas_fee_exp
+        order_fee_base_limit, order_fee_exp_limit, gas_fee_base_limit, gas_fee_exp_limit, fill_fee_half_bps_limit = await fee_manager.fee_limits()
+        order_fee_limit = order_fee_base_limit << order_fee_exp_limit
+        gas_fee_limit = gas_fee_base_limit << gas_fee_exp_limit
+        return fee_manager
+    return None
+
+
+async def adjust_gas():
+    if not config.adjuster:
+        return
+    w3 = current_w3.get()
+    price = await w3.eth.gas_price
+    new_order_fee = round(config.order_gas * config.order_gas_multiplier * price)
+    new_gas_fee = round(config.order_gas * config.order_gas_multiplier * price)
+    log.debug(f'avg gas price: {price/10**18}')
+    await ensure_gas_fee_data()
+    global order_fee, gas_fee
+    if abs(1 - new_order_fee / order_fee) >= config.fee_leeway or abs(1 - new_gas_fee / gas_fee) >= config.fee_leeway:
+        if new_order_fee > order_fee_limit or new_gas_fee > gas_fee_limit:
+            warningAlert('Fees Hit Limits', 'Adjusting fees would exceed existing fee limits.')
+            new_order_fee = min(order_fee_limit, new_order_fee)
+            new_gas_fee = min(gas_fee_limit, new_gas_fee)
+        # TODO check if the new fee is adjusting upwards too fast and cap it
+        # TODO check if the new fees are already proposed and pending
+        # if new_order_fee/order_fee - 1 >
+        if new_order_fee != order_fee or new_gas_fee != gas_fee:
+            log.info(f'adjusting gas fees: orderFee={new_order_fee/10**18}, gasFee={new_gas_fee/10**18}')
+            new_order_fee_base, new_order_fee_exp = to_base_exp(new_order_fee, math.floor)
+            new_gas_fee_base, new_gas_fee_exp = to_base_exp(new_gas_fee, math.floor)
+            req = AdjustFeeTransactionRequest(new_order_fee_base, new_order_fee_exp,
                                               new_gas_fee_base, new_gas_fee_exp, fill_fee_half_bps)
submit_transaction_request(req)
|
||||||
|
|
||||||
|
|
||||||
|
# noinspection DuplicatedCode
|
||||||
|
async def handle_fee_limits_changed(event: EventData):
|
||||||
|
try:
|
||||||
|
fees = event['args']['fees']
|
||||||
|
new_order_fee_limit = fees['orderFee']
|
||||||
|
new_order_exp_limit = fees['orderExp']
|
||||||
|
new_gas_fee_limit = fees['gasFee']
|
||||||
|
new_gas_exp_limit = fees['gasExp']
|
||||||
|
new_fill_fee_half_bps_limit = fees['fillFeeHalfBps']
|
||||||
|
except KeyError:
|
||||||
|
return
|
||||||
|
global order_fee_limit, gas_fee_limit
|
||||||
|
order_fee_limit = new_order_fee_limit << new_order_exp_limit
|
||||||
|
gas_fee_limit = new_gas_fee_limit << new_gas_exp_limit
|
||||||
|
fill_fee_limit = new_fill_fee_half_bps_limit / 200
|
||||||
|
log.info(f'gas fee limits updated: orderFeeLimit={new_order_fee_limit/10**18}, gasFeeLimit={new_gas_fee_limit/10**18}, fillFeeLimit={fill_fee_limit:.3%}')
|
||||||
|
|
||||||
|
|
||||||
|
# noinspection DuplicatedCode
|
||||||
|
async def handle_fees_changed(event: EventData):
|
||||||
|
try:
|
||||||
|
fees = event['args']['fees']
|
||||||
|
new_order_fee = fees['orderFee']
|
||||||
|
new_order_exp = fees['orderExp']
|
||||||
|
new_gas_fee = fees['gasFee']
|
||||||
|
new_gas_exp = fees['gasExp']
|
||||||
|
new_fill_fee_half_bps = fees['fillFeeHalfBps']
|
||||||
|
except KeyError:
|
||||||
|
return
|
||||||
|
global order_fee, gas_fee
|
||||||
|
order_fee = new_order_fee << new_order_exp
|
||||||
|
gas_fee = new_gas_fee << new_gas_exp
|
||||||
|
fill_fee = new_fill_fee_half_bps / 200
|
||||||
|
log.info(f'gas fees updated: orderFee={new_order_fee/10**18}, gasFee={new_gas_fee/10**18}, fillFee={fill_fee:.3%}')
|
||||||
|
|
||||||
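Note on the fee encoding: ensure_gas_fee_data reconstructs whole-wei fees as base << exp, so to_base_exp must perform the inverse split. A minimal sketch of that inverse, assuming a 16-bit base field and floor rounding (the real helper lives in dexorder.util.convert and may differ):

import math

def to_base_exp_sketch(value: int, rounder=math.floor, base_bits: int = 16) -> tuple[int, int]:
    # Hypothetical inverse of `base << exp`: shrink value until it fits in base_bits.
    exp = max(0, value.bit_length() - base_bits)
    base = value >> exp if rounder is math.floor else rounder(value / (1 << exp))
    return base, exp

# round-trip check: the reconstructed fee never exceeds the original,
# and the quantization error is bounded by one unit of the exponent
fee = round(0.0005 * 10**18)   # 0.0005 ETH in wei
base, exp = to_base_exp_sketch(fee)
assert (base << exp) <= fee and fee - (base << exp) < (1 << exp)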
44 src/dexorder/marks.py Normal file
@@ -0,0 +1,44 @@
"""
"marks" are mark-to-market USD values of a selected set of tokens called quote tokens. Publishing a set of USD marks
for the quote tokens allows almost any token to be marked to USD via one hop.
"""

import logging
import time

from dexorder import dec, NATIVE_TOKEN, config
from dexorder.base.chain import current_chain
from dexorder.blockstate import BlockDict
from dexorder.pools import quotes, mark_to_market

log = logging.getLogger(__name__)


def pub_marks(_s, k, v):
    chain_id = current_chain.get().id
    return str(chain_id), 'marks.usd', (chain_id, k, str(v))


marks: BlockDict[str, dec] = BlockDict('mark.usd', db=False, redis=True, pub=pub_marks, value2str=str)


class RateLimiter:
    def __init__(self, rate: float):
        self.rate = rate
        self.last_update = 0.0

    def ready(self):
        now = time.monotonic()
        if now - self.last_update < self.rate:
            return False
        self.last_update = now
        return True


mark_publish_rate = RateLimiter(config.mark_publish_seconds)


def publish_marks():
    if mark_publish_rate.ready():
        for token_addr in [NATIVE_TOKEN] + quotes:
            # overwrite=False checks the previous value and does not generate a diff if the values match. This prevents
            # excessive updates to Redis
            value = mark_to_market(token_addr)
            if value is not None:
                marks.setitem(token_addr, value, overwrite=False)
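publish_marks relies on RateLimiter.ready() returning True at most once per config.mark_publish_seconds, measured on the monotonic clock. A small usage sketch reusing the RateLimiter class above (the interval is illustrative):

import time

limiter = RateLimiter(0.1)           # at most one publish per 100 ms
assert limiter.ready() is True       # first call passes and stamps last_update
assert limiter.ready() is False      # an immediate retry is suppressed
time.sleep(0.11)
assert limiter.ready() is True       # the window has elapsed again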
@@ -1,3 +1,4 @@
+import itertools
 import logging
 from contextlib import asynccontextmanager
 from contextvars import ContextVar
@@ -10,16 +11,70 @@ from dexorder import config

 log = logging.getLogger(__name__)

+BATCH_SIZE = 1_000
+
+
+class PipelineProxy:
+    def __init__(self, pipe: Pipeline):
+        self.pipe = pipe
+        self.ops = 0
+
+    async def push(self, num=1):
+        self.ops += num
+        if self.ops >= BATCH_SIZE:
+            self.ops = 0
+            await self.pipe.execute()
+
+    async def sadd(self, series, *keys):
+        while keys:
+            most = min(BATCH_SIZE - self.ops, len(keys))
+            assert most > 0
+            send = keys[:most]
+            keys = keys[most:]
+            await self.pipe.sadd(series, *send)
+            await self.push(len(send))
+
+    async def srem(self, series, *keys):
+        while keys:
+            most = min(BATCH_SIZE - self.ops, len(keys))
+            assert most > 0
+            send = keys[:most]
+            keys = keys[most:]
+            await self.pipe.srem(series, *send)
+            await self.push(len(send))
+
+    async def hset(self, series, *, mapping):
+        items = list(mapping.items())
+        while items:
+            most = min(BATCH_SIZE - self.ops, len(items))
+            assert most > 0
+            send = items[:most]
+            items = items[most:]
+            await self.pipe.hset(series, mapping={k: v for k, v in send})
+            await self.push(len(send))
+
+    async def hdel(self, series, *keys):
+        while keys:
+            most = min(BATCH_SIZE - self.ops, len(keys))
+            assert most > 0
+            send = keys[:most]
+            keys = keys[most:]
+            await self.pipe.hdel(series, *send)
+            await self.push(len(send))
+
+    def __getattr__(self, item):
+        return getattr(self.pipe, item)
+
+
 class Memcache:
     @staticmethod
     @asynccontextmanager
-    async def batch():
+    async def batch(transaction=True):
         old_redis: Redis = current_redis.get()
-        pipe: Pipeline = old_redis.pipeline()
+        pipe = old_redis.pipeline(transaction=transaction)
+        # noinspection PyTypeChecker
         current_redis.set(pipe)
         try:
-            yield pipe
+            yield PipelineProxy(pipe)
             await pipe.execute()
         finally:
             current_redis.set(old_redis)
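PipelineProxy exists to keep any single Redis pipeline below BATCH_SIZE queued commands: every wrapper slices its arguments so that push() can flush mid-stream. A rough trace of just the slicing arithmetic, with no Redis connection involved:

BATCH_SIZE = 1_000

def chunk_counts(total_keys: int, ops_already_queued: int = 0) -> list[int]:
    # mirrors the while-loop in PipelineProxy.sadd: yield each slice size
    ops, remaining, sizes = ops_already_queued, total_keys, []
    while remaining:
        most = min(BATCH_SIZE - ops, remaining)
        sizes.append(most)
        remaining -= most
        ops = (ops + most) % BATCH_SIZE   # push() resets the counter at the boundary
    return sizes

assert chunk_counts(2_500) == [1_000, 1_000, 500]
assert chunk_counts(50, ops_already_queued=990) == [10, 40]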
@@ -12,7 +12,8 @@ from dexorder.blockstate.blockdata import SeriesCollection, BlockData
 from dexorder.blockstate.diff import DiffEntryItem
 from dexorder.blockstate.fork import Fork
 from dexorder.blockstate.state import compress_diffs
-from dexorder.memcache import current_redis, memcache
+from dexorder.memcache import current_redis, memcache, PipelineProxy
+from dexorder.util import hexstr
 from dexorder.util.async_util import maywait
 from dexorder.util.json import json_encoder

@@ -39,11 +40,11 @@ class RedisState (SeriesCollection):
         for series in self.datas.keys():
             for k, v in state.iteritems(fork, series):
                 diffs.append(DiffItem(series, k, v))
-        await self.save(fork, diffs)
+        await self.save(fork, diffs, use_transaction=False, skip_pubs=True)  # use_transaction=False if the data is too big

     # noinspection PyAsyncCall
-    async def save(self, fork: Fork, diffs: Reversible[Union[DiffItem, DiffEntryItem]]):
+    async def save(self, fork: Fork, diffs: Reversible[Union[DiffItem, DiffEntryItem]], *, use_transaction=True, skip_pubs=False):
         # the diffs must be already compressed such that there is only one action per key
         chain = current_chain.get()
         chain_id = chain.id
@@ -90,21 +91,23 @@ class RedisState (SeriesCollection):
                 hsets[series][key] = value
             else:
                 raise NotImplementedError
-        async with memcache.batch() as r:
-            r: Pipeline
+        async with memcache.batch(use_transaction) as r:
+            r: PipelineProxy
             for series, keys in sadds.items():
-                r.sadd(series, *keys)
+                await r.sadd(series, *keys)
             for series, keys in sdels.items():
-                r.srem(series, *keys)
+                await r.srem(series, *keys)
             for series, kvs in hsets.items():
-                r.hset(series, mapping=kvs)
+                await r.hset(series, mapping=kvs)
             for series, keys in hdels.items():
-                r.hdel(series, *keys)
+                await r.hdel(series, *keys)
             block_series = f'{chain_id}|head'
-            r.json(json_encoder).set(block_series, '$', [fork.height, fork.head])
-            pubs.append((str(chain_id), 'head', [fork.height, fork.head]))
+            headstr = hexstr(fork.head)
+            r.json(json_encoder).set(block_series, '$', [fork.height, headstr])
+            pubs.append((str(chain_id), 'head', [fork.height, headstr]))
         # separate batch for pubs
-        if pubs:
+        if pubs and not skip_pubs:
            await publish_all(pubs)
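The switch to headstr addresses serialization: fork.head is a raw block-hash bytes value, and bytes are not JSON-serializable, so both the stored head record and the pub payload now carry a hex string. A sketch of what hexstr is assumed to do (the real helper is dexorder.util.hexstr and may prefix differently):

def hexstr_sketch(b: bytes) -> str:
    # hypothetical stand-in for dexorder.util.hexstr
    return '0x' + b.hex()

head = bytes.fromhex('ab' * 32)
payload = [19_000_000, hexstr_sketch(head)]   # [fork.height, headstr] as saved to Redis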
8 src/dexorder/metric/__init__.py Normal file
@@ -0,0 +1,8 @@
default_labels = dict(
    # Default label keys must be defined here but set their actual values later in metric_startup.py:setup_default_labels()
    pod=None,
    chain_id=None,
)

from .metric_type import *
from .metrics import *
34 src/dexorder/metric/metric_startup.py Normal file
@@ -0,0 +1,34 @@
import os

from prometheus_client import start_http_server

from dexorder import config, metric, now
from dexorder.base.chain import current_chain

_chain_id = None


def _height_or_none(block):
    return None if block is None else block.height


def start_metrics_server():
    # First, set default_labels
    setup_default_labels()
    # Start the http daemon thread
    start_http_server(config.metrics_port)


def setup_default_labels():
    global _chain_id
    _chain_id = current_chain.get().id
    metric.default_labels['chain_id'] = _chain_id
    metric.default_labels['pod'] = os.environ.get('HOSTNAME', '')

    metric.info.info({
        # MUST BE STRING VALUES
        'started': now().isoformat(),
        # 'pod': os.environ.get('HOSTNAME', ''),
        # 'chain_id': _chain_id,
    })
94 src/dexorder/metric/metric_type.py Normal file
@@ -0,0 +1,94 @@
__all__ = ['Counter', 'Gauge', 'Summary', 'Histogram', 'Info', 'Enum']

import functools

import prometheus_client as pc

from dexorder import metric
from dexorder.metric import default_labels


def metric_class(OurCls, PcCls, **defaultkw):
    def construct(defkw, name, documentation, labelnames=(), namespace='dexorder', **kwargs):
        kw = dict(defkw)
        kw.update(kwargs)
        labelnames = tuple(labelnames) + tuple(metric.default_labels.keys()) + tuple(kw.pop('labelnames', ()))
        return OurCls(PcCls(name, documentation, labelnames, namespace, **kw))

    return functools.partial(construct, defaultkw)


class _Labeled:

    def __init__(self, obj, labels=None):
        self._obj = obj
        self._labels = labels or {}

    def labels(self, **kwargs):
        kw = self._labels.copy()
        kw.update(kwargs)
        return self.__class__(self._obj, kw)

    def _apply(self):
        labels = default_labels | self._labels
        return self._obj.labels(**labels) if labels else self._obj


class _Counter(_Labeled):
    def inc(self, amount=1):
        self._apply().inc(amount)


Counter = metric_class(_Counter, pc.Counter)


class _Gauge(_Labeled):
    def inc(self, amount=1):
        self._apply().inc(amount)

    def dec(self, amount=1):
        self._apply().dec(amount)

    def set(self, value):
        self._apply().set(value)

    def set_to_current_time(self):
        self._apply().set_to_current_time()

    def set_function(self, f):
        self._apply().set_function(f)


Gauge = metric_class(_Gauge, pc.Gauge)


class _Summary(_Labeled):
    def observe(self, amount):
        self._apply().observe(amount)


Summary = metric_class(_Summary, pc.Summary)


class _Histogram(_Labeled):
    def observe(self, amount):
        self._apply().observe(amount)


Histogram = metric_class(_Histogram, pc.Histogram)


class _Info(_Labeled):
    def info(self, val):
        self._apply().info(val)


Info = metric_class(_Info, pc.Info)


class _Enum(_Labeled):
    def state(self, state):
        self._apply().state(state)


Enum = metric_class(_Enum, pc.Enum)
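metric_class builds a factory whose instances merge default_labels with per-call labels at observation time, so pod and chain_id never need to be passed at call sites. A usage sketch under the assumption that setup_default_labels() has already populated the defaults ('route' is an illustrative extra label name):

requests_total = Counter('requests_total', 'Requests served', labelnames=('route',))

# pod and chain_id are injected by _Labeled._apply(); only the extra label is
# supplied here, and labels() chains by copying rather than mutating state
requests_total.labels(route='/api/orders').inc()
by_route = requests_total.labels(route='/api/vaults')
by_route.inc(3)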
25 src/dexorder/metric/metrics.py Normal file
@@ -0,0 +1,25 @@
from .metric_type import *

# Put any set_function(...) calls in metric_startup.py:automatic_metrics()

info = Info("backend_info", "Information about the backend process")

block_current = Gauge("block_current", "Current block number being processed")
block_latest = Gauge("block_latest", "Highest block number seen")

runner_loops = Counter("runner_loops", "Number of times the runner loop has been completed")
runner_latency = Summary("runner_latency", "How old the current block being processed is, in seconds")

vaults = Gauge("vaults", "Total vault count")
orders = Counter("orders", "Orders placed")
open_orders = Gauge("open_orders", "Total active orders")
triggers_time = Gauge("triggers_time", "Total active time triggers")
triggers_line = Gauge("triggers_line", "Total active line triggers")
executions = Counter("executions", "Total executions attempted")
executions_failed = Counter("executions_failed", "Number of failed execution attempts")
volume = Counter("volume", "Total volume of successful executions in USD")

account_total = Gauge('account_total', 'Total number of accounts configured')
account_available = Gauge('account_available', 'Number of accounts that do not have any pending transactions')

gas_price = Summary('gas_price', 'Gas price in wei')
128 src/dexorder/ofac.py Normal file
@@ -0,0 +1,128 @@
import json
import logging
import re
import xml.etree.ElementTree as ET
from datetime import datetime, timedelta
from pytz import timezone

import requests
from eth_utils import to_checksum_address

from dexorder import db
from dexorder.base.chain import current_chain
from dexorder.database.model.ofac import OFAC

log = logging.getLogger(__name__)
deny = set()
ofac_enabled = True


def init_ofac():
    if current_chain.get().id in [1337, 31337]:
        log.info(f'OFAC disabled on test network {current_chain.get()}')
        global ofac_enabled
        ofac_enabled = False
        return
    global deny
    deny = set(o.address for o in db.session.query(OFAC).all())
    update_ofac()


# feature_ids are the OFAC feature integer IDs for cryptocurrency features e.g. "Digital Currency - BTC"
def save_ofac_meta(date: datetime, feature_ids: set[int]):
    # todo tim debug
    pass
    # db.kv['ofac_meta'] = dict(date=date.isoformat(), feature_ids=sorted(feature_ids))


def get_ofac_meta():
    # found = db.kv.get('ofac_meta')
    found = None  # todo tim debug
    if found is None:
        return None, set()
    [date, feature_ids] = found
    date = datetime.fromisoformat(date)
    return date, set(feature_ids)


def add_ofac_denial(addr):
    db.session.add(OFAC(addr))
    deny.add(addr)
    # todo push to redis


def process_ofac_xml(xml: str, date: datetime = None, feature_ids: set[int] = None):
    if date is None or feature_ids is None:
        [date, feature_ids] = get_ofac_meta()

    # Parse the XML string
    doc = ET.parse(xml)
    ns = {'o': 'https://sanctionslistservice.ofac.treas.gov/api/PublicationPreview/exports/ENHANCED_XML'}
    new_date = datetime.fromisoformat(doc.find('./o:publicationInfo/o:dataAsOf', ns).text)
    if date is not None and new_date <= date:
        if new_date < date:
            log.debug('ignoring old OFAC XML')
        return
    for ft in doc.findall('./o:featureTypes/o:featureType', ns):
        found = re.match(r'Digital Currency Address - (.+)', ft.find('./o:type', ns).text)
        if found:
            currency = found.group(1)
            feature_id = int(ft.attrib['featureTypeId'])
            print(currency, feature_id)
            feature_ids.add(feature_id)
    for e in doc.findall('./o:entities/o:entity', ns):
        for f in e.findall('./o:features/o:feature', ns):
            t = f.find('./o:type', ns)
            feature_id = int(t.attrib['featureTypeId'])
            if feature_id in feature_ids:
                addr = f.find('./o:value', ns).text
                if addr.startswith('0x'):
                    check = to_checksum_address(addr)
                    add_ofac_denial(check)
                    print(check)
    save_ofac_meta(new_date, feature_ids)
    return new_date, feature_ids


def day(date, tz=None):
    if tz is None:
        tz = date.tzinfo
    return datetime(date.year, date.month, date.day, tzinfo=tz)


treasury_tz = timezone('America/New_York')


def update_ofac():
    if not ofac_enabled:
        return
    date: datetime
    [date, feature_ids] = get_ofac_meta()
    if date is None:
        # fetch and process the full dataset
        log.info(f'initializing OFAC')
        url = 'https://sanctionslistservice.ofac.treas.gov/entities'
        xml = requests.get(url).text
        process_ofac_xml(xml, date, feature_ids)
    else:
        # fetch only the changes since last time
        now = day(date)
        last = day(datetime.now(date.tzinfo))
        publications = []
        while now <= last:
            url = f'https://sanctionslistservice.ofac.treas.gov/changes/history/{now.year}/{now.month}/{now.day}'
            for pub in json.loads(requests.get(url).text):
                # {"publicationID":407,"datePublished":"2024-11-19T10:03:00.790295"}
                # NO TIMEZONE IN THIS RESPONSE :(((((
                pub_id = pub['publicationId']
                pub_date = day(pub['datePublished'], treasury_tz)
                if pub_date > date:
                    publications.append(pub_id)
            now += timedelta(days=1)
        if not publications:
            log.info(f'OFAC table is current. Last publication date: {date}')
        else:
            for pub_id in publications:
                url = f'https://sanctionslistservice.ofac.treas.gov/changes/{pub_id}'
                xml = requests.get(url).text
                date, feature_ids = process_ofac_xml(xml, date, feature_ids)
                log.info(f'OFAC updated with publication {pub_id} {date}')


if __name__ == '__main__':
    process_ofac_xml('/tmp/entities.xml')
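One caveat worth flagging in process_ofac_xml: ET.parse expects a filename or file object, which matches the `__main__` invocation with '/tmp/entities.xml' but not update_ofac, which passes the downloaded XML text. A hedged sketch of a front-end that accepts either form (the helper name is illustrative, not part of the module):

import xml.etree.ElementTree as ET

def parse_ofac_document(xml: str) -> ET.ElementTree:
    # Treat the argument as a path first; fall back to parsing it as raw XML text.
    try:
        return ET.parse(xml)
    except (OSError, ValueError):
        return ET.ElementTree(ET.fromstring(xml))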
@@ -359,7 +359,7 @@ class OHLCRepository:
         if price is None, then bars are advanced based on the time but no new price is added to the series.
         """
         if OHLC_LIMIT_POOLS_DEBUG is not None and (symbol,period) not in OHLC_LIMIT_POOLS_DEBUG:
-            return
+            return None
         # logname = f'{symbol} {period_name(period)}'
         # log.debug(f'Updating OHLC {logname} {minutely(time)} {price}')
         if price is not None:
@@ -371,33 +371,31 @@ class OHLCRepository:
         # log.debug(f'got recent {historical}')
         if not historical:
             if create is False or price is None:
-                return  # do not track symbols which have not been explicity set up
-            historical = []
+                return None  # do not track symbols which have not been explicitly set up
             updated = [NativeOHLC(ohlc_start_time(time, period), price, price, price, price)]
             # log.debug(f'\tcreated new bars {updated}')
         else:
             updated = update_ohlc(historical[-1], period, time, price)
-        # drop any historical bars that are older than we need
-        # oldest_needed = cover the root block time plus one period prior
-        root_branch = current_blockstate.get().root_branch
-        root_hash = root_branch.head
-        if root_hash is not None:
-            root_timestamp = await get_block_timestamp(root_hash)
-            oldest_needed = from_timestamp(root_timestamp) - period
-            # noinspection PyTypeChecker
-            trim = (oldest_needed - historical[0].start) // period
-            if trim > 0:
-                historical = historical[trim:]
-
-        # now overlap the updated data on top of the historical data
-        if not historical or not updated:
-            updated = historical + updated
-        else:
+            # overlap the updated OHLC's on top of the historical ones
             last_bar = historical[-1].start
             first_updated = updated[0].start
             overlap = (first_updated - last_bar) // period + 1
             updated = historical[:-overlap] + updated if overlap > 0 else historical + updated
-        # log.debug(f'\tnew recents: {updated}')
+        # drop any bars that are older than we need
+        # oldest_needed = cover the root block time plus one period prior
+        root_branch = current_blockstate.get().root_branch
+        root_hash = root_branch.head
+        if root_hash is not None:
+            root_timestamp = await get_block_timestamp(root_hash)
+            oldest_needed = from_timestamp(root_timestamp) - period
+            # noinspection PyTypeChecker
+            trim = (oldest_needed - updated[0].start) // period
+            if trim > 0:
+                updated = updated[trim:]
+
+        # if len(updated) > 3:
+        #     log.debug(f'\tnew recents ({len(updated)}): {updated}')
         recent_ohlcs.setitem(key, updated)
         return updated
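The overlap arithmetic is the crux of this rewrite: `(first_updated - last_bar) // period + 1` counts how many trailing historical bars the fresh bars supersede. A worked example with five-minute bars (timestamps are illustrative):

from datetime import datetime, timedelta

period = timedelta(minutes=5)
last_bar = datetime(2025, 1, 3, 12, 55)             # newest historical bar
updated_starts = [datetime(2025, 1, 3, 12, 55),     # rebuilt current bar
                  datetime(2025, 1, 3, 13, 0)]      # newly opened bar
overlap = (updated_starts[0] - last_bar) // period + 1
assert overlap == 1   # one historical bar is superseded: historical[:-1] + updated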
@@ -1,5 +1,3 @@
-from .executionhandler import TrancheExecutionHandler  # do not remove. ensures the handler is registered.
-
 def order_key(vault:str, ):
     return f'{vault}'
@@ -1,26 +1,69 @@
 import logging
-from typing import Optional
+from dataclasses import dataclass
+from typing import Optional, Union, Any
 from uuid import UUID

 from web3.exceptions import ContractPanicError, ContractLogicError
 from web3.types import EventData

-from dexorder import db
-from dexorder.base import TransactionReceiptDict
+from dexorder import db, metric, config
+from dexorder.accounting import accounting_transaction_gas
+from dexorder.base import TransactionReceiptDict, TransactionRequest, transaction_request_deserializers
 from dexorder.base.order import TrancheKey, OrderKey
 from dexorder.base.orderlib import PriceProof
 from dexorder.contract.dexorder import get_dexorder_contract
+from dexorder.database.model.accounting import AccountingSubcategory
 from dexorder.database.model.transaction import TransactionJob
 from dexorder.order.orderstate import Order
-from dexorder.order.triggers import (inflight_execution_requests, OrderTriggers,
+from dexorder.order.triggers import (OrderTriggers,
                                      TrancheState, active_tranches, order_error)
-from dexorder.transactions import TransactionHandler, TrancheExecutionRequest, submit_transaction_request, \
-    new_tranche_execution_request
+from dexorder.transactions import TransactionHandler, submit_transaction_request
 from dexorder.util import hexbytes
+from dexorder.vault_blockdata import refresh_vault_balances

 log = logging.getLogger(__name__)


+@dataclass
+class TrancheExecutionRequest (TransactionRequest):
+    TYPE = 'te'
+
+    # type='te' for tranche execution
+    vault: str
+    order_index: int
+    tranche_index: int
+    price_proof: Union[None, dict, tuple[int]]
+
+    def __init__(self, vault: str, order_index: int, tranche_index: int, price_proof: Union[None, dict, tuple[int]], **_):
+        super().__init__(TrancheExecutionRequest.TYPE, (vault, order_index, tranche_index))
+        self.vault = vault
+        self.order_index = order_index
+        self.tranche_index = tranche_index
+        self.price_proof = price_proof
+
+    def key(self) -> Any:
+        return self.vault, self.order_index, self.tranche_index
+
+    @property
+    def order_key(self):
+        return OrderKey(self.vault, self.order_index)
+
+    @property
+    def tranche_key(self):
+        return TrancheKey(self.vault, self.order_index, self.tranche_index)
+
+
+# Must register the class for deserialization
+# noinspection PyTypeChecker
+transaction_request_deserializers[TrancheExecutionRequest.TYPE] = lambda **data: TrancheExecutionRequest(data['vault'], data['order_index'], data['tranche_index'], data['price_proof'] if 'price_proof' in data else None)
+
+
+def new_tranche_execution_request(tk: TrancheKey, proof: Optional[PriceProof] = None) -> TrancheExecutionRequest:
+    if proof is None:
+        proof = PriceProof(0)
+    return TrancheExecutionRequest(tk.vault, tk.order_index, tk.tranche_index, proof.dump())
+
+
 class TrancheExecutionHandler (TransactionHandler):
     def __init__(self):
         super().__init__('te')
@@ -37,24 +80,24 @@ class TrancheExecutionHandler (TransactionHandler):
             errcode = hexbytes(x.args[1]).decode('utf-8')
             log.error(f'While building execution for tranche {tk}: {errcode}')
             # if there's a logic error we shouldn't keep trying
-            finish_execution_request(tk, errcode)
+            await finish_execution_request(tk, errcode)
             raise exception

     async def complete_transaction(self, job: TransactionJob, receipt: TransactionReceiptDict) -> None:
-        # we handle execution results using the DexorderExecution event, so there's nothing to do here.
-        pass
+        # we handle execution results using the DexorderExecution event. here, only accounting is required
+        await accounting_transaction_gas(receipt, AccountingSubcategory.Execution)

     async def transaction_exception(self, job: TransactionJob, e: Exception) -> None:
         log.error('Could not build execution transaction due to exception', exc_info=e)
         # noinspection PyTypeChecker
         req: TrancheExecutionRequest = job.request
-        finish_execution_request(req.tranche_key, '')
+        await finish_execution_request(req.tranche_key, '')


 TrancheExecutionHandler()  # map 'te' to a TrancheExecutionHandler


-def finish_execution_request(tk: TrancheKey, error: Optional[str]=None):
+async def finish_execution_request(tk: TrancheKey, error: Optional[str]=None):
     order_key = OrderKey(tk.vault, tk.order_index)
     try:
         order: Order = Order.of(order_key)
@@ -62,11 +105,6 @@ def finish_execution_request(tk: TrancheKey, error: Optional[str]=None):
         log.error(f'Could not get order {order_key}')
         return

-    try:
-        inflight_execution_requests.remove(tk)
-    except KeyError:
-        pass
-
     def get_trigger():
         try:
             return OrderTriggers.instances[order_key].triggers[tk.tranche_index]
@@ -83,6 +121,16 @@ def finish_execution_request(tk: TrancheKey, error: Optional[str]=None):
         if trig is not None:
             trig.touch()

+    def delay(secs=None):
+        trig = get_trigger()
+        if trig is not None:
+            trig.deactivate(secs if secs is not None else config.slippage_control_delay)
+
+    if error is None:
+        metric.executions.inc()
+    else:
+        metric.executions_failed.inc()
+
     #
     # execute() error handling
     #
@@ -92,7 +140,9 @@ def finish_execution_request(tk: TrancheKey, error: Optional[str]=None):
         # Insufficient Input Amount
         token = order.order.tokenIn
         log.debug(f'insufficient funds {tk.vault} {token} ')
+        slash()
         retry()
+        await refresh_vault_balances(tk.vault, order.order.tokenIn, order.order.tokenOut)
     elif error == 'SPL':
         # todo tight slippage can cause excessive executions as the backend repeatedly retries the remainder. The symptom is error 'SPL'.
         # Square-root price limit from Uniswap means we asked for a limit price that isn't met. This is a fault of
@@ -105,7 +155,7 @@ def finish_execution_request(tk: TrancheKey, error: Optional[str]=None):
         order_error(order, error)  # We do not know if it was filled or not so only Error status can be given
     elif error == 'TF':
         # Tranche Filled
-        log.warning(f'tranche already filled {tk}')
+        log.debug(f'tranche already filled {tk}')
         tranche_trigger = get_trigger()
         if tranche_trigger is not None:
             tranche_trigger.status = TrancheState.Filled
@@ -117,6 +167,7 @@ def finish_execution_request(tk: TrancheKey, error: Optional[str]=None):
         retry()
     elif error == 'RL':
         log.debug(f'tranche {tk} execution failed due to "RL" rate limit')
+        delay()
         retry()
     elif error == 'TE':
         log.debug(f'tranche {tk} execution failed due to "TE" too early')
@@ -152,24 +203,27 @@ def finish_execution_request(tk: TrancheKey, error: Optional[str]=None):
 def execute_tranches():
     new_execution_requests = []
     for tk, proof in active_tranches.items():
-        if tk not in inflight_execution_requests:
-            new_execution_requests.append((tk, proof))
+        new_execution_requests.append((tk, proof))
     # todo order requests and batch
     for tk, proof in new_execution_requests:
         create_execution_request(tk, proof)


 def create_execution_request(tk: TrancheKey, proof: PriceProof):
-    inflight_execution_requests.add(tk)
     job = submit_transaction_request(new_tranche_execution_request(tk, proof))
-    log.debug(f'Executing {tk} as job {job.id}')
+    if job is not None:
+        log.debug(f'Executing {tk} as job {job.id}')
     return job


-def handle_dexorderexecutions(event: EventData):
+async def handle_dexorderexecutions(event: EventData):
     log.debug(f'executions {event}')
     exe_id = UUID(bytes=event['args']['id'])
-    errors = event['args']['errors']
+    try:
+        errors = event['args']['errors']
+    except KeyError:
+        log.warning(f'Rogue DexorderExecutions event {event}')
+        return
     if len(errors) == 0:
         log.warning(f'No errors found in DexorderExecutions event: {event}')
         return
@@ -182,4 +236,4 @@ def handle_dexorderexecutions(event: EventData):
     # noinspection PyTypeChecker
     req: TrancheExecutionRequest = job.request
     tk = TrancheKey(req.vault, req.order_index, req.tranche_index)
-    finish_execution_request(tk, None if errors[0] == '' else errors[0])
+    await finish_execution_request(tk, None if errors[0] == '' else errors[0])
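transaction_request_deserializers is the registry that rehydrates persisted jobs: each TransactionRequest subclass maps its TYPE string to a factory taking the stored fields as keywords. A minimal sketch of the pattern under that assumption (names here are illustrative, not the dexorder API):

from typing import Any, Callable

registry: dict[str, Callable[..., Any]] = {}   # stand-in for transaction_request_deserializers

class Ping:
    TYPE = 'ping'
    def __init__(self, target: str, **_):      # **_ tolerates extra persisted fields
        self.target = target

registry[Ping.TYPE] = lambda **data: Ping(**data)

def deserialize(record: dict) -> Any:
    # dispatch on the persisted 'type' discriminator, as the handler framework would
    return registry[record['type']](**{k: v for k, v in record.items() if k != 'type'})

req = deserialize({'type': 'ping', 'target': '0xabc', 'extra': 1})
assert req.target == '0xabc'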
@@ -3,13 +3,14 @@ import logging
 from dataclasses import dataclass
 from typing import overload

-from dexorder import DELETE, db, order_log
+from dexorder import DELETE, db, order_log, from_timestamp
 from dexorder.base.chain import current_chain
 from dexorder.base.order import OrderKey, TrancheKey
 from dexorder.base.orderlib import SwapOrderState, ElaboratedSwapOrderStatus, Fill
 from dexorder.blockstate import BlockDict, BlockSet
 from dexorder.database.model.orderindex import OrderIndex
 from dexorder.routing import pool_address
+from dexorder.tokens import adjust_decimals
 from dexorder.util import json
 from dexorder.vault_blockdata import vault_owners

@@ -127,7 +128,7 @@ class Order:
         key = a if b is None else OrderKey(a, b)
         assert key not in Order.instances
         self.key = key
-        self.status: ElaboratedSwapOrderStatus = Order.order_statuses[key].copy()
+        self._status: ElaboratedSwapOrderStatus = Order.order_statuses[key].copy()
         self.pool_address: str = pool_address(self.status.order)
         self.tranche_keys = [TrancheKey(key.vault, key.order_index, i) for i in range(len(self.status.trancheStatus))]
         # flattenings of various static data
@@ -138,6 +139,14 @@ class Order:
         self.tranche_amounts = [t.fraction_of(self.amount) for t in self.order.tranches]
         Order.instances[self.key] = self

+    @property
+    def status(self):
+        return self._status
+
+    @status.setter
+    def status(self, v):
+        self._status = Order.order_statuses[self.key] = v
+
     @property
     def state(self):
         return self.status.state
@@ -279,6 +288,33 @@ class Order:
         Order.vault_recently_closed_orders.listremove(key.vault, key.order_index)

+    def __str__(self):
+        return str(self.key)
+
+    async def pprint(self):
+        amount_token = self.order.tokenIn if self.order.amountIsInput else self.order.tokenOut
+        msg = f'''
+SwapOrder {self.key}
+  status: {self.state.name}
+  placed: {from_timestamp(self.status.startTime)}
+  in: {self.order.tokenIn}
+  out: {self.order.tokenOut}
+  exchange: {self.order.route.exchange.name, self.order.route.fee}
+  amount: {"input" if self.order.amountIsInput else "output"} {await adjust_decimals(amount_token, self.filled):f}/{await adjust_decimals(amount_token, self.amount):f}{" to owner" if self.order.outputDirectlyToOwner else ""}
+  minFill: {await adjust_decimals(amount_token, self.min_fill_amount):f}
+  inverted: {self.order.inverted}
+  tranches:
+'''
+        for i in range(len(self.order.tranches)):
+            tranche = self.order.tranches[i]
+            msg += f'    {tranche}'
+            filled_amount = self.tranche_filled(i)
+            if filled_amount:
+                msg += f' filled {await adjust_decimals(amount_token, filled_amount)}'
+            msg += '\n'
+        return msg
+
     # ORDER STATE
     # various blockstate fields hold different aspects of an order's state.

@@ -310,8 +346,6 @@ class Order:
         'of', db=True, redis=True, pub=pub_order_fills,
         str2key=OrderKey.str2key, value2str=lambda v: json.dumps(v.dump()), str2value=lambda s: OrderFilled.load(json.loads(s)))

-    def __str__(self):
-        return str(self.order)
-
 # "active" means the order wants to be executed now. this is not BlockData because it's cleared every block
 active_orders: dict[OrderKey, Order] = {}
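The new status property writes through to Order.order_statuses, so the shared status map and the instance copy can no longer drift apart after a reassignment. The same pattern in miniature (class and key names are illustrative):

class Registry:
    statuses = {}   # shared map, analogous to Order.order_statuses

    def __init__(self, key, status):
        self.key = key
        self._status = status

    @property
    def status(self):
        return self._status

    @status.setter
    def status(self, v):
        # write-through: the instance cache and the shared map stay in sync
        self._status = Registry.statuses[self.key] = v

r = Registry('vault-1', 'Open')
r.status = 'Underfunded'
assert Registry.statuses['vault-1'] == 'Underfunded'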
|||||||
@@ -2,13 +2,15 @@ import asyncio
|
|||||||
import logging
|
import logging
|
||||||
from abc import abstractmethod
|
from abc import abstractmethod
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
|
from datetime import timedelta
|
||||||
from enum import Enum, auto
|
from enum import Enum, auto
|
||||||
from typing import Optional, Sequence
|
from typing import Optional, Sequence, Union
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from sortedcontainers import SortedList
|
from sortedcontainers import SortedList
|
||||||
|
|
||||||
from dexorder.base.orderlib import SwapOrderState, PriceProof, DISTANT_FUTURE, DISTANT_PAST, Line
|
from dexorder.base.orderlib import SwapOrderState, PriceProof, DISTANT_FUTURE, DISTANT_PAST, Line, MIN_SLIPPAGE, \
|
||||||
|
MIN_SLIPPAGE_EPSILON
|
||||||
from dexorder.blockstate import BlockDict
|
from dexorder.blockstate import BlockDict
|
||||||
from .orderstate import Order
|
from .orderstate import Order
|
||||||
from .. import dec, order_log, timestamp, from_timestamp, config
|
from .. import dec, order_log, timestamp, from_timestamp, config
|
||||||
@@ -39,9 +41,6 @@ execution should be attempted on the tranche.
|
|||||||
# tranches which have passed all constraints and should be executed
|
# tranches which have passed all constraints and should be executed
|
||||||
active_tranches: BlockDict[TrancheKey, Optional[PriceProof]] = BlockDict('at')
|
active_tranches: BlockDict[TrancheKey, Optional[PriceProof]] = BlockDict('at')
|
||||||
|
|
||||||
# tranches which have an execute() transaction sent but not completed
|
|
||||||
inflight_execution_requests: set[TrancheKey] = set()
|
|
||||||
|
|
||||||
|
|
||||||
class OrderTriggers:
|
class OrderTriggers:
|
||||||
instances: dict[OrderKey, 'OrderTriggers'] = {}
|
instances: dict[OrderKey, 'OrderTriggers'] = {}
|
||||||
@@ -58,13 +57,13 @@ class OrderTriggers:
|
|||||||
self.order = order
|
self.order = order
|
||||||
self.triggers = triggers
|
self.triggers = triggers
|
||||||
OrderTriggers.instances[order.key] = self
|
OrderTriggers.instances[order.key] = self
|
||||||
log.debug(f'created OrderTriggers for {order.key}')
|
# log.debug(f'created OrderTriggers for {order.key}')
|
||||||
|
|
||||||
def disable(self):
|
def disable(self):
|
||||||
for t in self.triggers:
|
for t in self.triggers:
|
||||||
t.disable()
|
t.disable()
|
||||||
del OrderTriggers.instances[self.order.key]
|
del OrderTriggers.instances[self.order.key]
|
||||||
log.debug(f'disabled OrderTriggers for {self.order.key}')
|
# log.debug(f'disabled OrderTriggers for {self.order.key}')
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def closed(self):
|
def closed(self):
|
||||||
@@ -74,6 +73,10 @@ class OrderTriggers:
|
|||||||
def open(self):
|
def open(self):
|
||||||
return not self.closed
|
return not self.closed
|
||||||
|
|
||||||
|
@property
|
||||||
|
def error(self):
|
||||||
|
return any(t.error for t in self.triggers)
|
||||||
|
|
||||||
def check_complete(self):
|
def check_complete(self):
|
||||||
if self.closed:
|
if self.closed:
|
||||||
final_state = SwapOrderState.Filled if self.order.remaining == 0 or self.order.remaining < self.order.min_fill_amount else SwapOrderState.Expired
|
final_state = SwapOrderState.Filled if self.order.remaining == 0 or self.order.remaining < self.order.min_fill_amount else SwapOrderState.Expired
|
||||||
@@ -81,8 +84,8 @@ class OrderTriggers:
|
|||||||
|
|
||||||
def fill(self, tx: str, time: int, tranche_index, amount_in, amount_out, fee, next_activation_time):
|
def fill(self, tx: str, time: int, tranche_index, amount_in, amount_out, fee, next_activation_time):
|
||||||
self.order.add_fill(tx, time, tranche_index, amount_in, amount_out, fee, next_activation_time)
|
self.order.add_fill(tx, time, tranche_index, amount_in, amount_out, fee, next_activation_time)
|
||||||
if self.triggers[tranche_index].fill(amount_in, amount_out, next_activation_time):
|
self.triggers[tranche_index].fill(amount_in, amount_out, next_activation_time)
|
||||||
self.check_complete()
|
self.check_complete()
|
||||||
|
|
||||||
def expire_tranche(self, tranche_index):
|
def expire_tranche(self, tranche_index):
|
||||||
self.triggers[tranche_index].expire()
|
self.triggers[tranche_index].expire()
|
||||||
@@ -97,12 +100,13 @@ def start_trigger_updates():
|
|||||||
"""
|
"""
|
||||||
Called near the beginning of block handling to initialize any per-block trigger data structures
|
Called near the beginning of block handling to initialize any per-block trigger data structures
|
||||||
"""
|
"""
|
||||||
log.debug(f'current clock is {current_clock.get().timestamp} vs {timestamp()} => Δ{current_clock.get().timestamp-timestamp():.1f}s')
|
# log.debug(f'current clock is {current_clock.get().timestamp} vs {timestamp()} => Δ{current_clock.get().timestamp-timestamp():.1f}s')
|
||||||
TimeTrigger.update_all(current_clock.get().timestamp)
|
TimeTrigger.update_all(current_clock.get().timestamp)
|
||||||
PriceLineTrigger.clear_data()
|
PriceLineTrigger.clear_data()
|
||||||
|
|
||||||
|
|
||||||
async def update_balance_triggers(vault: str, token: str, balance: int):
|
async def update_balance_triggers(vault: str, token: str):
|
||||||
|
balance = vault_balances.get(vault, {}).get(token)
|
||||||
updates = [bt.update(balance) for bt in BalanceTrigger.by_vault_token.get((vault, token), [])]
|
updates = [bt.update(balance) for bt in BalanceTrigger.by_vault_token.get((vault, token), [])]
|
||||||
await asyncio.gather(*updates)
|
await asyncio.gather(*updates)
|
||||||
|
|
||||||
@@ -119,28 +123,27 @@ async def end_trigger_updates():
|
|||||||
Call once after all updates have been handled. This updates the active_tranches array based on final trigger state.
|
Call once after all updates have been handled. This updates the active_tranches array based on final trigger state.
|
||||||
"""
|
"""
|
||||||
PriceLineTrigger.end_updates(current_clock.get().timestamp)
|
PriceLineTrigger.end_updates(current_clock.get().timestamp)
|
||||||
# dirty can change
|
|
||||||
global _dirty
|
|
||||||
while _dirty:
|
while _dirty:
|
||||||
working_set = _dirty
|
tk = _dirty.pop()
|
||||||
_dirty = set()
|
# log.debug(f'check dirty tranche {tk}')
|
||||||
for tk in working_set:
|
if _trigger_state.get(tk,0) == 0:
|
||||||
log.debug(f'check dirty tranche {tk}')
|
# all clear for execution. add to active list with any necessary proofs
|
||||||
if _trigger_state.get(tk,0) == 0:
|
active_tranches[tk] = PriceProof(0)
|
||||||
# all clear for execution. add to active list with any necessary proofs
|
# log.debug(f'active tranche {tk}')
|
||||||
active_tranches[tk] = PriceProof(0)
|
else:
|
||||||
else:
|
# blocked by one or more triggers being False (nonzero mask)
|
||||||
# blocked by one or more triggers being False (nonzero mask)
|
reason = ', '.join(t.name for t in TrancheTrigger.all[tk].blocking_triggers)
|
||||||
# check expiry constraint
|
# log.debug(f'tranche {tk} blocked by {reason}')
|
||||||
try:
|
# check expiry constraint
|
||||||
TrancheTrigger.all[tk].check_expire()
|
try:
|
||||||
except KeyError:
|
TrancheTrigger.all[tk].check_expire()
|
||||||
pass
|
except KeyError:
|
||||||
# delete from active list.
|
pass
|
||||||
try:
|
# delete from active list.
|
||||||
del active_tranches[tk]
|
try:
|
||||||
except KeyError:
|
del active_tranches[tk]
|
||||||
pass
|
except KeyError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
def _Order__disable_triggers(order):
|
def _Order__disable_triggers(order):
|
||||||
@@ -168,11 +171,20 @@ _dirty:set[TrancheKey] = set()
|
|||||||
|
|
||||||
|
|
||||||
class Trigger:
|
class Trigger:
|
||||||
def __init__(self, position: int, tk: TrancheKey, value: bool):
|
|
||||||
|
class TriggerType (Enum):
|
||||||
|
Balance = 0
|
||||||
|
Activation = 1
|
||||||
|
Expiration = 2
|
||||||
|
MinLine = 3
|
||||||
|
MaxLine = 4
|
||||||
|
|
||||||
|
def __init__(self, trigger_type: TriggerType, tk: TrancheKey, value: bool):
|
||||||
"""
|
"""
|
||||||
position is the bit position of the boolean result in the tranche's constraint bitfield.
|
position is the bit position of the boolean result in the tranche's constraint bitfield.
|
||||||
"""
|
"""
|
||||||
self.position = position
|
self.position = trigger_type.value
|
||||||
|
self.name = trigger_type.name
|
||||||
self.tk = tk
|
self.tk = tk
|
||||||
self.value = value
|
self.value = value
|
||||||
_dirty.add(self.tk)
|
_dirty.add(self.tk)
|
||||||
@@ -187,15 +199,13 @@ class Trigger:
|
|||||||
|
|
||||||
@value.setter
|
@value.setter
|
||||||
def value(self, value):
|
def value(self, value):
|
||||||
state = _trigger_state.get(self.tk,0)
|
state = _trigger_state.get(self.tk, 0)
|
||||||
old = state & (1 << self.position) == 0 # NOTE: inverted
|
if not value: # this conditional is inverted
|
||||||
|
_trigger_state[self.tk] = state | (1 << self.position) # set
|
||||||
if value != old:
|
else:
|
||||||
_dirty.add(self.tk)
|
_trigger_state[self.tk] = state & ~(1 << self.position) # clear
|
||||||
if not value: # this conditional is inverted
|
_dirty.add(self.tk)
|
||||||
_trigger_state[self.tk] = state | (1 << self.position) # set
|
if value != (state & (1 << self.position) == 0):
|
||||||
else:
|
|
||||||
_trigger_state[self.tk] = state & ~(1 << self.position) # clear
|
|
||||||
self._value_changed()
|
self._value_changed()
|
||||||
|
|
||||||
|
|
||||||
@@ -207,13 +217,13 @@ class Trigger:
 
 
 async def has_funds(tk: TrancheKey):
-    log.debug(f'has funds? {tk.vault}')
+    # log.debug(f'has funds? {tk.vault}')
     order = Order.of(tk)
     balances = vault_balances.get(tk.vault, {})
-    log.debug(f'balances {balances}')
+    # log.debug(f'balances {balances}')
     token_addr = order.status.order.tokenIn
     token_balance = balances.get(token_addr)
-    log.debug(f'amount of {token_addr} = {token_balance}')
+    # log.debug(f'amount of {token_addr} = {token_balance}')
     if token_balance is None:
         # unknown balance
         token_balance = balances[token_addr] = await ERC20(token_addr).balanceOf(tk.vault)
@@ -249,15 +259,16 @@ class BalanceTrigger (Trigger):
         return BalanceTrigger(tk, value)
 
     def __init__(self, tk: TrancheKey, value: bool):
-        super().__init__(0, tk, value)
+        super().__init__(Trigger.TriggerType.Balance, tk, value)
         self.order = Order.of(self.tk)
         self.vault_token = self.tk.vault, self.order.status.order.tokenIn
         BalanceTrigger.by_vault_token[self.vault_token].add(self)
+        self._value_changed()
         # log.debug(f'initializing Balance Trigger {id(self)} {tk} {value} {self.value}')
 
     async def update(self, balance):
         self.value = await input_amount_is_sufficient(self.order, balance)
-        # log.debug(f'update balance {balance} was sufficient? {self.value}')
+        # log.debug(f'update balance {balance} was sufficient? {self.value} {self.order.key}')
 
     def remove(self):
         try:
@@ -265,6 +276,17 @@ class BalanceTrigger (Trigger):
         except (KeyError, ValueError):
             pass
 
+    def _value_changed(self):
+        ok = self.value
+        order = Order.of(self.tk)
+        old_state = order.status.state
+        if not ok and old_state == SwapOrderState.Open:
+            order.status = order.status.copy()
+            order.status.state = SwapOrderState.Underfunded
+        elif ok and old_state == SwapOrderState.Underfunded:
+            order.status = order.status.copy()
+            order.status.state = SwapOrderState.Open
+
 
 class TimeTrigger (Trigger):
 
@@ -279,7 +301,7 @@ class TimeTrigger (Trigger):
         return TimeTrigger(is_start, tk, time, time_now)
 
     def __init__(self, is_start: bool, tk: TrancheKey, time: int, time_now: int):
-        trigger_type = 1 if is_start else 2
+        trigger_type = Trigger.TriggerType.Activation if is_start else Trigger.TriggerType.Expiration
        in_future = time_now >= time
         value = in_future is is_start
         self.is_start = is_start
@@ -301,11 +323,8 @@ class TimeTrigger (Trigger):
         if time == self._time:
             return
         self._time = time
-        if self.active:
-            # remove old trigger
-            TimeTrigger.all.remove(self)
-            self.active = False
-        self.update_active(time_now)
+        in_future = time_now >= time
+        self.value = in_future is self.is_start
 
     def update_active(self, time_now: int = None, time: int = None):
         if time_now is None:
@@ -314,7 +333,7 @@ class TimeTrigger (Trigger):
         time = self._time
         next_active = time_now < time
         activate = not self.active and next_active
-        log.debug(f'update_active {self} | {self.active} => {next_active} = {activate}')
+        # log.debug(f'update_active {self} | {self.active} => {next_active} = {activate}')
         if activate:
             # log.debug(f'adding time trigger {self}')
             TimeTrigger.all.add(self)
@@ -358,6 +377,7 @@ class TimeTrigger (Trigger):
 
 class PriceLineTrigger (Trigger):
     by_pool: dict[str,set['PriceLineTrigger']] = defaultdict(set)
+    diagonals: set['PriceLineTrigger'] = set()
 
     @staticmethod
     def create(tk: TrancheKey, inverted: bool, price: dec, line: Line, is_min: bool, is_barrier: bool):
@@ -373,15 +393,20 @@ class PriceLineTrigger (Trigger):
         if inverted:
             price_now = 1/price_now
         activated = value_now < price_now if is_min else value_now > price_now
-        log.debug(f'initial price line {value_now} {"<" if is_min else ">"} {price_now} {activated}')
-        super().__init__(3 if is_min else 4, tk, activated)
+        # log.debug(f'initial price line {value_now} {"<" if is_min else ">"} {price_now} {activated}')
+        trigger_type = Trigger.TriggerType.MinLine if is_min else Trigger.TriggerType.MaxLine
+        super().__init__(trigger_type, tk, activated)
         self.inverted = inverted
         self.line = line
         self.is_min = is_min
         self.is_barrier = is_barrier
         self.pool_address = Order.of(tk).pool_address
         self.index: Optional[int] = None
+        self.active = True
+        self.last_price = price_now
         PriceLineTrigger.by_pool[self.pool_address].add(self)
+        if self.line.slope != 0:
+            PriceLineTrigger.diagonals.add(self)
 
     # lines that need evaluating add their data to these arrays, which are then sent to SIMD for evaluation. each
     # array must always have the same size as the others.
@@ -405,42 +430,58 @@ class PriceLineTrigger (Trigger):
         # oldPrice = price
         if self.inverted:
             price = 1/price
-        log.debug(f'price trigger {price}')
+        self.last_price = price
+        # log.debug(f'price trigger {price}')
         if self not in PriceLineTrigger.triggers_set:
-            self.index = len(PriceLineTrigger.y)
-            PriceLineTrigger.y.append(price)
-            PriceLineTrigger.m.append(self.line.slope)
-            PriceLineTrigger.b.append(self.line.intercept)
-            PriceLineTrigger.sign.append(1 if self.is_min else -1)
-            PriceLineTrigger.triggers.append(self)
-            PriceLineTrigger.triggers_set.add(self)
+            self.add_computation(price)
         else:
             # update an existing equation's price
             PriceLineTrigger.y[self.index] = price
 
+    def touch(self):
+        if self not in PriceLineTrigger.triggers_set:
+            self.add_computation(self.last_price)
+
+    def add_computation(self, price):
+        self.index = len(PriceLineTrigger.y)
+        PriceLineTrigger.y.append(price)
+        PriceLineTrigger.m.append(self.line.slope)
+        PriceLineTrigger.b.append(self.line.intercept)
+        PriceLineTrigger.sign.append(1 if self.is_min else -1)
+        PriceLineTrigger.triggers.append(self)
+        PriceLineTrigger.triggers_set.add(self)
+
     @staticmethod
     def end_updates(time: int):
+        for t in PriceLineTrigger.diagonals:
+            t.touch()  # always evaluate any line with a slope
+
         # here we use numpy to compute all dirty lines using SIMD
         y, m, b, sign = map(np.array,
                             (PriceLineTrigger.y, PriceLineTrigger.m, PriceLineTrigger.b, PriceLineTrigger.sign))
         line_value = m * time + b
         price_diff = sign * (y - line_value)
         activated = price_diff > 0
-        for price, line, s, a, diff in zip(y, line_value, sign, activated, price_diff):
-            log.debug(f'price: {line} {"<" if s == 1 else ">"} {price} {a} ({diff:+})')
+        # for price, line, s, a, diff in zip(y, line_value, sign, activated, price_diff):
+        #     log.debug(f'price: {line} {"<" if s == 1 else ">"} {price} {a} ({diff:+})')
         for t, activated in zip(PriceLineTrigger.triggers, activated):
             t.handle_result(activated)
         PriceLineTrigger.clear_data()
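The vectorized pass in end_updates() reduces to three array operations: each trigger contributes a current price y, a line y = m*t + b, and a sign (+1 for min lines, -1 for max lines), and the trigger fires when sign * (y - line_value) is positive. A self-contained sketch with made-up prices and lines (values are illustrative, not from the repo):

    import numpy as np

    t = 1_700_000_000                        # evaluation time (unix seconds)
    y    = np.array([1.05, 0.98, 2.00])      # current price per trigger
    m    = np.array([0.0,  0.0,  1e-9])      # line slopes
    b    = np.array([1.00, 1.00, 0.00])      # line intercepts
    sign = np.array([1,    1,   -1])         # +1 = min line, -1 = max line

    line_value = m * t + b                   # each line evaluated at time t
    activated = sign * (y - line_value) > 0  # one SIMD comparison for all lines
    # -> [ True False False]: only the first min line is crossed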
 
     def handle_result(self, value: bool):
-        if not self.is_barrier or value:  # barriers that are False do not update their values to False
+        if self.active and (not self.is_barrier or value):  # barriers that are False do not update their values to False
             self.value = value
 
     def remove(self):
+        self.active = False
         try:
             PriceLineTrigger.by_pool[self.pool_address].remove(self)
         except KeyError:
             pass
+        try:
+            PriceLineTrigger.diagonals.remove(self)
+        except KeyError:
+            pass
 
 
 async def activate_orders():
@@ -467,7 +508,8 @@ async def activate_order(order: Order):
     triggers = await OrderTriggers.create(order)
     if triggers.closed:
         log.debug(f'order {order.key} was immediately closed')
-        final_state = SwapOrderState.Filled if order.remaining == 0 or order.remaining < order.min_fill_amount \
+        final_state = SwapOrderState.Error if triggers.error \
+            else SwapOrderState.Filled if order.remaining == 0 or order.remaining < order.min_fill_amount \
             else SwapOrderState.Expired
         order.complete(final_state)
 
@@ -497,11 +539,10 @@ class TrancheTrigger:
         # tranche minLine and maxLine are relative to the pool and will be flipped from the orderspec if the
         # order is buying the base and selling the quote.
         price = pool_prices[pool['address']] * dec(10) ** -pool['decimals']
-        inverted = order.order.tokenIn != pool['base']
-        assert inverted and order.order.tokenIn == pool['quote'] or not inverted and order.order.tokenIn == pool['base']
+        inverted = order.order.inverted
         min_trigger = PriceLineTrigger.create(tk, inverted, price, tranche.minLine, True, tranche.minIsBarrier)
         max_trigger = PriceLineTrigger.create(tk, inverted, price, tranche.maxLine, False, tranche.maxIsBarrier)
-        return TrancheTrigger(order, tk, balance_trigger, activation_trigger, expiration_trigger, min_trigger, max_trigger)
+        return TrancheTrigger(order, tk, balance_trigger, activation_trigger, expiration_trigger, min_trigger, max_trigger, tranche.marketOrder)
 
     def __init__(self, order: Order, tk: TrancheKey,
                  balance_trigger: BalanceTrigger,
@@ -509,6 +550,7 @@ class TrancheTrigger:
                  expiration_trigger: Optional[TimeTrigger],
                  min_trigger: Optional[PriceLineTrigger],
                  max_trigger: Optional[PriceLineTrigger],
+                 market_order: bool,
                  ):
         assert order.key.vault == tk.vault and order.key.order_index == tk.order_index
         tranche = order.order.tranches[tk.tranche_index]
@@ -521,19 +563,33 @@ class TrancheTrigger:
         self.expiration_trigger = expiration_trigger
         self.min_trigger = min_trigger
         self.max_trigger = max_trigger
+        self.market_order = market_order
 
         self.slippage = tranche.minLine.intercept if tranche.marketOrder else 0
         self.slash_count = 0
 
         tranche_remaining = tranche.fraction_of(order.amount) - order.tranche_filled(self.tk.tranche_index)
         self.status = \
+            TrancheState.Error if self.market_order and self.slippage < MIN_SLIPPAGE - MIN_SLIPPAGE_EPSILON else \
             TrancheState.Filled if tranche_remaining == 0 or tranche_remaining < self.order.min_fill_amount else \
             TrancheState.Expired if self.expiration_trigger is not None and not self.expiration_trigger else \
-            TrancheState.Early if self.activation_trigger is None and not self.activation_trigger else \
+            TrancheState.Early if self.activation_trigger is not None and not self.activation_trigger else \
             TrancheState.Active
         _dirty.add(tk)
         TrancheTrigger.all[tk] = self
-        log.debug(f'Tranche {tk} initial status {self.status} {self}')
+        # log.debug(f'Tranche {tk} initial status {self.status} {self}')
+
+    @property
+    def order_trigger(self):
+        return OrderTriggers.instances[self.tk.order_key]
+
+    @property
+    def blocking_triggers(self):
+        triggers = [self.balance_trigger, self.activation_trigger, self.expiration_trigger,
+                    self.min_trigger, self.max_trigger]
+        return [t for t in triggers if t is not None and not t.value]
 
     def fill(self, _amount_in, _amount_out, _next_activation_time ):
@@ -551,8 +607,10 @@ class TrancheTrigger:
             self.disable()
         else:
             order_log.debug(f'tranche part-filled {self.tk} in:{_amount_in} out:{_amount_out} remaining:{remaining}')
+            if self.market_order:
+                order_log.debug(f'tranche {self.tk} delayed {config.slippage_control_delay} seconds due to slippage control')
+                self.deactivate(config.slippage_control_delay)
         self.slash_count = 0  # reset slash count
-        return filled
 
     def touch(self):
         _dirty.add(self.tk)
@@ -560,19 +618,20 @@ class TrancheTrigger:
     def check_expire(self):
         # if the expiration constraint has become False then the tranche can never execute again
         if self.expiration_trigger is not None and not self.expiration_trigger:
-            OrderTriggers.instances[self.tk.order_key].expire_tranche(self.tk.tranche_index)
+            self.order_trigger.expire_tranche(self.tk.tranche_index)
 
     def expire(self):
+        self.disable()
         if self.closed:
             return
         order_log.debug(f'tranche expired {self.tk}')
         self.status = TrancheState.Expired
-        self.disable()
 
     def kill(self):
         order_log.warning(f'tranche KILLED {self.tk}')
         self.status = TrancheState.Error
         self.disable()
+        self.order_trigger.check_complete()
 
     def slash(self):
         # slash() is called when an execute() transaction on this tranche reverts without a recognized reason.
@@ -582,15 +641,26 @@ class TrancheTrigger:
             self.kill()
         else:
             delay = round(config.slash_delay_base * config.slash_delay_mul ** (self.slash_count-1))
-            self.deactivate(timestamp()+delay)
+            self.deactivate(delay)
 
-    def deactivate(self, until):
+    def deactivate(self, interval: Union[timedelta, int, float]):
+        # todo this timestamp should be consistent with the trigger time which is blockchain
+        now = current_clock.get().timestamp
+        self.deactivate_until(now + (interval.total_seconds() if isinstance(interval, timedelta) else interval))
+
+    def deactivate_until(self, until):
         # Temporarily deactivate the tranche due to a rate limit. Use disable() to permanently halt the trigger.
-        log.debug(f'deactivating tranche {self.tk} until {from_timestamp(until)}')
+        now = current_clock.get().timestamp
+        if until < now:
+            return
         if self.activation_trigger is None:
             self.activation_trigger = TimeTrigger.create(True, self.tk, until)
         else:
-            self.activation_trigger.time = until
+            self.activation_trigger.time = max(until, self.activation_trigger.time)
+        try:
+            del active_tranches[self.tk]
+        except KeyError:
+            pass
 
     def disable(self):
         # permanently stop this trigger and deconstruct
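deactivate() now takes a relative interval (timedelta or raw seconds) and converts it to the absolute deadline that deactivate_until() expects. The conversion in isolation, assuming a timestamp-returning clock:

    from datetime import timedelta
    from typing import Union

    def to_deadline(now: float, interval: Union[timedelta, int, float]) -> float:
        # timedelta contributes its total seconds; ints/floats are taken as seconds
        seconds = interval.total_seconds() if isinstance(interval, timedelta) else interval
        return now + seconds

    assert to_deadline(100.0, 30) == 130.0
    assert to_deadline(100.0, timedelta(minutes=1)) == 160.0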
@@ -628,6 +698,10 @@ class TrancheTrigger:
     def open(self):
         return not self.closed
 
+    @property
+    def error(self):
+        return self.status == TrancheState.Error
+
     def __str__(self):
         trigs = []
         if self.balance_trigger is not None:
@@ -1,12 +1,13 @@
 import asyncio
 import logging
+from dataclasses import dataclass
 from datetime import datetime
 from typing import Optional
 
 from web3.exceptions import ContractLogicError
 from web3.types import EventData
 
-from dexorder import dec, ADDRESS_0, from_timestamp, db
+from dexorder import dec, ADDRESS_0, from_timestamp, db, config, NATIVE_TOKEN
 from dexorder.addrmeta import address_metadata
 from dexorder.base.chain import current_chain
 from dexorder.base.orderlib import Exchange
@@ -15,7 +16,7 @@ from dexorder.blockstate import BlockDict
 from dexorder.blockstate.blockdata import K, V
 from dexorder.database.model import Pool
 from dexorder.database.model.pool import OldPoolDict
-from dexorder.tokens import get_token
+from dexorder.tokens import get_token, adjust_decimals as adj_dec
 from dexorder.uniswap import UniswapV3Pool, uniswapV3_pool_address
 
 log = logging.getLogger(__name__)
@@ -23,6 +24,7 @@ log = logging.getLogger(__name__)
 
 async def get_pool(address: str) -> OldPoolDict:
     try:
+        # noinspection PyTypeChecker
         result: OldPoolDict = address_metadata[address]
     except KeyError:
         result = address_metadata[address] = await load_pool(address)
@@ -61,6 +63,7 @@ async def load_pool(address: str, *, use_db=True) -> OldPoolDict:
                     base=t0, quote=t1, fee=fee, decimals=decimals)
         log.debug(f'new UniswapV3 pool {token0["symbol"]}/{token1["symbol"]} {fee/1_000_000:.2%} '
                   f'{("."+str(decimals)) if decimals >= 0 else (str(-decimals)+".")} {address}')
+        add_mark_pool(address, t0, t1, fee)
     except ContractLogicError:
         pass
     except ValueError as v:
@@ -129,3 +132,86 @@ async def get_uniswap_data(swap: EventData) -> Optional[tuple[OldPoolDict, datet
     timestamp = await get_block_timestamp(swap['blockHash'])
     dt = from_timestamp(timestamp)
     return pool, dt, price
+
+
+#
+# Mark-to-Market
+#
+
+@dataclass
+class MarkPool:
+    addr: str
+    base: str
+    quote: str
+    fee: int
+    inverted: bool
+
+mark_pools: dict[str, MarkPool] = {}
+
+quotes = []  # ordered list of preferred quote token addresses
+
+
+def add_mark_pool(addr: str, base: str, quote: str, fee: int):
+    """
+    Called for every discovered pool, this function registers the pool if it connects to a stablecoin or quotecoin and
+    has a better fee than other pools for that pair.
+    """
+    # determine inversion
+    try:
+        base_index = quotes.index(base)
+    except ValueError:
+        base_index = None
+    try:
+        quote_index = quotes.index(quote)
+    except ValueError:
+        quote_index = None
+    if base_index is None and quote_index is None:
+        return
+    inverted = base_index is not None and (quote_index is None or base_index < quote_index)
+    if inverted:
+        base, quote = quote, base
+
+    # determine whether this pool is better than the already registered mark pool
+    add = False
+    if base not in mark_pools:
+        add = True
+    else:
+        mp = mark_pools[base]
+        mp_index = quotes.index(mp.quote)
+        try:
+            index = quotes.index(quote)
+            if index < mp_index or index == mp_index and fee < mp.fee:
+                add = True
+        except ValueError:
+            pass
+    if add:
+        pool = MarkPool(addr, base, quote, fee, inverted)
+        mark_pools[base] = pool
+        if base == config.nativecoin:
+            mark_pools[NATIVE_TOKEN] = pool
+
+
+async def mark_to_market_adj_dec(token: str, amount: dec, adjust_decimals=True) -> Optional[dec]:
+    """
+    Returns the current USD value for the amount of token.
+    """
+    if adjust_decimals:
+        amount = await adj_dec(token, amount)
+    return mark_to_market(token, amount)
+
+
+def mark_to_market(token: str, amount: dec = dec(1)) -> Optional[dec]:
+    """
+    amount must already be adjusted for decimals
+    """
+    if token in config.stablecoins:
+        return dec(1) * amount
+    try:
+        mp = mark_pools[token]
+    except KeyError:
+        log.info(f'no mark pool for token {token}')
+        return None
+    price = pool_prices[mp.addr]
+    value = amount / price if mp.inverted else amount * price
+    return mark_to_market(mp.quote, value)
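mark_to_market() resolves a token's USD value by hopping pool to pool toward a stablecoin: a WETH/USDC pool prices WETH directly, while a TOKN/WETH pool prices TOKN in WETH and then recurses. A standalone re-implementation with assumed pool data (token names and prices here are hypothetical) shows the recursion:

    from decimal import Decimal as dec
    from typing import Optional

    stablecoins = {'USDC'}
    pool_prices = {'pool_a': dec('2000'), 'pool_b': dec('0.01')}  # assumed prices
    mark_pools = {'WETH': ('pool_a', 'USDC', False),              # token -> (pool, quote, inverted)
                  'TOKN': ('pool_b', 'WETH', False)}

    def mark_to_market(token: str, amount: dec = dec(1)) -> Optional[dec]:
        if token in stablecoins:
            return amount
        if token not in mark_pools:
            return None
        pool, quote, inverted = mark_pools[token]
        price = pool_prices[pool]
        value = amount / price if inverted else amount * price
        return mark_to_market(quote, value)   # recurse toward the stablecoin

    assert mark_to_market('TOKN', dec('5')) == dec('100')   # 5 * 0.01 * 2000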
@@ -5,14 +5,13 @@ from datetime import timedelta
 from typing import Any, Iterable, Callable, Optional
 
 from eth_bloom import BloomFilter
-# noinspection PyPackageRequirements
 from websockets.exceptions import ConnectionClosedError
 
-from dexorder import Blockchain, db, current_pub, async_yield, current_w3, config, now, timestamp
+from dexorder import Blockchain, db, current_pub, async_yield, current_w3, config, now, timestamp, metric
 from dexorder.base.block import Block, latest_block
 from dexorder.base.chain import current_chain, current_clock, BlockClock
 from dexorder.blockchain.connection import create_w3_ws, create_w3
-from dexorder.blocks import cache_block, get_block, promotion_height, current_block
+from dexorder.blocks import cache_block, get_block, promotion_height, current_block, fetch_latest_block
 from dexorder.blockstate import BlockState, current_blockstate
 from dexorder.blockstate.branch import Branch
 from dexorder.blockstate.diff import DiffEntryItem
@@ -81,8 +80,7 @@ class BlockStateRunner(BlockProgressor):
                 async with w3ws as w3ws:
                     log.debug('connecting to ws provider')
                     await w3ws.provider.connect()
-                    subscription = await w3ws.eth.subscribe('newHeads')  # the return value of this call is not consistent between anvil/hardhat/rpc.
-                    log.debug(f'subscribed to newHeads {subscription}')
+                    await w3ws.eth.subscribe('newHeads')  # the return value of this call is not consistent between anvil/hardhat/rpc.
                     while self.running:
                         async for message in w3ws.ws.process_subscriptions():
                             block = Block(chain_id, message['result'])
@@ -90,15 +88,19 @@ class BlockStateRunner(BlockProgressor):
                             self.new_head_event.set()
                             log.debug(f'new head {block}')
                             if abs(block.timestamp-timestamp()) > 3:
-                                log.warning(f'Blockchain {chain_id} time is off by {block.timestamp-timestamp():.1f}s')
+                                log.info(f'Blockchain {chain_id} time is off by {block.timestamp-timestamp():.1f}s')
                             if not self.running:
                                 break
                         await async_yield()
-            except (ConnectionClosedError, TimeoutError, asyncio.TimeoutError) as e:
+            except (TimeoutError, asyncio.TimeoutError) as e:
                 log.debug(f'runner timeout {e}')
+            except ConnectionClosedError as e:
+                log.info(f'websocket connection closed {e}')
             except ConnectionRefusedError:
                 log.warning(f'Could not connect to websocket {config.ws_url}')
                 await asyncio.sleep(1)
+            except StopAsyncIteration:
+                log.info(f'websocket stream ended')
             except Exception:
                 log.exception(f'Unhandled exception during run_ws()')
             finally:
@@ -157,8 +159,7 @@ class BlockStateRunner(BlockProgressor):
         # 'latest' polling for blocks, and we push the entire block to the queue since apparently this is the only
         # rpc call Hardhat seems to consistently support. The worker must then detect the type of object pushed to the
         # work queue and either use the block directly or query for the block if the queue object is a hashcode.
-        blockdata = await w3.eth.get_block('latest')
-        block = Block(chain.id, blockdata)
+        block = await fetch_latest_block()
         if block.hash == prev_blockhash and (
                 self.state is None or self.state.root_branch is None or self.state.height == block.height):
             return prev_blockhash
@@ -273,6 +274,8 @@ class BlockStateRunner(BlockProgressor):
                 elif was_behind:
                     log.info('Runner has caught up')
                     was_behind = False
+                metric.runner_latency.observe(behind)
+                metric.runner_loops.inc()
             except Exception as e:
                 fatal('Unhandled exception in runner worker', exception=e)
             finally:
@@ -284,6 +287,7 @@ class BlockStateRunner(BlockProgressor):
         w3 = current_w3.get()
         current_blockstate.set(self.state)
         current_fork.set(fork)
+        metric.block_current.set(fork.height)
         batches = []
         pubs = []
         session = db.make_session(autocommit=False)
@@ -395,21 +399,21 @@ class BlockStateRunner(BlockProgressor):
         # propragate to the DB or Redis.
         # TIME TICKS ARE DISABLED FOR THIS REASON
         return
-        current_fork.set(fork)
-        session = db.session
-        session.begin()
-        try:
-            for callback, on_timer in self.callbacks:
-                if on_timer:
-                    # noinspection PyCallingNonCallable
-                    await maywait(callback())
-        except BaseException:
-            session.rollback()
-            raise
-        else:
-            session.commit()
-        finally:
-            db.close_session()
+        # current_fork.set(fork)
+        # session = db.session
+        # session.begin()
+        # try:
+        #     for callback, on_timer in self.callbacks:
+        #         if on_timer:
+        #             # noinspection PyCallingNonCallable
+        #             await maywait(callback())
+        # except BaseException:
+        #     session.rollback()
+        #     raise
+        # else:
+        #     session.commit()
+        # finally:
+        #     db.close_session()
 
 
     async def do_state_init_cbs(self):
@@ -424,5 +428,6 @@ class BlockStateRunner(BlockProgressor):
 def set_latest_block(block):
     cache_block(block)
     latest_block[block.chain_id] = block
+    metric.block_latest.set(block.height)
     current_clock.get().update(block.timestamp)
@@ -4,9 +4,10 @@ from typing import Optional
 from eth_abi.exceptions import InsufficientDataBytes
 from web3.exceptions import BadFunctionCallOutput
 
-from dexorder import ADDRESS_0, config, db
+from dexorder import ADDRESS_0, db, NATIVE_TOKEN, dec, current_w3
 from dexorder.addrmeta import address_metadata
 from dexorder.base.chain import current_chain
+from dexorder.blocks import current_block
 from dexorder.contract import ERC20, ContractProxy, CONTRACT_ERRORS
 from dexorder.database.model import Token
 from dexorder.database.model.token import OldTokenDict
@@ -15,11 +16,40 @@ from dexorder.metadata import get_metadata
 log = logging.getLogger(__name__)
 
 
-# todo needs chain_id
+# noinspection PyShadowingNames
+async def get_balance(addr, token_addr, *, adjust_decimals=True) -> dec:
+    if token_addr == NATIVE_TOKEN:
+        return await get_native_balance(addr, adjust_decimals=adjust_decimals)
+    else:
+        return await get_erc20_balance(addr, token_addr, adjust_decimals=adjust_decimals)
+
+
+# noinspection PyShadowingNames
+async def get_erc20_balance(addr, token_addr, *, adjust_decimals=True):
+    value = dec(await ERC20(token_addr).balanceOf(addr))
+    if adjust_decimals:
+        token = await get_token(token_addr)
+        value *= dec(10) ** dec(-token['decimals'])
+    return value
+
+
+# noinspection PyShadowingNames
+async def get_native_balance(addr, *, adjust_decimals=True) -> dec:
+    try:
+        block_id = current_block.get().height
    except LookupError:
+        block_id = 'latest'
+    value = dec(await current_w3.get().eth.get_balance(addr, block_identifier=block_id))
+    if adjust_decimals:
+        value /= 10 ** 18
+    return value
+
+
 async def get_token(address) -> Optional[OldTokenDict]:
     if address == ADDRESS_0:
         raise ValueError('No token at address 0')
     try:
+        # noinspection PyTypeChecker
         return address_metadata[address]
     except KeyError:
         result = address_metadata[address] = await load_token(address)
@@ -61,9 +91,10 @@ async def load_token(address: str) -> Optional[OldTokenDict]:
     try:
         decimals = await dec_prom
     except CONTRACT_ERRORS:
-        log.warning(f'token {address} has no decimals()')
+        log.info(f'token {address} has no decimals()')
         decimals = 0
-    approved = config.metadata is None
+        return None  # we do not support coins that don't specify decimals.
+    approved = False  # never approve new coins
     chain_id = current_chain.get().id
     symbol = await symbol_prom
     name = await name_prom
@@ -78,5 +109,15 @@ async def load_token(address: str) -> Optional[OldTokenDict]:
         td['symbol'] = md['s']
     if 'd' in md:
         td['decimals'] = md['d']
-    log.debug(f'new token {name} {symbol} {address}{" approved" if approved else ""}')
+    log.debug(f'new token {name} {symbol} {address}')
     return td
+
+
+async def adjust_decimals(token, value):
+    if token == NATIVE_TOKEN:
+        decimals = 18
+    else:
+        token = await get_token(token)
+        decimals = token['decimals']
+    value *= dec(10) ** dec(-decimals)
+    return value
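adjust_decimals() is the usual raw-integer-to-decimal scaling, with 18 decimals assumed for the native coin. A quick illustration of the scaling on its own:

    from decimal import Decimal as dec

    def scale(raw: int, decimals: int) -> dec:
        return dec(raw) * dec(10) ** dec(-decimals)

    assert scale(1_500_000, 6) == dec('1.5')   # e.g. a 6-decimal stablecoin
    assert scale(10 ** 18, 18) == dec('1')     # one native-coin unit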
@@ -1,21 +1,19 @@
+import asyncio
 import logging
 from abc import abstractmethod
-from dataclasses import dataclass
-from typing import Union, Optional
+from typing import Optional
 from uuid import uuid4
 
-from sqlalchemy import select
 from web3.exceptions import TransactionNotFound, ContractPanicError, ContractLogicError
 
 from dexorder import db, current_w3, Account
-from dexorder.base import TransactionReceiptDict, TransactionRequest, transaction_request_registry
+from dexorder.alert import warningAlert
+from dexorder.base import TransactionReceiptDict, TransactionRequest
 from dexorder.base.chain import current_chain
-from dexorder.base.order import TrancheKey, OrderKey
-from dexorder.base.orderlib import PriceProof
-from dexorder.blockstate.diff import DiffEntryItem
-from dexorder.blockstate.fork import current_fork, Fork
+from dexorder.blockstate.fork import current_fork
 from dexorder.contract.contract_proxy import ContractTransaction
 from dexorder.database.model.transaction import TransactionJob, TransactionJobState
+from dexorder.util import hexstr
 
 log = logging.getLogger(__name__)
 
@@ -32,7 +30,7 @@ class TransactionHandler:
         TransactionHandler.instances[tag] = self
 
     @abstractmethod
-    async def build_transaction(self, job_id: int, tr: TransactionRequest) -> ContractTransaction: ...
+    async def build_transaction(self, job_id: int, tr: TransactionRequest) -> Optional[ContractTransaction]: ...
 
     @abstractmethod
     async def complete_transaction(self, job: TransactionJob, receipt: TransactionReceiptDict) -> None: ...
@@ -40,50 +38,37 @@ class TransactionHandler:
     @abstractmethod
     async def transaction_exception(self, job: TransactionJob, e: Exception) -> None: ...
 
+    # noinspection PyMethodMayBeStatic
+    async def acquire_account(self) -> Optional[Account]:
+        try:
+            async with asyncio.timeout(1):
+                return await Account.acquire()
+        except asyncio.TimeoutError:
+            return None
 
-@dataclass
-class TrancheExecutionRequest (TransactionRequest):
-    TYPE = 'te'
-
-    # type='te' for tranche execution
-    vault: str
-    order_index: int
-    tranche_index: int
-    price_proof: Union[None,dict,tuple[int]]
-
-    def __init__(self, vault: str, order_index: int, tranche_index: int, price_proof: Union[None,dict,tuple[int]], **_):
-        super().__init__(TrancheExecutionRequest.TYPE)
-        self.vault = vault
-        self.order_index = order_index
-        self.tranche_index = tranche_index
-        self.price_proof = price_proof
-
-    @property
-    def order_key(self):
-        return OrderKey(self.vault, self.order_index)
-
-    @property
-    def tranche_key(self):
-        return TrancheKey(self.vault, self.order_index, self.tranche_index)
-
-# Must register the class for deserialization
-transaction_request_registry[TrancheExecutionRequest.TYPE] = TrancheExecutionRequest
-
-
-def new_tranche_execution_request(tk: TrancheKey, proof: Optional[PriceProof]=None) -> TrancheExecutionRequest:
-    if proof is None:
-        proof = PriceProof(0)
-    return TrancheExecutionRequest(tk.vault, tk.order_index, tk.tranche_index, proof.dump())
+    # noinspection PyMethodMayBeStatic
+    async def release_account(self, account: Account):
+        account.release()
 
+in_flight = set()
+accounts_in_flight: dict[bytes, Account] = {}  # tx_id_bytes: account
 
-def submit_transaction_request(tr: TransactionRequest):
+
+def submit_transaction_request(tr: TransactionRequest) -> Optional[TransactionJob]:
     """
     Once a transaction request has been submitted, it is this module's responsibility to see that it gets mined, at
-    which point `tr.complete_transaction()` is called with the transaction receipt.
-    The building of a transaction can also fail,
+    which point `tr.complete_transaction()` is called with the transaction receipt. If the same-keyed request is
+    already in-flight, None is returned.
     """
+    key = tr.type, tr.key
+    if key in in_flight:
+        log.debug(f'transaction request {tr.key} already in flight')
+        return None
     job = TransactionJob(id=uuid4(), chain=current_chain.get(), height=current_fork.get().height,
                          state=TransactionJobState.Requested, request=tr)
     db.session.add(job)
+    in_flight.add(key)
    return job
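Deduplication hinges on the (type, key) tuple added to in_flight at submit time and discarded later in cleanup_jobs(). The same pattern in isolation (the request type and key values are made up):

    in_flight: set[tuple] = set()

    def submit(req_type: str, key: tuple) -> bool:
        k = (req_type, key)
        if k in in_flight:
            return False              # duplicate: already being mined
        in_flight.add(k)
        return True

    assert submit('vcr', (42161, '0xOwner', 0))
    assert not submit('vcr', (42161, '0xOwner', 0))    # second submission dropped
    in_flight.discard(('vcr', (42161, '0xOwner', 0)))  # cleared when the job ends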
@@ -107,36 +92,60 @@ async def create_and_send_transactions():
             # these errors can be thrown immediately when the tx is tested for gas
             log.warning(f'failed to build transaction request for {job.request.__class__.__name__} {job.id}')
             job.state = TransactionJobState.Error
-            db.session.add(job)
             await handler.transaction_exception(job, x)
+            end_job(job)
             return
         except Exception as x:
             log.warning(f'unable to send transaction for job {job.id}', exc_info=x)
             return
+        if ctx is None:
+            log.info(f'Transaction request {job.request.__class__.__name__} {job.id} declined to build a tx.')
+            job.state = TransactionJobState.Declined
+            end_job(job)
+            return
         w3 = current_w3.get()
-        account = Account.get_named(handler.tag)
+        account = await handler.acquire_account()
         if account is None:
-            account = Account.get()
-        if account is None:
-            log.error(f'No account available for transaction request type "{handler.tag}"')
+            warningAlert(f'No account available for job {job.id} type "{handler.tag}"', 'no account available')
             continue
-        await ctx.sign(account)
-        job.state = TransactionJobState.Signed
-        job.tx_id = ctx.id_bytes
-        job.tx_data = ctx.data
-        db.session.add(job)
-        log.info(f'servicing job {job.request.__class__.__name__} {job.id} with tx {ctx.id}')
         try:
-            sent = await w3.eth.send_raw_transaction(job.tx_data)
+            await ctx.sign(account)
+        except Exception:
+            await handler.release_account(account)
+            raise
+        log.info(f'servicing job {job.request.__class__.__name__} {job.id} with account {account.address} nonce {ctx.tx["nonce"]} tx {ctx.id}')
+        # noinspection PyBroadException
+        try:
+            sent = await w3.eth.send_raw_transaction(ctx.data)
+        except ValueError as e:
+            try:
+                msg = e.args[0].get('message','')
+            except IndexError:
+                msg = ''
+            if msg.startswith('nonce too low'):
+                # Nonce too low
+                log.warning(f'Account {account.address} nonce too low')
+                log.info(f'Account nonce is {account._nonce} ({ctx.tx["nonce"]}) but should be {await current_w3.get().eth.get_transaction_count(account.address, 'pending')}')
+                account.reset_nonce()
+            elif msg.startswith('insufficient funds'):
+                warningAlert('Account Empty', f'Account {account.address} is out of funds!')
+            else:
+                log.exception(f'Failure sending transaction for job {job.id}')
+            await handler.release_account(account)
         except:
             log.exception(f'Failure sending transaction for job {job.id}')
             # todo pager
             # todo send state unknown!
+            await handler.release_account(account)
         else:
-            assert sent == job.tx_id
+            account.tx_id = hexstr(ctx.id_bytes)
+            accounts_in_flight[ctx.id_bytes] = account
             job.state = TransactionJobState.Sent
-            db.session.add(job)
+            job.tx_id = ctx.id_bytes
+            job.tx_data = ctx.data
+            assert sent == job.tx_id
+
+
+ended_jobs = []
 
 async def handle_transaction_receipts():
     # log.debug('handle_transaction_receipts')
@@ -149,32 +158,40 @@ async def handle_transaction_receipts():
     try:
         receipt: TransactionReceiptDict = await w3.eth.get_transaction_receipt(job.tx_id)
     except TransactionNotFound:
-        pass
-    else:
-        fork = current_fork.get()
-        assert fork is not None
-        if fork.branch.contiguous and receipt['blockHash'] in fork.branch.path or \
-                fork.branch.disjoint and receipt['blockNumber'] <= fork.height:
-            try:
-                handler = TransactionHandler.of(job.request.type)
-            except KeyError:
-                # todo remove bad request?
-                log.warning(f'ignoring transaction request with bad type "{job.request.type}"')
-            else:
-                await handler.complete_transaction(job, receipt)
-
-
-def finalize_transactions(_fork: Fork, diffs: list[DiffEntryItem]):
-    # noinspection PyTypeChecker
-    open_jobs = db.session.scalars(select(TransactionJob).where(
-        TransactionJob.chain == current_chain.get(),
-        TransactionJob.state == TransactionJobState.Sent
-    )).all()
-    open_txs = {job.tx_id:job for job in open_jobs}
-    for diff in diffs:
-        if diff.series == 'mined_txs' and diff.key in open_txs:
-            job = open_txs[diff.key]
-            job.state = TransactionJobState.Mined
-            job.receipt = diff.value
-            db.session.add(job)
+        return
+    fork = current_fork.get()
+    assert fork is not None
+    if fork.branch.contiguous and receipt['blockHash'] in fork.branch.path or \
+            fork.branch.disjoint and receipt['blockNumber'] <= fork.height:
+        job.state = TransactionJobState.Mined
+        job.receipt = receipt
+        try:
+            handler = TransactionHandler.of(job.request.type)
+        except KeyError:
+            # todo remove bad request?
+            log.warning(f'ignoring transaction request with bad type "{job.request.type}"')
+        else:
+            await handler.complete_transaction(job, receipt)
+            end_job(job)
+            try:
+                await handler.release_account(accounts_in_flight.pop(job.tx_id))
+            except KeyError:
+                pass
+
+
+def end_job(job):
+    ended_jobs.append(job)
+
+
+async def cleanup_jobs():
+    for job in ended_jobs:
+        log.debug(f'ending job {job.id}')
+        if job.tx_id in accounts_in_flight:
+            try:
+                handler = TransactionHandler.of(job.request.type)
+                await handler.release_account(accounts_in_flight.pop(job.tx_id))
+            except KeyError:
+                log.warning(f'ignoring transaction request with bad type "{job.request.type}"')
+        in_flight.discard((job.request.type, job.request.key))
+        db.session.delete(job)
+    ended_jobs.clear()
@@ -16,7 +16,7 @@ def align_decimal(value, left_columns) -> str:
     return ' ' * pad + s
 
 
-def hexstr(value: Union[HexBytes, bytes, str]):
+def hexstr(value: Union[HexBytes, bytes, str]) -> str:
     """ returns an 0x-prefixed hex string """
     if type(value) is HexBytes:
         return value.hex()
@@ -4,8 +4,8 @@ from typing import Union, Awaitable, TypeVar
 
 
 async def async_yield():
-    # a value of exactly 0 doesn't seem to work as well, so we set 1 nanosecond
-    await asyncio.sleep(1e-9)
+    # a value of exactly 0 doesn't seem to work as well, so we use 1 microsecond
+    await asyncio.sleep(1e-6)
 
 
 Args = TypeVar('Args')
@@ -25,3 +25,22 @@ def encode_IEEE754(value: float) -> int:
 
 def decode_IEEE754(value: int) -> float:
     return struct.unpack('>f', struct.pack('>I', value))[0]
+
+
+def to_base_exp(value, precision=8, roundingFunc=round) -> tuple[int, int]:
+    """
+    Convert a value to base-2 exponent form.
+    Precision is the number of bits available to the base component
+    """
+    if value <= 0:
+        raise ValueError("Value must be greater than zero")
+    max_base = 2 ** precision
+    exp = int(math.log2(value)) - precision
+    base = roundingFunc(value / (2 ** exp))
+    if base >= max_base:
+        base //= 2
+        exp += 1
+    return base, exp
+
+
+def from_base_exp(base, exp):
+    return base << exp
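to_base_exp() packs a positive value into a mantissa of `precision` bits plus a power-of-two exponent; from_base_exp() recovers it exactly when the value fits the mantissa and otherwise errs by at most one mantissa step (note the left shift assumes a non-negative exponent). A quick round-trip check, repeating the definition above so it runs standalone:

    import math

    def to_base_exp(value, precision=8, roundingFunc=round):
        if value <= 0:
            raise ValueError("Value must be greater than zero")
        max_base = 2 ** precision
        exp = int(math.log2(value)) - precision
        base = roundingFunc(value / (2 ** exp))
        if base >= max_base:
            base //= 2
            exp += 1
        return base, exp

    base, exp = to_base_exp(1000)        # -> (250, 2): 250 * 2**2 == 1000 exactly
    assert base << exp == 1000
    base, exp = to_base_exp(1_000_003)   # inexact: bounded by one mantissa step
    assert abs((base << exp) - 1_000_003) <= 2 ** exp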
@@ -1,11 +1,13 @@
+import asyncio
 import functools
 import logging
 
+from dexorder import current_pub, dec
 from dexorder.base.chain import current_chain
 from dexorder.blockstate import BlockDict
 from dexorder.contract import ERC20, CONTRACT_ERRORS
 from dexorder.contract.dexorder import VaultContract, vault_address
-from dexorder.util import json
+from dexorder.util import json, align_decimal
 
 log = logging.getLogger(__name__)
 
@@ -77,3 +79,29 @@ def balance_adjuster(vault, token_address, amount):
         result[taddr] = new_amt
         return result
     return functools.partial(_adjust, vault, token_address, amount)
+
+
+def publish_vaults(chain_id, owner):
+    vaults = []
+    for num in range(MAX_VAULTS):
+        addr = vault_address(owner, num)
+        if addr in vault_owners:
+            vaults.append(addr)
+        else:
+            break
+    log.debug(f'publish_vaults {chain_id} {owner} {vaults}')
+    current_pub.get()(f'{chain_id}|{owner}', 'vaults', chain_id, owner, vaults)
+
+
+async def refresh_vault_balances(vault, *tokens):
+    amounts = await asyncio.gather(*(ERC20(token).balanceOf(vault) for token in tokens))
+
+    def _adjust(vaddr, toks, amts, old_balances):
+        result = dict(old_balances)  # copy
+        for t, a in zip(toks, amts):
+            result[t] = a
+        return result
+
+    vault_balances.modify(vault, functools.partial(_adjust, vault, tokens, amounts))
+
+
+def pretty_balances(b: dict[str,dec], padding=8) -> str:
+    return '\n'.join(f'{k:>} {align_decimal(v,padding)}' for k,v in b.items())
|
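The nested _adjust helpers above follow a copy-on-write convention: they receive the old balance mapping and return a fresh dict rather than mutating in place, which is what a block-state container like BlockDict needs to keep per-block snapshots. A toy illustration of that calling convention (TinyStore is invented; the real vault_balances is a BlockDict with more machinery):

class TinyStore:
    """Illustrative stand-in for the modify() calling convention."""
    def __init__(self):
        self._data = {}

    def modify(self, key, adjuster):
        old = self._data.get(key, {})
        self._data[key] = adjuster(old)   # adjuster returns a new dict

store = TinyStore()
store.modify('0xVault', lambda old: {**old, '0xTokenA': 42})
store.modify('0xVault', lambda old: {**old, '0xTokenB': 7})
print(store._data)   # {'0xVault': {'0xTokenA': 42, '0xTokenB': 7}}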
103
src/dexorder/vaultcreationhandler.py
Normal file
@@ -0,0 +1,103 @@
+import logging
+from dataclasses import dataclass
+from typing import Optional, Any
+
+from eth_utils import to_checksum_address
+from web3.exceptions import ContractLogicError
+
+from dexorder import db
+from dexorder.accounting import accounting_transaction_gas
+from dexorder.base import TransactionReceiptDict, TransactionRequest, transaction_request_deserializers
+from dexorder.base.chain import current_chain
+from dexorder.contract import ContractProxy
+from dexorder.contract.contract_proxy import ContractTransaction
+from dexorder.contract.dexorder import get_factory_contract, vault_address
+from dexorder.database.model import TransactionJob
+from dexorder.database.model import VaultCreationRequest as DbVaultCreationRequest
+from dexorder.database.model.accounting import AccountingSubcategory
+from dexorder.transactions import TransactionHandler, submit_transaction_request
+from dexorder.vault_blockdata import publish_vaults, vault_owners
+
+log = logging.getLogger(__name__)
+
+
+@dataclass
+class VaultCreationRequest (TransactionRequest):
+    TYPE = 'vcr'
+
+    chain_id: int
+    owner: str
+    num: int
+
+    def __init__(self, chain_id: int, owner: str, num: int):
+        super().__init__(VaultCreationRequest.TYPE, (chain_id, owner, num))
+        self.chain_id = chain_id
+        self.owner = to_checksum_address(owner)
+        self.orig_owner = owner  # for the database key
+        self.num = num
+
+    def key(self) -> Any:
+        return self.chain_id, self.owner, self.num
+
+
+def deserialize_vault_creation_request(**data) -> VaultCreationRequest:
+    return VaultCreationRequest(data['chain_id'], data['owner'], data['num'])
+
+
+# Must register the class for deserialization
+transaction_request_deserializers[VaultCreationRequest.TYPE] = deserialize_vault_creation_request
+
+in_flight = set()  # (chain, owner, num)
+
+
+class VaultCreationHandler (TransactionHandler):
+
+    def __init__(self):
+        super().__init__(VaultCreationRequest.TYPE)
+
+    async def build_transaction(self, job_id: int, tr: VaultCreationRequest) -> Optional[ContractTransaction]:
+        owner_addr = to_checksum_address(tr.owner)
+        vault_addr = vault_address(owner_addr, tr.num)
+        if vault_owners.get(vault_addr):
+            # existing vault detected
+            publish_vaults(tr.chain_id, owner_addr)
+            return None
+        factory = get_factory_contract()
+        try:
+            return await factory.build.deployVault(owner_addr, tr.num)
+        except ContractLogicError:
+            in_flight.discard((tr.chain_id, tr.owner, tr.num))
+            # maybe the vault already exists?
+            owner = await ContractProxy(vault_addr, 'Vault').owner()
+            if owner == owner_addr:
+                log.debug(f'detected existing vault at {vault_addr}')
+                publish_vaults(tr.chain_id, owner)
+                return None
+            raise
+        except Exception:
+            in_flight.discard((tr.chain_id, tr.owner, tr.num))
+            raise
+
+    async def complete_transaction(self, job: TransactionJob, receipt: TransactionReceiptDict) -> None:
+        await accounting_transaction_gas(receipt, AccountingSubcategory.VaultCreation)  # vault creation gas
+        # noinspection PyTypeChecker
+        req: VaultCreationRequest = job.request
+        in_flight.discard((req.chain_id, req.owner, req.num))
+
+    async def transaction_exception(self, job: TransactionJob, e: Exception) -> None:
+        log.error(f'exception from createVault transaction: {job.tx_id}', exc_info=e)
+
+
+VaultCreationHandler()
+
+last_seen = None
+
+
+def handle_vault_creation_requests():
+    for req in db.session.query(DbVaultCreationRequest).where(
+            DbVaultCreationRequest.vault == None, DbVaultCreationRequest.chain == current_chain.get()):
+        req: DbVaultCreationRequest
+        owner = to_checksum_address(req.owner)
+        key = req.chain.id, owner, req.num
+        if key not in in_flight:
+            vcr = VaultCreationRequest(*key)
+            submit_transaction_request(vcr)
+            in_flight.add(key)
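The new module registers its deserializer under the 'vcr' tag so a persisted TransactionRequest can be rebuilt by type when a job is reloaded. The registry-dispatch pattern in isolation (names and record shape simplified; the real serialized form is an assumption):

registry = {}   # stand-in for transaction_request_deserializers

def deserialize_vcr(**data):
    # rebuilds the request from its persisted fields
    return ('vcr', data['chain_id'], data['owner'], data['num'])

registry['vcr'] = deserialize_vcr

def deserialize(record: dict):
    # dispatch on the stored type tag, as the module does with
    # transaction_request_deserializers[VaultCreationRequest.TYPE]
    rec = dict(record)
    return registry[rec.pop('type')](**rec)

print(deserialize({'type': 'vcr', 'chain_id': 42161, 'owner': '0xOwner', 'num': 0}))

The in_flight set plays a complementary role: handle_vault_creation_requests may be called repeatedly, and keying on (chain, owner, num) keeps it from submitting the same deployment twice while a transaction is pending.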
@@ -4,7 +4,7 @@ from asyncio import Queue
 from datetime import timedelta
 from typing import Union, Callable

-from dexorder import config, db, now, current_w3
+from dexorder import config, db, now, current_w3, metric
 from dexorder.base.block import Block, BlockInfo, latest_block
 from dexorder.base.chain import current_chain
 from dexorder.blocks import promotion_height
@@ -57,6 +57,7 @@ class BlockWalker (BlockProgressor):
         processed_height = cur + config.backfill if config.backfill < 0 else cur

         log.info(f'walker starting at block {processed_height}')
+        metric.block_current.set(processed_height)
         last_flush = processed_height if self.flush_type == 'blocks' else now() if self.flush_type == 'time' else None
         prev_height = None
         session = db.session
@@ -67,6 +68,7 @@ class BlockWalker (BlockProgressor):
             latest_blockdata: BlockInfo = await w3.eth.get_block('latest')
             latest = Block(chain_id, latest_blockdata)
             latest_block[chain_id] = latest
+            metric.block_latest.set(latest.height)
             if prev_height is None or latest.height > prev_height:
                 prev_height = latest.height
                 log.debug(f'polled new block {latest.height}')
@@ -93,6 +95,7 @@ class BlockWalker (BlockProgressor):
                 db.session.commit()
                 db.session.begin()
                 processed_height = cur_height
+                metric.block_current.set(cur_height)
                 if not self.running or config.walker_stop is not None and config.walker_stop <= processed_height:
                     break
                 await asyncio.sleep(config.polling or 1)
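These three hunks instrument the walker with two gauges: block_latest (chain head as polled) and block_current (last block processed), whose difference exposes processing lag. The diff only shows .set() calls on the metric module; if it is backed by prometheus_client (an assumption, not confirmed by the source), a standalone equivalent would look like:

from prometheus_client import Gauge, start_http_server

# hypothetical equivalents of metric.block_current / metric.block_latest
block_current = Gauge('block_current', 'last block processed by the walker')
block_latest = Gauge('block_latest', 'latest block seen on chain')

start_http_server(8000)        # expose /metrics for scraping
block_latest.set(12_345_678)
block_current.set(12_345_670)
# walker lag is then observable as block_latest - block_current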