BlockState working in memory with basic event triggers

This commit is contained in:
Tim Olson
2023-09-18 17:41:56 -04:00
commit 68647364cd
44 changed files with 1786 additions and 0 deletions

5
.gitignore vendored Normal file
View File

@@ -0,0 +1,5 @@
venv/
*secret*
dexorder.toml
/contract/
.idea

111
alembic.ini Normal file
View File

@@ -0,0 +1,111 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# this is set programmatically in env.py
sqlalchemy.url =
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

79
alembic/env.py Normal file
View File

@@ -0,0 +1,79 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# DEXORDER SETUP
from sys import path
path.append('src')
import dexorder.db.model
target_metadata = dexorder.db.model.Base.metadata
config.set_main_option('sqlalchemy.url', dexorder.config.db_url)
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the context with just a URL and not an Engine (an Engine
    would also be acceptable).  Skipping Engine creation means no DBAPI
    needs to be available; context.execute() calls emit the given SQL
    to the script output instead.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Creates an Engine from the [alembic] config section and associates a
    live connection with the migration context.
    """
    section = config.get_section(config.config_ini_section, {})
    connectable = engine_from_config(
        section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()

27
alembic/script.py.mako Normal file
View File

@@ -0,0 +1,27 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
import dexorder.db
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

9
requirements.txt Normal file
View File

@@ -0,0 +1,9 @@
sqlalchemy~=2.0.20
alembic~=1.11.3
omegaconf~=2.3.0
web3==6.9.0
psycopg2-binary
orjson~=3.9.7
sortedcontainers
hexbytes~=0.3.1
defaultlist~=1.0.0

21
src/dexorder/__init__.py Normal file
View File

@@ -0,0 +1,21 @@
class _NARG:
    """Sentinel type for "argument not supplied" defaults.

    Unlike ``None`` (which a caller may pass deliberately), the ``NARG``
    singleton marks a parameter that was not specified at all.  It is
    falsy so callers can simply test ``if not value``.
    """

    def __bool__(self):
        return False


# The module-wide sentinel instance used in argument defaults.
NARG = _NARG()
ADDRESS_0 = '0x0000000000000000000000000000000000000000'
WEI = 1
GWEI = 1_000_000_000
ETH = 1_000_000_000_000_000_000
# noinspection PyProtectedMember
from .util.cwd import _cwd
_cwd() # do this first so that config has the right current working directory
# ordering here is important!
from .base.chain import Blockchain # the singletons are loaded into the dexorder.blockchain.* namespace
from .util import async_yield
from .base.fixed import Fixed2, FixedDecimals, Dec18
from .configuration import config
from .base.account import Account # must come before context
from .base.context import ctx
from .base.token import Token, tokens

View File

View File

@@ -0,0 +1,66 @@
from typing import Union, Optional
import eth_account
from eth_account.signers.local import LocalAccount
from web3.middleware import construct_sign_and_send_raw_middleware
from dexorder import NARG, config
# this is just here for typing the extra .name. the __new__() function returns an eth_account...LocalAccount
# we do it this way because web3py expects a LocalAccount object but we cannot construct one directly with a super()
# call but must instead use a factory :(
class Account (LocalAccount):
    """A LocalAccount that additionally carries a human-readable ``name``,
    the original key string, and pre-built signing middleware."""

    @staticmethod
    # noinspection PyInitNewSignature
    def get(account: Union[str, LocalAccount, None] = NARG) -> Optional[LocalAccount]:
        """Resolve *account* into a LocalAccount.

        :param account: an account name configured in ``config.accounts``, a raw
            private-key string, an already-built account object (returned
            unchanged), or omitted to use ``config.account``.
        :raises ValueError: when no usable key can be derived from the input.

        Fixed: the annotation was ``[Union,str]`` — a list literal containing
        the Union object, not a type.
        """
        if account is NARG:
            account = config.account
        if type(account) is not str:
            # already an account object (or None): pass through unchanged
            return account
        # the string may be a configured account name or a literal key
        key_str = config.accounts.get(account, account)
        try:
            local_account = eth_account.Account.from_key(key_str)
            return Account(local_account, key_str, account)
        except ValueError:
            try:
                # was the key missing a leading '0x'?
                fixed = '0x' + key_str
                local_account = eth_account.Account.from_key(fixed)
                print(f'WARNING: account "{account}" is missing a leading "0x"')
                return Account(local_account, fixed, account)
            except ValueError:
                pass
            try:
                # was the key an integer posing as a string?
                converted = f'{int(key_str):#0{66}x}'
                local_account = eth_account.Account.from_key(converted)
                print(f'WARNING: account "{account}" is set as an integer instead of a string. Converted to: {converted}')
                return Account(local_account, converted, account)
            except ValueError:
                pass
            raise ValueError(f'Could not construct account for name "{account}"')

    def __init__(self, local_account: LocalAccount, key_str, name: str):  # todo chain_id?
        super().__init__(local_account._key_obj, local_account._publicapi)  # from digging into the source code
        self.name = name
        self.transaction_counter = 0  # used by GasHandler to detect when new transactions were fired
        self.key_str = key_str
        # middleware that signs and sends raw transactions for this key
        self.signing_middleware = construct_sign_and_send_raw_middleware(self)

    def attach(self, w3):
        """Make this account w3's default account and (re)install its signer middleware."""
        w3.eth.default_account = self.address
        try:
            w3.middleware_onion.remove('account_signer')
        except ValueError:
            pass  # no previous signer installed
        w3.middleware_onion.add(self.signing_middleware, 'account_signer')

    def balance(self):
        """Native-coin balance of this account (in wei) on the context's chain."""
        # fixed: ctx was never imported at module level (NameError at runtime);
        # import locally to avoid a circular import with the dexorder package
        from dexorder import ctx
        return ctx.w3.eth.get_balance(self.address)

    def __str__(self):
        return self.name

View File

@@ -0,0 +1,220 @@
from collections import defaultdict
from contextvars import ContextVar
from logging import Logger
from typing import Union, TypeVar, Generic, Any
from sortedcontainers import SortedList
from dexorder import NARG
from dexorder.db.model.block import Block
log = Logger('dexorder.blockstate')
class BlockState:
    """
    Since recent blocks can be part of temporary forks, we need to be able to undo certain operations if they were part of a reorg. Instead of implementing
    undo, we recover state via snapshot plus replay of recent diffs. When old blocks become low enough in the blockheight they may be considered canonical
    at which point the deltas may be reliably incorporated into a new snapshot or rolling permanent collection. BlockState manages separate memory areas
    for every block, per-block state that defaults to its parent's state, up the ancestry tree to the root. State clients may read the state for their block,
    applying any diffs from the root state to the target block.
    """

    # sentinel stored in a diff to mark a key as deleted in that block
    DELETE = object()

    # registry of the active BlockState per chain id
    by_chain: dict[int, 'BlockState'] = {}

    @staticmethod
    def cur() -> 'BlockState':
        """The BlockState bound to the current context (see set_cur)."""
        return _cur.get()

    @staticmethod
    def set_cur(value: 'BlockState'):
        """Bind *value* as the current context's BlockState."""
        _cur.set(value)

    def __init__(self, root_block: Block, root_state: dict):
        # the newest block considered canonical; diffs replay forward from here
        self.root_block: Block = root_block
        # consolidated state as of root_block
        self.root_state: dict = root_state
        # (height, block) pairs ordered by height, used for pruning on promotion
        self.by_height: SortedList[tuple[int, Block]] = SortedList(key=lambda x: x[0])
        # every tracked block by hash, including the root
        self.by_hash: dict[bytes, Block] = {root_block.hash: root_block}
        # block hash -> series key -> item key -> value (or the DELETE sentinel)
        self.diffs: dict[bytes, dict[Any, dict[Any, Union[Any, BlockState.DELETE]]]] = defaultdict(dict)  # by series
        # block hash -> nearest known ancestor block (not necessarily the parent)
        self.ancestors: dict[bytes, Block] = {}
        BlockState.by_chain[root_block.chain] = self

    def add_block(self, block: Block) -> Union[int, Block, None]:
        """
        If block is the same age as root_height or older, it is ignored and None is returned. Otherwise, returns the found parent block if available
        or else self.root_height.
        The ancestor block is set in the ancestors dictionary and any state updates to block are considered to have occurred between the registered ancestor
        block and the given block. This could be an interval of many blocks, and the ancestor does not need to be the block's immediate parent.
        """
        # check height
        height_diff = block.height - self.root_block.height
        if height_diff <= 0:
            log.debug(f'IGNORING old block {block}')
            return None
        if block.hash not in self.by_hash:
            self.by_hash[block.hash] = block
            self.by_height.add((block.height, block))
            log.debug(f'new block state {block}')
        # NOTE(review): re-adding an already-known block still overwrites its
        # ancestors entry below — confirm that is intended
        parent = self.by_hash.get(block.parent)
        if parent is None:
            # parent unknown: anchor to the root and return the root height so
            # the caller can backfill the missing range
            self.ancestors[block.hash] = self.root_block
            return self.root_block.height
        else:
            self.ancestors[block.hash] = parent
            return parent

    def promote_root(self, block):
        """Make *block* the new root: fold the diffs from the old root up to
        *block* into root_state and prune everything at or below its height."""
        assert block.hash in self.by_hash
        diffs = self.collect_diffs(block)
        BlockState.apply_diffs(self.root_state, diffs)
        del self.by_hash[self.root_block.hash]
        # drop all tracked blocks no taller than the new root, except the new root itself
        while self.by_height and self.by_height[0][0] <= block.height:
            height, dead = self.by_height.pop(0)
            if dead is not block:
                try:
                    del self.by_hash[dead.hash]
                except KeyError:
                    pass
                try:
                    del self.diffs[dead.hash]
                except KeyError:
                    pass
                try:
                    del self.ancestors[dead.hash]
                except KeyError:
                    pass
        self.root_block = block

    @staticmethod
    def apply_diffs(obj, diffs):
        """Apply a {series: {key: value-or-DELETE}} diff structure onto *obj* in place."""
        for series_key, series in diffs.items():
            for key, value in series.items():
                if value is BlockState.DELETE:
                    # deletion of a missing key is a no-op
                    try:
                        del obj[series_key][key]
                    except KeyError:
                        pass
                else:
                    series_obj = obj.get(series_key)
                    if series_obj is None:
                        obj[series_key] = series_obj = {}
                    series_obj[key] = value

    def collect_diffs(self, block, series_key=NARG):
        """Merge the diffs along the ancestor chain from *block* back to the root.

        Newer blocks win: setdefault keeps the first value seen while walking
        backwards. With the default series_key the result maps
        series -> {key: value}; with an explicit series_key it maps
        key -> value for that one series only.
        """
        diffs = {}
        while block is not self.root_block:
            block_diffs = self.diffs.get(block.hash)
            if block_diffs is not None:
                if series_key is NARG:
                    for s_key, series in block_diffs.items():
                        series_diffs = diffs.get(s_key)
                        if series_diffs is None:
                            series_diffs = diffs[s_key] = {}
                        for k, v in series.items():
                            series_diffs.setdefault(k, v)
                else:
                    series = block_diffs.get(series_key)
                    if series is not None:
                        for k, v in series.items():
                            diffs.setdefault(k, v)
            block = self.ancestors[block.hash]
        return diffs
_cur = ContextVar[BlockState]('BlockState.cur')
T = TypeVar('T')
class BlockDict(Generic[T]):
    """Dict-like access to one series of the current BlockState.

    Reads and writes resolve through BlockState.cur() and Block.cur():
    writes above the root height are recorded as per-block diffs; reads walk
    the ancestor chain from the current block back to the root state, with
    DELETE sentinels shadowing older values (surfacing as KeyError).
    """

    def __init__(self, series_key):
        # the namespace within the BlockState that this dict addresses
        self.series_key = series_key

    def __setitem__(self, item, value):
        BlockDict.setitem(self.series_key, item, value)

    def __getitem__(self, item):
        return BlockDict.getitem(self.series_key, item)

    def __delitem__(self, item):
        BlockDict.delitem(self.series_key, item)

    def __contains__(self, item):
        return BlockDict.contains(self.series_key, item)

    def add(self, item):
        """ set-like semantics. the item key is added with a value of None. """
        BlockDict.setitem(self.series_key, item, None)

    def items(self):
        return BlockDict.iter_items(self.series_key)

    @staticmethod
    def setitem(series_key, item, value):
        """Write item=value into the current block's diff, or straight into
        the root state when the current block is at (or below) root height."""
        state = BlockState.cur()
        block = Block.cur()
        if block.height > state.root_block.height:
            diffs = state.diffs[block.hash]
            series = diffs.get(series_key)
            if series is None:
                series = diffs[series_key] = {}
        else:
            series = state.root_state.get(series_key)
            if series is None:
                series = state.root_state[series_key] = {}
        series[item] = value

    @staticmethod
    def getitem(series_key, item):
        """Read *item*, walking the current block's ancestor chain and falling
        back to the root state; raises KeyError when absent or deleted."""
        state = BlockState.cur()
        block = Block.cur()
        while block.height > state.root_block.height:
            diffs = state.diffs.get(block.hash)
            if diffs is not None:
                series = diffs.get(series_key)
                if series is not None:
                    value = series.get(item, NARG)
                    if value is BlockState.DELETE:
                        raise KeyError
                    if value is not NARG:
                        return value
            block = state.ancestors[block.hash]
        if block is not state.root_block:
            # the walk ended on a block other than the root: broken ancestry
            raise ValueError('Orphaned block is invalid',Block.cur().hash)
        root_series = state.root_state.get(series_key)
        if root_series is not None:
            value = root_series.get(item, NARG)
            if value is BlockState.DELETE:
                raise KeyError
            if value is not NARG:
                return value
        raise KeyError

    @staticmethod
    def delitem(series_key, item):
        # deletion is recorded as a DELETE sentinel so it shadows ancestors
        BlockDict.setitem(series_key, item, BlockState.DELETE)

    @staticmethod
    def contains(series_key, item):
        try:
            BlockDict.getitem(series_key, item)
            return True
        except KeyError:
            return False

    @staticmethod
    def iter_items(series_key):
        """Yield (key, value) pairs visible at the current block: recent diffs
        first, then root entries not shadowed by a diff."""
        state = BlockState.cur()
        block = Block.cur()
        root = state.root_state.get(series_key,{})
        diffs = state.collect_diffs(block, series_key)
        # first output recent changes in the diff obj
        yield from ((k,v) for k,v in diffs.items() if v is not BlockState.DELETE)
        # then all the items not diffed
        yield from ((k,v) for k,v in root.items() if k not in diffs)

View File

@@ -0,0 +1,50 @@
from contextvars import ContextVar
class Blockchain:
    """Registry of known blockchains, keyed both by chain id and by name."""

    # class-level registries shared by all instances
    _instances_by_id = {}
    _instances_by_name = {}

    def __init__(self, chain_id, name):
        """Create a chain descriptor and register it under both keys."""
        self.chain_id = chain_id
        self.name = name
        Blockchain._instances_by_id[chain_id] = self
        Blockchain._instances_by_name[name] = self

    @staticmethod
    def cur() -> 'Blockchain':
        """The blockchain bound to the current context."""
        return _cur.get()

    @staticmethod
    def set_cur(value: 'Blockchain'):
        """Bind *value* as the current context's blockchain."""
        _cur.set(value)

    @staticmethod
    def for_id(chain_id):
        """Look up by numeric id, registering an 'Unknown' placeholder if absent."""
        known = Blockchain._instances_by_id.get(chain_id)
        return known if known is not None else Blockchain(chain_id, 'Unknown')

    @staticmethod
    def for_name(chain_name):
        """Look up by name; raises KeyError when unregistered."""
        return Blockchain._instances_by_name[chain_name]

    @staticmethod
    def get(name_or_id):
        """Resolve either a name (str) or a numeric chain id."""
        if type(name_or_id) is str:
            return Blockchain.for_name(name_or_id)
        return Blockchain.for_id(name_or_id)

    def __str__(self):
        return self.name
# https://chainlist.org/
Ethereum = Blockchain(1, 'Ethereum')
Goerli = Blockchain(5, 'Goerli')
Polygon = Blockchain(137, 'Polygon') # POS not zkEVM
Mumbai = Blockchain(80001, 'Mumbai')
BSC = Blockchain(56, 'BSC')
Arbitrum = ArbitrumOne = Blockchain(42161, 'ArbitrumOne')
_cur = ContextVar[Blockchain]('Blockchain.cur')

View File

@@ -0,0 +1,24 @@
from eth_utils import keccak
from dexorder.base.blockstate import BlockDict
class EventManager:
    """Registers log-event callbacks by topic and dispatches matching logs."""

    def __init__(self):
        # every topic we listen for; used to build the eth_getLogs filter
        self.all_topics = set()
        # topic hash -> BlockDict of callbacks (set-like: callback keys, None values)
        self.triggers:dict[str,BlockDict] = {}

    def add_handler(self, topic: str, callback):
        """Register *callback* for *topic*.

        ``topic`` may be a pre-hashed '0x…' topic or a plain event signature
        such as 'Transfer(address,address,uint256)', which gets keccak-hashed.
        """
        if not topic.startswith('0x'):
            topic = '0x'+keccak(text=topic).hex().lower()
        triggers = self.triggers.get(topic)
        if triggers is None:
            triggers = self.triggers[topic] = BlockDict(topic)
        triggers.add(callback)
        self.all_topics.add(topic)

    def handle_logs(self, logs):
        """Invoke every callback registered for each log's first topic."""
        for log in logs:
            # fixed: the previous fallback of [] had no .items() and raised
            # AttributeError whenever a log with an unregistered topic arrived
            triggers = self.triggers.get(log.topics[0].hex())
            if triggers is None:
                continue
            for callback, _ in triggers.items():
                callback(log)

121
src/dexorder/base/fixed.py Normal file
View File

@@ -0,0 +1,121 @@
# binary fixed point math
from _decimal import Decimal
from typing import Tuple
class FixedDecimals:
    """Converts between integer base-unit amounts and Decimal values using a
    fixed number of decimal places (e.g. 18 for ETH/wei)."""

    def __init__(self, decimals):
        # 10**decimals: the scale factor between base units and decimal values
        self._denom = Decimal(10) ** Decimal(decimals)

    def dec(self, amount: int) -> Decimal:
        """Integer base units -> Decimal value."""
        return Decimal(amount) / self._denom

    def amount(self, decimal_like: [Decimal, int, str, float]) -> int:
        """Decimal-like value -> rounded integer base units."""
        scaled = Decimal(decimal_like) * self._denom
        return round(scaled)
class Fixed2:
    """Binary fixed-point number representing ``int_value / 2**dbits``.

    Arithmetic between operands with different fractional-bit counts first
    rescales both to the larger ``dbits``.
    """

    def __init__(self, int_value, denom_bits):
        assert (int_value == int(int_value))  # the numerator must be integral
        self.int_value = int_value
        self.dbits = denom_bits

    @property
    def value(self):
        """Floating-point approximation of this number."""
        return self.int_value / 2 ** self.dbits

    def round(self, denom_bits):
        """Return this number rescaled to *denom_bits* fractional bits.

        Reducing precision rounds the numerator; increasing it is exact.
        Fixed: the result is now constructed with *denom_bits* — it previously
        carried self.dbits, mis-scaling every directly-rounded value.
        """
        if self.dbits > denom_bits:
            # NOTE(review): float division loses precision for numerators
            # beyond 2**53 — consider pure-integer rounding for X96/X128 values
            int_value = round(self.int_value / 2 ** (self.dbits - denom_bits))
        elif self.dbits < denom_bits:
            int_value = self.int_value * 2 ** (denom_bits - self.dbits)
        else:
            int_value = self.int_value
        return Fixed2(int_value, denom_bits)

    def __format__(self, format_spec):
        # 'f' specs format the float value; any other spec formats the raw
        # numerator with the final spec character stripped
        return self.value.__format__(format_spec) if format_spec.endswith('f') \
            else self.int_value.__format__(format_spec[:-1])

    def __str__(self) -> str:
        return str(self.int_value)

    def __repr__(self) -> str:
        return f'Fixed({self.int_value},{self.dbits})'

    def __hash__(self):
        return hash(self.int_value) ^ hash(self.dbits)

    def __int__(self) -> int:
        return self.int_value

    def __float__(self) -> float:
        return self.value

    def __abs__(self) -> 'Fixed2':
        return Fixed2(abs(self.int_value), self.dbits)

    def __hex__(self) -> str:
        return hex(self.int_value)

    def __neg__(self) -> 'Fixed2':
        return Fixed2(-self.int_value, self.dbits)

    def __bool__(self) -> bool:
        return bool(self.int_value)

    def __add__(self, other: 'Fixed2') -> 'Fixed2':
        dbits = max(self.dbits, other.dbits)
        return Fixed2(self.round(dbits).int_value + other.round(dbits).int_value, dbits)

    def __sub__(self, other: 'Fixed2') -> 'Fixed2':
        dbits = max(self.dbits, other.dbits)
        return Fixed2(self.round(dbits).int_value - other.round(dbits).int_value, dbits)

    def __mul__(self, other: 'Fixed2') -> 'Fixed2':
        dbits = max(self.dbits, other.dbits)
        self_up = self.round(dbits)
        other_up = other.round(dbits)
        # divide one factor of 2**dbits back out so the result keeps scale dbits
        return Fixed2(self_up.int_value * other_up.int_value // 2 ** dbits, dbits)

    def __floordiv__(self, other):
        """Integer quotient of the two values (returns a plain int)."""
        dbits = max(self.dbits, other.dbits)
        self_up = self.round(dbits)
        other_up = other.round(dbits)
        return self_up.int_value // other_up.int_value

    def __divmod__(self, other: 'Fixed2') -> Tuple['Fixed2', 'Fixed2']:
        dbits = max(self.dbits, other.dbits)
        self_up = self.round(dbits)
        other_up = other.round(dbits)
        div, mod = self_up.int_value.__divmod__(other_up.int_value)
        return Fixed2(div, dbits), Fixed2(mod, dbits)  # mod not supported

    def __eq__(self, other):
        return (self - other).int_value == 0

    def __le__(self, other):
        return (self - other).int_value <= 0

    def __gt__(self, other):
        return (self - other).int_value > 0
class Fixed2Type:
    """Factory for Fixed2 values with a fixed number of fractional bits."""

    def __init__(self, denominator_bits):
        self.dbits = denominator_bits

    def __call__(self, int_value):
        """Wrap a raw fixed-point integer (already scaled by 2**dbits)."""
        return Fixed2(int_value, self.dbits)

    def from_string(self, str_value):
        """Parse a decimal string into a fixed-point value (exact via Decimal)."""
        return self(round(Decimal(str_value) * 2 ** self.dbits))

    def from_number(self, value):
        """Convert an int/float into a fixed-point value.

        Fixed: scale first, then round — the previous ``round(value) * 2**dbits``
        rounded to a whole number before scaling, discarding the fraction.
        """
        return self(round(value * 2 ** self.dbits))


# common fixed-point widths used by Uniswap-style math
X96 = Fixed2Type(96)
X128 = Fixed2Type(128)
# 18-decimal fixed-decimal helper (ETH/wei)
Dec18 = FixedDecimals(18)

120
src/dexorder/base/token.py Normal file
View File

@@ -0,0 +1,120 @@
from collections import defaultdict
from decimal import Decimal
from sqlalchemy.orm import Mapped
from web3 import Web3
from dexorder import config, ctx, Blockchain, NARG, FixedDecimals, ADDRESS_0
from dexorder.blockchain import ByBlockchainDict
from dexorder.base.chain import Polygon, ArbitrumOne, Ethereum
from dexorder.contract import ContractProxy, abis
import dexorder.db.column as col
class Token (ContractProxy, FixedDecimals):
    """An ERC20 token: a contract proxy plus decimal conversion helpers."""

    # ORM-mapped columns describing the token
    chain: Mapped[col.Blockchain]
    address: Mapped[col.Address]
    decimals: Mapped[col.Uint8]
    name: Mapped[str]
    symbol: Mapped[str]

    @staticmethod
    def get(name_or_address:str, *, chain_id=None) -> 'Token':
        """Resolve a configured token by symbol, or fall back to treating the
        argument as a raw address.

        NOTE(review): the fallback returns a checksummed address *string*, not
        a Token instance, despite the annotation — confirm callers handle both.
        """
        try:
            return tokens.get(name_or_address, default=NARG, chain_id=chain_id) # default=NARG will raise
        except KeyError:
            try:
                # noinspection PyTypeChecker
                return Web3.to_checksum_address(name_or_address)
            except ValueError:
                raise ValueError(f'Could not resolve token {name_or_address} for chain {ctx.chain_id}')

    def __init__(self, chain_id, address, decimals, symbol, name, *, abi=None):
        FixedDecimals.__init__(self, decimals)
        if abi is None:
            # no explicit ABI: load the standard ERC20 ABI by name
            load = 'ERC20'
        else:
            load = None
            # abi may be a registered ABI name or a literal ABI object
            abi = abis.get(abi,abi)
        ContractProxy.__init__(self, address, load, abi=abi)
        self.chain_id = chain_id
        self.address = address
        self.decimals = decimals
        self.symbol = symbol
        self.name = name

    def balance(self, address: str = None) -> int:
        """Token balance in base units of *address*, defaulting to the context account."""
        if address is None:
            address = ctx.address
        return self.balanceOf(address)

    def balance_dec(self, address: str = None) -> Decimal:
        """Token balance as a Decimal in whole-token units."""
        return self.dec(self.balance(address))

    def __str__(self):
        return self.symbol

    def __repr__(self):
        return f'{self.symbol}({self.address},{self.decimals})'

    def __eq__(self, other):
        # identity is (chain, address), consistent with __hash__
        return self.chain_id == other.chain_id and self.address == other.address

    def __hash__(self):
        return hash((self.chain_id,self.address))
class NativeToken (FixedDecimals):
    """ Token-like but not a contract. """

    @staticmethod
    def get( chain_id = None) -> 'NativeToken':
        """The native (gas) token for *chain_id*, defaulting to the context chain."""
        if chain_id is None:
            chain_id = ctx.chain_id
        return _native_tokens[chain_id]

    def __init__(self, chain_id, decimals, symbol, name, *, wrapper_token = None):
        self.chain_id = chain_id
        self.address = ADDRESS_0  # todo i think there's actually an address? like 0x11 or something?
        super().__init__(decimals)
        self.symbol = symbol
        self.name = name
        # default to the conventional 'W'-prefixed wrapped token (e.g. WETH)
        self._wrapper_token = wrapper_token if wrapper_token is not None else _tokens_by_chain[chain_id]['W'+symbol]

    def balance(self, address: str = None) -> int:
        """Native-coin balance (wei) of *address*, defaulting to the context account."""
        if address is None:
            address = ctx.address
        # balances can only be read on the chain this token belongs to
        assert ctx.chain_id == self.chain_id
        return ctx.w3.eth.get_balance(address)

    def balance_dec(self, address: str = None) -> Decimal:
        """Native-coin balance as a Decimal in whole-token units."""
        return self.dec(self.balance(address))

    @property
    def wrapper(self) -> Token:
        """The ERC20 wrapped form of this native token."""
        return self._wrapper_token

    def __repr__(self):
        return self.symbol
# convert TokenConfigs to Tokens
_tokens_by_chain:dict[int,dict[str,Token]] = defaultdict(dict)
for _c in config.tokens:
    _chain_id = Blockchain.get(_c.chain).chain_id
    _tokens_by_chain[_chain_id][_c.symbol] = Token(_chain_id, _c.address, _c.decimals, _c.symbol, _c.name, abi=_c.abi)
# native (gas) tokens, indexed by chain id
_native_tokens: dict[int, NativeToken] = {
    # Ethereum.chain_id: NativeToken(Ethereum.chain_id, 18, 'ETH', 'Ether'), # todo need WETH on Ethereum
    # Polygon.chain_id: NativeToken(Polygon.chain_id, 18, 'MATIC', 'Polygon'),
}
# expose native tokens alongside the ERC20s under their symbol
for _chain_id, _native in _native_tokens.items():
    # noinspection PyTypeChecker
    _tokens_by_chain[_chain_id][_native.symbol] = _native
# the public per-chain token registry
tokens = ByBlockchainDict[Token](_tokens_by_chain)

View File

View File

@@ -0,0 +1,50 @@
import logging
from asyncio import CancelledError
from traceback import print_exception
import asyncio
from signal import Signals
from typing import Coroutine
from dexorder import configuration
if __name__ == '__main__':
raise Exception('this file is meant to be imported not executed')
ignorable_exceptions = [CancelledError]
log = logging.getLogger(__name__)
async def _shutdown_coro(_sig, loop, extra_shutdown):
    """Run the optional shutdown hook, cancel all running tasks, stop the
    loop, and report any unexpected task exceptions."""
    log.info('shutting down')
    if extra_shutdown is not None:
        extra_shutdown()
    # every task except this shutdown coroutine itself
    tasks = [t for t in asyncio.all_tasks() if t is not
             asyncio.current_task()]
    for task in tasks:
        task.cancel()
    # gather so cancelled tasks' exceptions surface here instead of being lost
    exceptions = await asyncio.gather(*tasks, return_exceptions=True)
    loop.stop()
    for x in exceptions:
        # NOTE(review): any non-None task *return value* is also passed to
        # print_exception here — confirm tasks only ever return None
        if x is not None and x.__class__ not in ignorable_exceptions:
            print_exception(x)
def execute(main:Coroutine, shutdown=None, parse_args=True):
    """Run *main* under an asyncio loop with signal-driven graceful shutdown.

    :param main: the program's main coroutine
    :param shutdown: optional callable invoked when a termination signal fires
    :param parse_args: when True, parse CLI args into the configuration first
    """
    if parse_args:
        configuration.parse_args()
    loop = asyncio.get_event_loop()
    signals = Signals.SIGQUIT, Signals.SIGTERM, Signals.SIGINT
    for s in signals:
        # bind s as a default arg so each handler captures its own signal
        loop.add_signal_handler(s, lambda sig=s: asyncio.create_task(_shutdown_coro(sig, loop, shutdown)))
    task = loop.create_task(main)
    loop.run_until_complete(task)
    x = task.exception()
    if x is not None:
        print_exception(x)
        for t in asyncio.all_tasks():
            t.cancel()
    else:
        # main completed normally: keep serving until a signal stops the loop
        loop.run_forever()
    loop.stop()
    loop.close()

122
src/dexorder/bin/main.py Normal file
View File

@@ -0,0 +1,122 @@
import logging
from asyncio import CancelledError
from hexbytes import HexBytes
from web3 import AsyncWeb3, WebsocketProviderV2, AsyncHTTPProvider
from web3.types import FilterParams
from dexorder import config, Blockchain
from dexorder.base.blockstate import BlockState, BlockDict
from dexorder.base.event_manager import EventManager
from dexorder.bin.executable import execute
from dexorder.configuration import resolve_rpc_url
from dexorder.db.model import Block
log = logging.getLogger('dexorder')
ROOT_AGE = 10 # todo set per chain
wallets = BlockDict('wallets')
def handle_transfer(event):
    """Record the recipient of an ERC20 Transfer event as a known wallet."""
    # topics[2] is the indexed `to` address, left-padded to 32 bytes.
    # NOTE(review): the stored key keeps the 12-byte zero padding — confirm
    # downstream consumers expect the padded form rather than a 20-byte address
    to_address = event.topics[2].hex()
    wallets.add(to_address)
def setup_triggers(event_manager: EventManager):
    """Register all log-event handlers with the event manager."""
    event_manager.add_handler('Transfer(address,address,uint256)', handle_transfer)
async def main():
    """
    1. load root state
    a. if no root, init from head
    b. if root is old, batch forward by height
    2. discover new heads
    2b. find in-memory ancestor else use root
    3. context = ancestor->head diff
    4. query global log filter
    5. process new vaults
    6. process new orders and cancels
    a. new pools
    7. process Swap events and generate pool prices
    8. process price horizons
    9. process token movement
    10. process swap triggers (zero constraint tranches)
    11. process price tranche triggers
    12. process horizon tranche triggers
    13. filter by time tranche triggers
    14. bundle execution requests and send tx. tx has require(block<deadline)
    15. on tx confirmation, the block height of all executed trigger requests is set to the tx block
    """
    # db.connect()
    # blockchain.connect()
    # websocket connection is used for the newHeads subscription...
    ws_provider = WebsocketProviderV2(resolve_rpc_url(config.ws_url))
    w3ws = AsyncWeb3.persistent_websocket(ws_provider)
    # ...while block/log queries go over plain HTTP
    http_provider = AsyncHTTPProvider(resolve_rpc_url(config.rpc_url))
    w3 = AsyncWeb3(http_provider)
    # w3.middleware_onion.remove('attrdict')
    try:
        chain_id = await w3ws.eth.chain_id
        Blockchain.set_cur(Blockchain.for_id(chain_id))
        event_manager = EventManager()
        # todo load root
        state = None
        async with w3ws as w3ws:
            await w3ws.eth.subscribe('newHeads')
            while True:
                async for head in w3ws.listen_to_websocket():
                    # NOTE(review): stdlib logging treats the second positional
                    # arg as a %-format arg; with no placeholder this emits a
                    # formatting error — likely meant log.debug('head %s', head)
                    log.debug('head', head)
                    block_data = await w3.eth.get_block(head.hash.hex(), True)
                    block = Block(chain=chain_id,height=block_data.number,hash=block_data.hash,parent=block_data.parentHash,data=block_data)
                    block.set_latest(block)
                    block.set_cur(block)
                    if state is None:
                        # first head seen becomes the root of a fresh state
                        state = BlockState(block,{})
                        BlockState.set_cur(state)
                        setup_triggers(event_manager)
                        log.info('Created new empty root state')
                    else:
                        ancestor = BlockState.cur().add_block(block)
                        if ancestor is None:
                            log.debug(f'discarded late-arriving head {block}')
                        elif type(ancestor) is int:
                            # todo backfill batches
                            log.error(f'backfill unimplemented for range {ancestor} to {block}')
                        else:
                            # fetch only this block's logs for our watched topics
                            logs_filter = FilterParams(topics=list(event_manager.all_topics), blockhash=HexBytes(block.hash).hex())
                            log.debug(f'get logs {logs_filter}')
                            logs = await w3.eth.get_logs(logs_filter)
                            if logs:
                                log.debug('handle logs')
                                event_manager.handle_logs(logs)
                    # check for root promotion
                    if block.height - state.root_block.height > ROOT_AGE:
                        b = block
                        try:
                            for _ in range(1,ROOT_AGE):
                                # we walk backwards ROOT_AGE and promote what's there
                                b = state.by_hash[b.parent]
                        except KeyError:
                            # a gap in the ancestry: skip promotion this round
                            pass
                        else:
                            log.debug(f'promoting root {b}')
                            state.promote_root(b)
                    log.debug('wallets: '+' '.join(k for k,_ in wallets.items()))
    except CancelledError:
        pass
    finally:
        if ws_provider.is_connected():
            await ws_provider.disconnect()
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
log = logging.getLogger('dexorder')
log.setLevel(logging.DEBUG)
execute(main())
log.info('exiting')

View File

@@ -0,0 +1,4 @@
from .old_dispatch import OldDispatcher
from .by_blockchain import ByBlockchainDict, ByBlockchainList, ByBlockchainCollection
from .connection import connect
from dexorder.base.chain import Ethereum, Polygon, Goerli, Mumbai, ArbitrumOne, BSC

View File

@@ -0,0 +1,55 @@
from typing import Generic, TypeVar, Any, Iterator
from dexorder import ctx, NARG
_T = TypeVar('_T')
class ByBlockchainSingleton:
    """Wraps a per-chain mapping and forwards attribute access to the mapping object."""

    def __init__(self, by_blockchain:dict[int,Any]):
        self.by_blockchain = by_blockchain

    def __getattr__(self, item):
        # NOTE(review): this delegates to attributes of the dict itself
        # (e.g. .keys, .get), not to per-chain values — confirm that is the
        # intent rather than self.by_blockchain[item]
        return self.by_blockchain.__getattribute__(item)
class ByBlockchainCollection (Generic[_T]):
    """Base for per-chain containers: maps chain id -> container of _T."""

    def __init__(self, by_blockchain:dict[int,dict[Any,_T]]=None):
        # None default avoids a shared mutable default; each instance gets its own map
        self.by_blockchain = by_blockchain if by_blockchain is not None else {}

    def __getitem__(self, item) -> _T:
        # index into the container for the chain bound to the current context
        return self.by_blockchain[ctx.chain_id][item]
class ByBlockchainDict (ByBlockchainCollection[_T], Generic[_T]):
    """Per-chain dict with attribute-style access for the current chain."""

    def __getattr__(self, name: str) -> _T:
        # attribute access resolves against the current context's chain
        return self.by_blockchain[ctx.chain_id][name]

    def get(self, item, default=None, *, chain_id=None) -> _T:
        """Look up *item* for *chain_id* (default: current context chain).

        Passing ``default=NARG`` turns a miss into a KeyError.
        """
        if chain_id is None:
            chain_id = ctx.chain_id
            if chain_id is None:
                raise KeyError('no ctx.chain_id set')
        found = self.by_blockchain.get(chain_id, {}).get(item, default)
        if found is NARG:
            raise KeyError
        return found
class ByBlockchainList (ByBlockchainCollection[_T], Generic[_T]):
    """Per-chain list access."""

    def __iter__(self) -> Iterator[_T]:
        # iterate the current context chain's list
        return iter(self.by_blockchain[ctx.chain_id])

    def iter(self, *, chain_id=None) -> Iterator[_T]:
        """Iterate the list for *chain_id* (default: current context chain)."""
        if chain_id is None:
            chain_id = ctx.chain_id
        return iter(self.by_blockchain[chain_id])

    def get(self, index, *, chain_id=None) -> _T:
        """Return the element at *index* for *chain_id* (default: current chain)."""
        if chain_id is None:
            chain_id = ctx.chain_id
            if chain_id is None:
                raise KeyError('no ctx.chain_id set')
        return self.by_blockchain[chain_id][index]

View File

@@ -0,0 +1,2 @@
from dexorder import Blockchain

View File

@@ -0,0 +1,50 @@
from web3 import HTTPProvider, Web3
from web3.middleware import geth_poa_middleware, simple_cache_middleware
from dexorder import ctx
from dexorder.blockchain.util import get_contract_data
from ..configuration import resolve_rpc_url
def connect(rpc_url=None):
    """
    connects to the rpc_url and configures the context
    if you don't want to use ctx.account for this w3, either set ctx.account first or
    use create_w3() and set w3.eth.default_account separately
    """
    w3 = create_w3(rpc_url)
    # publish the new connection on the ambient context for the rest of the app
    ctx.w3 = w3
    return w3
def create_w3(rpc_url=None):
    """
    this constructs a Web3 object but does NOT attach it to the context. consider using connect(...) instead
    this does *not* attach any signer to the w3. make sure to inject the proper middleware with Account.attach(w3)
    """
    # todo create a proxy w3 that rotates among rpc urls
    #  self.w3s = tuple(create_w3(url) for url in rpc_url_or_tag)
    #  chain_id = self.w3s[0].eth.chain_id
    #  assert all(w3.eth.chain_id == chain_id for w3 in self.w3s)  # all rpc urls must be the same blockchain
    #  self.w3iter = itertools.cycle(self.w3s)
    url = resolve_rpc_url(rpc_url)  # rpc_url may be an alias; resolve to a concrete URL
    w3 = Web3(HTTPProvider(url))
    # POA-style chains need this middleware at the innermost layer (layer=0)
    w3.middleware_onion.inject(geth_poa_middleware, layer=0)
    # caches responses for idempotent RPC methods to cut request volume
    w3.middleware_onion.add(simple_cache_middleware)
    # replace the contract factory so contracts can be loaded by build-artifact name
    w3.eth.Contract = _make_contract(w3.eth)
    return w3
def _make_contract(w3_eth):
def f(address, abi_or_name): # if abi, then it must already be in native object format, not a string
if type(abi_or_name) is str:
data = get_contract_data(abi_or_name)
abi = data['abi']
bytecode = data['bytecode']['object'] if address is None else None
else:
abi = abi_or_name
bytecode = None
return w3_eth.contract(address,abi=abi,bytecode=bytecode)
return f

View File

@@ -0,0 +1,7 @@
import json
def get_contract_data(name):
    """Load the Foundry build artifact (abi, bytecode, ...) for contract *name*.

    Reads contract/out/<name>.sol/<name>.json; assumes the process cwd is the
    project root (see _cwd in util).
    """
    with open(f'contract/out/{name}.sol/{name}.json') as file:
        return json.load(file)

View File

@@ -0,0 +1,3 @@
from .standard_accounts import test_accounts
from .load import config, parse_args
from .resolve import resolve_rpc_url

View File

@@ -0,0 +1,94 @@
import os
import tomllib
from tomllib import TOMLDecodeError
from omegaconf import OmegaConf, DictConfig
from omegaconf.errors import OmegaConfBaseException
from .schema import Config
from .standard_accounts import default_accounts_config
from .standard_tokens import default_token_config
# validated schema built from the structured Config dataclass; every layer is merged against it
schema = OmegaConf.structured(Config())


class ConfigException (Exception):
    """Raised when any configuration layer fails to load, parse, or validate."""
    pass
def load_config():
    """Assemble the effective configuration by merging all layers; later layers win.

    Order: schema defaults, built-in tokens and accounts, pool.toml,
    dexorder.toml, config.toml, .secret.toml, then DEXORDER_* env variables.
    """
    # noinspection PyTypeChecker
    result: ConfigDict = OmegaConf.merge(
        schema,
        load_tokens(),
        load_accounts(),
        from_toml('pool.toml'),
        from_toml('dexorder.toml'),
        from_toml('config.toml'),
        from_toml('.secret.toml'),
        from_env()
    )
    return result
def load_tokens():
    """Wrap the built-in token defaults in a config layer, validated against the schema."""
    token_conf = OmegaConf.create({'tokens': default_token_config})
    try:
        # the merge is a validation pass only; its result is discarded
        OmegaConf.merge(schema, token_conf)
    except OmegaConfBaseException as _x:
        raise ConfigException(f'Error while processing default tokens:\n{_x}')
    return token_conf
def load_accounts():
    """Wrap the built-in account aliases in a config layer, validated against the schema."""
    accounts_conf = OmegaConf.create({'accounts': default_accounts_config})
    try:
        # the merge is a validation pass only; its result is discarded
        OmegaConf.merge(schema, accounts_conf)
    except OmegaConfBaseException as _x:
        raise ConfigException(f'Error while processing default accounts:\n{_x}')
    return accounts_conf
def from_env(prefix='DEXORDER_'):
    """Collect DEXORDER_*-prefixed environment variables into a validated config layer.

    The prefix is stripped, the rest lowercased, and double underscores become
    dots so nested keys can be addressed.
    """
    dotlist = [
        key[len(prefix):].lower().replace('__', '.') + '=' + value
        for key, value in os.environ.items()
        if key.startswith(prefix)
    ]
    result = OmegaConf.from_dotlist(dotlist)
    try:
        OmegaConf.merge(schema, result)
    except OmegaConfBaseException as x:
        raise ConfigException(f'Error while parsing environment config:\n{x}')
    return result
def from_toml(filename):
    """Load *filename* as a TOML config layer validated against the schema.

    A missing file is not an error: an empty layer is returned so optional
    config files can simply be absent.
    """
    try:
        try:
            with open(filename, 'rb') as file:
                toml = tomllib.load(file)
            result = OmegaConf.create(toml)
        except FileNotFoundError:
            return OmegaConf.create()
        OmegaConf.merge(schema, result)
        return result
    except (OmegaConfBaseException, TOMLDecodeError) as x:
        # name the offending file so errors across layered configs are traceable
        raise ConfigException(f'Error while loading {filename}:\n{x}')
def parse_args(args=None):
    """ should be called from binaries to parse args as command-line config settings """
    # noinspection PyTypeChecker
    try:
        # updates config in-place. THANK YOU OmegaConf!
        config.merge_with(OmegaConf.from_cli(args))
    except OmegaConfBaseException as x:
        raise ConfigException(f'Could not parse command-line args:\n{x}')
class ConfigDict (Config, DictConfig):  # give type hints from Config plus methods from DictConfig
    pass


# module-level singleton: importing this module loads and validates all config layers
config = load_config()

View File

@@ -0,0 +1,13 @@
from typing import Callable
from .schema import Config
# registered callbacks, invoked in registration order by postprocess_config
_handlers = []


def add_config_handler(callback: Callable[[Config], None]):
    """Register *callback* to be run once configuration is finalized."""
    _handlers.append(callback)
def postprocess_config(conf: Config):
    """Invoke every registered config handler with the finished config, in order."""
    for handler in _handlers:
        handler(conf)

View File

@@ -0,0 +1,13 @@
from .load import config
def resolve_rpc_url(rpc_url=None):
    """Resolve *rpc_url* (or the configured default) through the known aliases."""
    if rpc_url is None:
        rpc_url = config.rpc_url
    if rpc_url == 'test':
        # convenience alias for a local development node
        return 'http://localhost:8545'
    try:
        # aliases defined under config.rpc_urls take precedence
        return config.rpc_urls[rpc_url]
    except KeyError:
        # not an alias: assume it is already a literal URL
        return rpc_url

View File

@@ -0,0 +1,73 @@
from dataclasses import dataclass, field
from typing import Optional, Union
# SCHEMA NOTES:
# - avoid using int keys since (1) they are hard to decipher by a human and (2) the Python TOML parser mistypes int keys
# as strings in certain situations
# - do not nest structured types more than one level deep. it confuses the config's typing system
@dataclass
class Config:
    """Top-level configuration schema; every config layer is validated against this."""
    db_url: str = 'postgresql://dexorder:redroxed@localhost/dexorder'
    dump_sql: bool = False  # echo SQL statements (passed to create_engine(echo=...))
    chain: Union[int, str] = 'Arbitrum'  # chain id or chain name
    # rpc_url may also reference the aliases from the foundry.toml's rpc_endpoints section
    rpc_url: str = 'http://localhost:8545'
    ws_url: str = 'ws://localhost:8545'
    rpc_urls: Optional[dict[str, str]] = field(default_factory=dict)  # alias -> url
    account: Optional[str] = None  # may be a private key or an account alias
    accounts: Optional[dict[str, str]] = field(default_factory=dict)  # account aliases
    min_gas: str = '0'
    tokens: list['TokenConfig'] = field(default_factory=list)
    dexorders: list['DexorderConfig'] = field(default_factory=list)
    pools: list['PoolConfig'] = field(default_factory=list)
    query_helpers: dict[str, str] = field(default_factory=dict)
    # Dispatcher polling parameters: interval grows by backoff_factor while idle,
    # capped at max_interval (units assumed to be seconds -- confirm in Dispatcher)
    polling_interval: float = 0.2
    backoff_factor: float = 1.5
    max_interval: float = 10
    # positive numbers are absolute block numbers and negative numbers are relative to the latest block
    backfill: int = 0
@dataclass
class TokenConfig:
    """One known ERC-20-style token on a specific chain."""
    name: str
    symbol: str
    decimals: int
    chain: str  # chain name, matched against Config.chain
    address: str
    abi: Optional[str] = None  # build-artifact name when a nonstandard ABI is needed
@dataclass
class PoolConfig:
    """One liquidity pool (token pair + fee tier) to track on a chain."""
    chain: str
    address: str
    token_a: str
    token_b: str
    fee: int  # pool fee tier
    enabled: bool = False  # pools are opt-in
@dataclass
class DexorderConfig:
    """Configuration for a single dexorder placement on a pool."""
    chain: str
    address: str
    pool: str
    owner: str
    name: Optional[str] = None
    width: Optional[int] = None  # in bps aka ticks
    width_above: Optional[int] = None  # defaults to width
    width_below: Optional[int] = None  # defaults to width
    offset: Optional[int] = None  # in bps aka ticks
    offset_above: Optional[int] = None  # defaults to offset
    offset_below: Optional[int] = None  # defaults to offset
    ema: Optional[int] = None

View File

@@ -0,0 +1,18 @@
# the well-known local development node private keys, aliased test0..test9
test_accounts = {
    'test0': '0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80',
    'test1': '0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d',
    'test2': '0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a',
    'test3': '0x7c852118294e51e653712a81e05800f419141751be58f605c371e15141b007a6',
    'test4': '0x47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a',
    'test5': '0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba',
    'test6': '0x92db14e403b83dfe3df233f83dfa3a0d7096f21ca9b0d6d6b8d88b2b4ec1564e',
    'test7': '0x4bbbf85ce3377467afe5d46f804f221813b2bb87f24d81f60f1fcdbf7cbf4356',
    'test8': '0xdbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97',
    'test9': '0x2a871d0798f97d79848a013d4936a73bf4cc922c825d33c1cf7073dff6d409c6',
}

# default account aliases: every test key plus 'test' as a shorthand for test0
default_accounts_config = dict(test_accounts)
default_accounts_config['test'] = test_accounts['test0']

View File

@@ -0,0 +1,8 @@
from .schema import TokenConfig
# built-in token definitions merged into every configuration. Currently empty;
# the commented entries document the expected TokenConfig call format.
default_token_config = [
    # TokenConfig('Wrapped Matic', 'WMATIC', 18, 'Polygon', '0x0d500B1d8E8eF31E21C99d1Db9A6444d3ADf1270', abi='WMATIC'),
    # TokenConfig('Wrapped Ethereum','WETH', 18, 'Polygon', '0x7ceB23fD6bC0adD59E62ac25578270cFf1b9f619'),
    # TokenConfig('Wrapped Bitcoin', 'WBTC', 8, 'Polygon', '0x1BFD67037B42Cf73acF2047067bd4F2C47D9BfD6'),
    # TokenConfig('USD Coin', 'USDC', 6, 'Polygon', '0x2791Bca1f2de4661ED88A30C99A7a9449Aa84174'),
]

View File

@@ -0,0 +1,25 @@
import sqlalchemy
from .migrate import migrate_database
from .. import config, ctx
# noinspection PyShadowingNames
def connect(url=None, migrate=True, reconnect=False, dump_sql=None):
    """Create the global SQLAlchemy engine and publish it on ctx.engine.

    No-op if an engine already exists, unless reconnect=True. Unspecified
    arguments fall back to config.db_url / config.dump_sql. Raises if the
    alembic_version table has no revision row.
    """
    if ctx.engine is not None and not reconnect:
        return
    if url is None:
        url = config.db_url
    if dump_sql is None:
        dump_sql = config.dump_sql
    engine = sqlalchemy.create_engine(url, echo=dump_sql)
    if migrate:
        # apply pending migrations before the engine becomes visible to the app
        migrate_database()
    with engine.connect() as connection:
        # NOTE(review): SET TIME ZONE affects only this probe connection -- confirm
        # whether it was meant to apply to all pooled connections
        connection.execute(sqlalchemy.text("SET TIME ZONE 'UTC'"))
        result = connection.execute(sqlalchemy.text("select version_num from alembic_version"))
        for row in result:
            # a revision row exists: report it and publish the engine
            print(f'database revision {row[0]}')
            ctx.engine = engine
            return
    # no rows in alembic_version means the migration never stamped the database
    raise Exception('database version not found')

87
src/dexorder/db/column.py Normal file
View File

@@ -0,0 +1,87 @@
from sqlalchemy import SMALLINT, INTEGER, BIGINT
from sqlalchemy.orm import mapped_column
from typing_extensions import Annotated
from dexorder import Fixed2, Blockchain as NativeBlockchain
from . import column_types as t
# noinspection DuplicatedCode
# Unsigned widths must map to a Postgres signed integer type whose positive range
# holds the FULL unsigned range; a same-width signed column overflows on insert
# (this is the same reasoning as the existing Uint64 comment below).
Uint8 = Annotated[int, mapped_column(SMALLINT)]
Uint16 = Annotated[int, mapped_column(INTEGER)]  # fix: 65535 overflows SMALLINT (max 32767)
Uint24 = Annotated[int, mapped_column(INTEGER)]
Uint32 = Annotated[int, mapped_column(BIGINT)]  # fix: 2**32-1 overflows INTEGER (max 2**31-1)
Uint40 = Annotated[int, mapped_column(BIGINT)]
Uint48 = Annotated[int, mapped_column(BIGINT)]
Uint56 = Annotated[int, mapped_column(BIGINT)]
Uint64 = Annotated[int, mapped_column(t.IntBits(64, False))]  # cannot use BIGINT since an unsigned value could overflow it
Uint72 = Annotated[int, mapped_column(t.IntBits(72, False))]
Uint80 = Annotated[int, mapped_column(t.IntBits(80, False))]
Uint88 = Annotated[int, mapped_column(t.IntBits(88, False))]
Uint96 = Annotated[int, mapped_column(t.IntBits(96, False))]
Uint104 = Annotated[int, mapped_column(t.IntBits(104, False))]
Uint112 = Annotated[int, mapped_column(t.IntBits(112, False))]
Uint120 = Annotated[int, mapped_column(t.IntBits(120, False))]
Uint128 = Annotated[int, mapped_column(t.IntBits(128, False))]
Uint136 = Annotated[int, mapped_column(t.IntBits(136, False))]
Uint144 = Annotated[int, mapped_column(t.IntBits(144, False))]
Uint152 = Annotated[int, mapped_column(t.IntBits(152, False))]
Uint160 = Annotated[int, mapped_column(t.IntBits(160, False))]
Uint168 = Annotated[int, mapped_column(t.IntBits(168, False))]
Uint176 = Annotated[int, mapped_column(t.IntBits(176, False))]
Uint184 = Annotated[int, mapped_column(t.IntBits(184, False))]
Uint192 = Annotated[int, mapped_column(t.IntBits(192, False))]
Uint200 = Annotated[int, mapped_column(t.IntBits(200, False))]
Uint208 = Annotated[int, mapped_column(t.IntBits(208, False))]
Uint216 = Annotated[int, mapped_column(t.IntBits(216, False))]
Uint224 = Annotated[int, mapped_column(t.IntBits(224, False))]
Uint232 = Annotated[int, mapped_column(t.IntBits(232, False))]
Uint240 = Annotated[int, mapped_column(t.IntBits(240, False))]
Uint248 = Annotated[int, mapped_column(t.IntBits(248, False))]
Uint256 = Annotated[int, mapped_column(t.IntBits(256, False))]
# noinspection DuplicatedCode
# Signed widths fit in the same-width or next larger signed Postgres type.
Int8 = Annotated[int, mapped_column(SMALLINT)]
Int16 = Annotated[int, mapped_column(SMALLINT)]
Int24 = Annotated[int, mapped_column(INTEGER)]
Int32 = Annotated[int, mapped_column(INTEGER)]
Int40 = Annotated[int, mapped_column(BIGINT)]
Int48 = Annotated[int, mapped_column(BIGINT)]
Int56 = Annotated[int, mapped_column(BIGINT)]
Int64 = Annotated[int, mapped_column(BIGINT)]
Int72 = Annotated[int, mapped_column(t.IntBits(72, True))]
Int80 = Annotated[int, mapped_column(t.IntBits(80, True))]
Int88 = Annotated[int, mapped_column(t.IntBits(88, True))]
Int96 = Annotated[int, mapped_column(t.IntBits(96, True))]
Int104 = Annotated[int, mapped_column(t.IntBits(104, True))]
Int112 = Annotated[int, mapped_column(t.IntBits(112, True))]
Int120 = Annotated[int, mapped_column(t.IntBits(120, True))]
Int128 = Annotated[int, mapped_column(t.IntBits(128, True))]
Int136 = Annotated[int, mapped_column(t.IntBits(136, True))]
Int144 = Annotated[int, mapped_column(t.IntBits(144, True))]
Int152 = Annotated[int, mapped_column(t.IntBits(152, True))]
Int160 = Annotated[int, mapped_column(t.IntBits(160, True))]
Int168 = Annotated[int, mapped_column(t.IntBits(168, True))]
Int176 = Annotated[int, mapped_column(t.IntBits(176, True))]
Int184 = Annotated[int, mapped_column(t.IntBits(184, True))]
Int192 = Annotated[int, mapped_column(t.IntBits(192, True))]
Int200 = Annotated[int, mapped_column(t.IntBits(200, True))]
Int208 = Annotated[int, mapped_column(t.IntBits(208, True))]
Int216 = Annotated[int, mapped_column(t.IntBits(216, True))]
Int224 = Annotated[int, mapped_column(t.IntBits(224, True))]
Int232 = Annotated[int, mapped_column(t.IntBits(232, True))]
Int240 = Annotated[int, mapped_column(t.IntBits(240, True))]
Int248 = Annotated[int, mapped_column(t.IntBits(248, True))]
Int256 = Annotated[int, mapped_column(t.IntBits(256, True))]
Address = Annotated[str, mapped_column(t.Address())]
BlockCol = Annotated[int, mapped_column(BIGINT)]
Blockchain = Annotated[NativeBlockchain, mapped_column(t.Blockchain)]
# Uniswap aliases
Tick = Int24
SqrtPriceX96 = Uint160
Liquidity = Uint128
Q128X96 = Annotated[Fixed2, mapped_column(t.Fixed(128, 96))]
Q256X128 = Annotated[Fixed2, mapped_column(t.Fixed(256, 128))]

View File

@@ -0,0 +1,71 @@
import math
from sqlalchemy import TypeDecorator, BIGINT
from sqlalchemy.dialects.postgresql import BYTEA
from web3 import Web3
from dexorder import Fixed2 as NativeFixed, Blockchain as NativeBlockchain
class Address(TypeDecorator):
    """Stores 0x-prefixed hex addresses as raw bytes; reads back checksummed."""
    impl = BYTEA
    cache_ok = True

    def process_bind_param(self, value, dialect):
        # accept addresses with or without the 0x prefix
        return bytes.fromhex(value.removeprefix('0x'))

    def process_result_value(self, value: bytes, dialect):
        return Web3.to_checksum_address(value.hex())
class Blockchain(TypeDecorator):
    """Persists a Blockchain as its integer chain id."""
    impl = BIGINT
    cache_ok = True

    def process_bind_param(self, value: NativeBlockchain, dialect):
        return value.chain_id

    def process_result_value(self, value: int, dialect):
        # fix: inside this class, `Blockchain` names this TypeDecorator (which has
        # no for_id); the conversion must go through the native Blockchain class
        return NativeBlockchain.for_id(value)
# Alembic instantiates these custom column types by calling their __init__ function as if they were the underlying
# type. Therefore, we cannot change the init function signature, so we use wrapper methods to set member variables
# after instance creation. This way, Alembic calls __init__ with the params for the underlying type, but at runtime
# when we construct our custom column types, they have the extra members set by the wrapper function.
class _IntBits(TypeDecorator):
    """Fixed-width big-endian integer stored as BYTEA; construct via IntBits()."""
    impl = BYTEA
    cache_ok = True

    def process_bind_param(self, value, dialect):
        # fix: the original condition was inverted -- it rejected valid negative
        # values on SIGNED columns while letting negatives through on unsigned
        # ones. Only unsigned columns must insist on value >= 0.
        assert self.signed or value >= 0
        # self.length is the BYTEA byte width -- NOTE(review): relies on
        # TypeDecorator attribute passthrough to the impl; confirm
        return int(value).to_bytes(self.length, 'big', signed=self.signed)

    def process_result_value(self, value, dialect):
        return int.from_bytes(value, 'big', signed=self.signed)
def IntBits(bits, signed):
    """Create an _IntBits column type for a *bits*-wide (un)signed integer."""
    # Alembic re-instantiates column types with only the underlying type's
    # __init__ arguments, so the extra members are attached after construction.
    column_type = _IntBits((bits + 7) // 8)
    column_type.bits = bits
    column_type.signed = signed
    return column_type
class _Fixed(TypeDecorator):
    """Fixed-point number stored as fixed-width big-endian BYTEA; construct via Fixed()."""
    impl = BYTEA
    cache_ok = True

    def process_bind_param(self, value, dialect):
        # fix: the original condition was inverted (same bug as _IntBits) --
        # only UNSIGNED columns must reject negative values
        assert self.signed or value >= 0
        return int(value).to_bytes(self.length, 'big', signed=self.signed)

    def process_result_value(self, value, dialect):
        # rebuild the native fixed-point value with self.dbits fractional bits
        return NativeFixed(int.from_bytes(value, 'big', signed=self.signed), self.dbits)
def Fixed(bits, dbits, signed=False):
    """Create a _Fixed column type: *bits* total width with *dbits* fractional bits."""
    # extra members are attached post-construction so Alembic can re-instantiate
    # the type with only the underlying BYTEA parameters
    column_type = _Fixed((bits + 7) // 8)
    column_type.bits = bits
    column_type.dbits = dbits
    column_type.signed = signed
    return column_type

View File

@@ -0,0 +1,10 @@
import subprocess
import sys
def migrate_database():
    """Run `alembic upgrade head`; on any failure print the output and exit(1)."""
    try:
        # argv list with shell=False: no shell string parsing, same PATH lookup
        completed = subprocess.run(
            ['alembic', 'upgrade', 'head'],
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
        )
    except FileNotFoundError as x:
        # with shell=True a missing binary surfaced as returncode 127; preserve
        # the fatal-exit behavior for that case
        print(str(x), file=sys.stderr)
        print('FATAL: database migration failed!', file=sys.stderr)
        exit(1)
    if completed.returncode != 0:
        print(completed.stdout.decode(), file=sys.stderr)
        print('FATAL: database migration failed!', file=sys.stderr)
        exit(1)

View File

@@ -0,0 +1,2 @@
from .base import Base
from .block import Block

View File

@@ -0,0 +1,16 @@
from sqlalchemy.orm import DeclarativeBase, declared_attr
from dexorder import ctx
# add Base as the -last- class inherited on classes which should get tables
class Base(DeclarativeBase):
    """Declarative base: the table name defaults to the lowercased class name."""
    # noinspection PyMethodParameters
    @declared_attr.directive
    def __tablename__(cls) -> str:
        # e.g. class Block -> table "block"
        return cls.__name__.lower()

    @classmethod
    def get(cls, **kwargs):
        """Fetch one row by (composite) primary key through the ambient ctx.session."""
        return ctx.session.get(cls, kwargs)

View File

@@ -0,0 +1,37 @@
from contextvars import ContextVar
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm import Mapped, mapped_column
from dexorder.db.model import Base
class Block(Base):
    """One observed block header; keying on (chain, height, hash) admits fork siblings."""
    chain: Mapped[int] = mapped_column(primary_key=True)
    height: Mapped[int] = mapped_column(primary_key=True)  # timescaledb index
    hash: Mapped[bytes] = mapped_column(primary_key=True)
    parent: Mapped[bytes]  # hash of the parent block
    data: Mapped[dict] = mapped_column(JSONB)  # raw block payload

    def __str__(self):
        return f'{self.height}_{self.hash.hex()}'

    # "current" and "latest" blocks are tracked in ContextVars so concurrent
    # tasks each see their own values
    @staticmethod
    def cur() -> 'Block':
        """The block this context is currently processing."""
        return _cur.get()

    @staticmethod
    def set_cur(value: 'Block'):
        _cur.set(value)

    @staticmethod
    def latest() -> 'Block':
        """The most recent block seen in this context."""
        return _latest.get()

    @staticmethod
    def set_latest(value: 'Block'):
        _latest.set(value)


_cur = ContextVar[Block]('Block.cur')
_latest = ContextVar[Block]('Block.latest')

View File

@@ -0,0 +1,14 @@
import re
from .async_yield import async_yield
from .tick_math import nearest_available_ticks, round_tick, spans_tick, spans_range
def align_decimal(value, left_columns) -> str:
    """
    returns a string where the decimal point in value is aligned to have left_columns of characters before it
    """
    text = str(value)
    integer_part = text.split('.')[0]
    # drop any trailing non-digit characters before measuring the integer width
    digits = re.sub(r'[^0-9]*$', '', integer_part)
    padding = max(left_columns - len(digits), 0)
    return ' ' * padding + text

View File

@@ -0,0 +1,5 @@
import asyncio
async def async_yield():
    """Yield control to the event loop for one scheduling round."""
    # a value of exactly 0 doesn't seem to work as well, so we sleep 1 nanosecond
    nap = 1e-9
    await asyncio.sleep(nap)

View File

@@ -0,0 +1,20 @@
import math
def price_to_tick(p):
    """Return the nearest tick index for price *p* (log base 1.0001)."""
    ln_base = math.log(1.0001)
    return round(math.log(p) / ln_base)
def tick_to_price(t):
    """Return the price represented by tick index *t*: 1.0001 ** t."""
    return 1.0001 ** t
def tick_to_sqrt_price(t):
    """Return sqrt(1.0001)^t, the square-root price for tick *t*."""
    sqrt_base = math.pow(1.0001, .5)
    return math.pow(sqrt_base, t)
def to_fixed(value, decimals):
    """Scale *value* by 10**decimals and round to an integer fixed-point amount."""
    scale = 10 ** decimals
    return round(value * scale)
def from_fixed(value, decimals):
    """Convert an integer fixed-point amount back to a float by 10**decimals."""
    scale = 10 ** decimals
    return value / scale
def tick_to_sqrtPriceX96(tick):
    """Encode the tick's square-root price in Q-format with 96 fractional bits."""
    # inline of tick_to_price(tick): price = 1.0001 ** tick
    price = math.pow(1.0001, tick)
    return round(math.sqrt(price) * 2 ** 96)

9
src/dexorder/util/cwd.py Normal file
View File

@@ -0,0 +1,9 @@
import os
def _cwd():
    """Walk up from the current directory until the project root is found.

    The root is identified by the presence of alembic.ini; exits the process
    if the filesystem root is reached without finding it.
    """
    while 'alembic.ini' not in os.listdir():
        if os.getcwd() == '/':
            print('FATAL: could not find project root directory')
            exit(1)
        # step one directory up and retry
        os.chdir(os.path.normpath('..'))

16
src/dexorder/util/json.py Normal file
View File

@@ -0,0 +1,16 @@
from hexbytes import HexBytes
from orjson import orjson
from web3.datastructures import ReadableAttributeDict
def _serialize(v):
    """orjson `default` hook: convert types orjson cannot encode natively."""
    # todo wrap json.dumps()
    if isinstance(v, HexBytes):
        return v.hex()
    if isinstance(v, ReadableAttributeDict):
        # NOTE(review): serializes the object's attribute dict -- confirm this
        # matches the intended JSON shape for web3 response objects
        return v.__dict__
    raise ValueError(v)
def dumps(obj):
    """JSON-encode *obj* with orjson, using _serialize for non-native types."""
    return orjson.dumps(obj, default=_serialize)

View File

@@ -0,0 +1,9 @@
import logging
log = logging.getLogger('dexorder')


def fatal(message, exception=None):
    """Log *message* as an exception and terminate the process with exit code 1.

    If *message* is itself an exception and no explicit *exception* is given,
    it is used as the exc_info for the log record.
    """
    # RuntimeError is already a subclass of BaseException, so the original
    # (BaseException, RuntimeError) tuple was redundant
    if exception is None and isinstance(message, BaseException):
        exception = message
    log.exception(message, exc_info=exception)
    exit(1)

14
src/dexorder/util/sql.py Normal file
View File

@@ -0,0 +1,14 @@
from datetime import datetime, timedelta
from typing import Union
def where_time_range(sql, time_column, start: Union[datetime, timedelta, None] = None, end: Union[datetime, timedelta, None] = None):
    """Append half-open [start, end) filters on *time_column* to *sql*.

    Each bound may be an absolute datetime, a timedelta meaning "this long
    before now" (negative deltas are tolerated via abs), or None to skip it.
    """
    def _absolute(bound):
        # relative bounds are anchored to the current wall clock
        return datetime.now() - abs(bound) if isinstance(bound, timedelta) else bound

    start = _absolute(start)
    end = _absolute(end)
    if start is not None:
        sql = sql.where(time_column >= start)
    if end is not None:
        sql = sql.where(time_column < end)
    return sql

View File

@@ -0,0 +1,24 @@
def round_tick(tick, tick_spacing):
    """
    returns the nearest available tick
    """
    # snap to the closest whole multiple of the spacing
    steps = round(tick / tick_spacing)
    return steps * tick_spacing
def nearest_available_ticks(tick, tick_spacing):
    """
    returns the two available ticks just below and above the given tick
    """
    # floor division aligns downward even for negative ticks
    lower = (tick // tick_spacing) * tick_spacing
    return lower, lower + tick_spacing
def spans_tick(tick, lower, upper, tick_spacing=1):
    """Return True if the range (lower, upper) overlaps the spacing-aligned
    ticks surrounding *tick*.

    Bug fix: the original called nearest_available_ticks(tick) without the
    required tick_spacing argument, so every invocation raised TypeError. The
    spacing is now an explicit keyword parameter (default 1 keeps the original
    positional signature intact).
    """
    below = tick // tick_spacing * tick_spacing  # aligned tick at or below
    above = below + tick_spacing                 # next aligned tick above
    return lower < above and upper > below
def spans_range(below, above, lower, upper):
    """Return True when the interval (below, above) overlaps (lower, upper)."""
    # two ranges overlap iff each starts before the other ends
    return below < upper and lower < above

62
test/test_blockstate.py Normal file
View File

@@ -0,0 +1,62 @@
from dexorder.base.blockstate import BlockState, BlockDict
from dexorder.db.model.block import Block
# a tiny fork tree: 10 <- 11a <- 12a, with 11b a sibling fork of 11a off block 10
block_10 = Block(chain=1, height=10, hash=bytes.fromhex('10'), parent=bytes.fromhex('09'), data=None)
block_11a = Block(chain=1, height=11, hash=bytes.fromhex('1a'), parent=block_10.hash, data=None)
block_11b = Block(chain=1, height=11, hash=bytes.fromhex('1b'), parent=block_10.hash, data=None)
block_12a = Block(chain=1, height=12, hash=bytes.fromhex('12'), parent=block_11a.hash, data=None)
# root state at block 10 seeds the 'series' dict with foo=bar
state = BlockState(block_10, {'series': {'foo': 'bar'}})
BlockState.set_cur(state)
d = BlockDict('series')
def start_block(b):
    """Make *b* the current block and register it with the fork-aware state."""
    Block.set_cur(b)
    state.add_block(b)


# mutate the dict along fork A: delete the seeded key, add a new one, then re-add foo
start_block(block_11a)
del d['foo']
d['foue'] = 'barre'
start_block(block_12a)
d['foo'] = 'bar2'
# fork B branches from block 10, so it must not observe fork A's changes
start_block(block_11b)
d['fu'] = 'ku'


def print_dict(x: dict = d):
    """Dump a mapping (default: the block-scoped dict) as aligned 'key : value' lines."""
    for k, v in x.items():
        print(f'{k:>10} : {v}')
# show the dict contents as seen from each block in the fork tree
for block in [block_10, block_11a, block_12a, block_11b]:
    Block.set_cur(block)
    print()
    print(Block.cur().hash)
    print_dict()


def test11b():
    """Fork B sees its own write plus the untouched root value of 'foo'."""
    Block.set_cur(block_11b)
    assert 'fu' in d
    assert d['fu'] == 'ku'
    assert 'foo' in d
    assert d['foo'] == 'bar'


def test12a():
    """Fork A at height 12 sees its delete, re-add, and earlier write, not fork B's."""
    Block.set_cur(block_12a)
    assert 'fu' not in d
    assert 'foo' in d
    assert d['foo'] == 'bar2'
    assert 'foue' in d
    assert d['foue'] == 'barre'


test11b()
test12a()
# promoting 11a to root folds its changes into root_state; the 12a view must not change
state.promote_root(block_11a)
print()
print('promoted root')
print_dict(state.root_state)
test12a()
state.promote_root(block_12a)
print()
print('promoted root')
print_dict(state.root_state)
test12a()