triggers impl, written but not connected or tested
@@ -1,2 +1,2 @@
 #!/bin/bash
-docker run --network=host --ulimit memlock=-1 docker.dragonflydb.io/dragonflydb/dragonfly --maxmemory 1G "$@"
+docker run --network=host --ulimit memlock=-1 docker.dragonflydb.io/dragonflydb/dragonfly --maxmemory 1G --dbfilename '' "$@"
@@ -11,6 +11,7 @@ class _NARG:
     def __bool__(self): return False
 NARG = _NARG()
 DELETE = object() # used as a value token to indicate removal of the key
+UNLOAD = object() # used as a value token to indicate the key is no longer needed in memory
 ADDRESS_0 = '0x0000000000000000000000000000000000000000'
 WEI = 1
 GWEI = 1_000_000_000
@@ -44,5 +44,6 @@ Polygon = Blockchain(137, 'Polygon') # POS not zkEVM
 Mumbai = Blockchain(80001, 'Mumbai')
 BSC = Blockchain(56, 'BSC')
 Arbitrum = Blockchain(42161, 'Arbitrum', 10, batch_size=1000) # todo configure batch size... does it depend on log count? :(
+Mock = Blockchain(1338, 'Mock', 10)
 
 current_chain = ContextVar[Blockchain]('current_chain')
@@ -1,3 +1,13 @@
+from eth_abi.packed import encode_packed
+from eth_utils import keccak, to_bytes, to_checksum_address
+
+from dexorder import dec
+from dexorder.contract import uniswapV3
+from dexorder.util import hexbytes
+
+UNISWAPV3_POOL_INIT_CODE_HASH = hexbytes('0xe34f199b19b2b4f47f68442619d555527d244f78a3297ea89325f843f87b8b54')
+
+
 class Fee:
     LOWEST = 100
     LOW = 500
@@ -8,21 +18,21 @@ class Fee:
 def ordered_addresses(addr_a:str, addr_b:str):
     return (addr_a, addr_b) if addr_a.lower() <= addr_b.lower() else (addr_b, addr_a)
 
+def uniswapV3_pool_address( addr_a: str, addr_b: str, fee: int):
+    return uniswap_pool_address(uniswapV3['factory'], addr_a, addr_b, fee)
+
-def pool_address(_factory_addr:str, _addr_a:str, _addr_b:str):
-    # todo compute pool address
-    raise NotImplementedError
-    # addr0, addr1 = ordered_addresses(addr_a, addr_b)
-    # get_create2_address()
+def uniswap_pool_address(factory_addr: str, addr_a: str, addr_b: str, fee: int) -> str:
+    token0, token1 = ordered_addresses(addr_a, addr_b)
+    salt = keccak(encode_packed(['address','address','uint24'],[token0, token1, fee]))
+    contract_address = keccak(
+        b"\xff"
+        + to_bytes(hexstr=factory_addr)
+        + salt
+        + UNISWAPV3_POOL_INIT_CODE_HASH
+    ).hex()[-40:]
+    return to_checksum_address(contract_address)
 
-# use the util.liquidity or util.simple_liquidity package instead
-# def liquidity_for_amount_0(lower: int, upper: int, amount_0: int):
-#     lower = convert.tick_to_price(lower)
-#     upper = convert.tick_to_price(upper)
-#     return amount_0 * upper * lower / (upper - lower)
-#
-# def liquidity_for_amount_1(lower: int, upper: int, amount_1: int ):
-#     lower = convert.tick_to_price(lower)
-#     upper = convert.tick_to_price(upper)
-#     return amount_1 / (upper - lower)
+def uniswap_price(sqrt_price):
+    d = dec(sqrt_price)
+    price = d * d / dec(2 ** (96 * 2))
+    return price
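uniswap_price converts a Uniswap V3 sqrtPriceX96 value into a plain token1-per-token0 price: price = (sqrtPriceX96 / 2^96)^2, which is exactly what the `d * d / dec(2 ** (96 * 2))` expression computes. A minimal standalone sketch of the same conversion (the function name and sample value are illustrative, not from the repository):

from decimal import Decimal

def sqrt_price_x96_to_price(sqrt_price_x96: int) -> Decimal:
    # price = (sqrtPriceX96 / 2**96) ** 2, kept in Decimal to avoid float truncation
    d = Decimal(sqrt_price_x96)
    return d * d / Decimal(2 ** 192)

# a sqrtPriceX96 of exactly 2**96 corresponds to a price of 1
assert sqrt_price_x96_to_price(2 ** 96) == Decimal(1)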
@@ -18,7 +18,7 @@ def get_factory() -> ContractProxy:
     if found is None:
         deployment_tag = config.deployments.get(str(chain_id), 'latest')
         try:
-            with open(f'contract/broadcast/Deploy.sol/{chain_id}/run-{deployment_tag}.json', 'rt') as file:
+            with open(f'../contract/broadcast/Deploy.sol/{chain_id}/run-{deployment_tag}.json', 'rt') as file:
                 deployment = json.load(file)
                 for tx in deployment['transactions']:
                     if tx['contractName'] == 'Factory':
@@ -31,18 +31,21 @@ def get_factory() -> ContractProxy:
 
 
 def get_contract_data(name):
-    with open(f'contract/out/{name}.sol/{name}.json', 'rt') as file:
+    with open(f'../contract/out/{name}.sol/{name}.json', 'rt') as file:
         return json.load(file)
 
 def get_contract_event(contract_name:str, event_name:str):
     return getattr(current_w3.get().eth.contract(abi=get_contract_data(contract_name)['abi']).events, event_name)()
 
 
-with open('contract/out/Vault.sol/Vault.json', 'rt') as _file:
-    _vault_info = json.load(_file)
-    VAULT_INIT_CODE_HASH = keccak(to_bytes(hexstr=_vault_info['bytecode']['object']))
+VAULT_INIT_CODE_HASH = None
 
 def vault_address(owner, num):
+    global VAULT_INIT_CODE_HASH
+    if VAULT_INIT_CODE_HASH is None:
+        with open('../contract/out/Vault.sol/Vault.json', 'rt') as _file:
+            vault_info = json.load(_file)
+            VAULT_INIT_CODE_HASH = keccak(to_bytes(hexstr=vault_info['bytecode']['object']))
     salt = keccak(encode_packed(['address','uint8'],[owner,num]))
     contract_address = keccak(
         b"\xff"
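Both uniswap_pool_address and vault_address are instances of the EIP-1014 CREATE2 rule: the deployed address is the low 20 bytes of keccak256(0xff ++ deployer ++ salt ++ keccak256(init_code)). A generic sketch of that rule using eth_utils (the function name and arguments are illustrative, not part of the repository):

from eth_utils import keccak, to_bytes, to_checksum_address

def create2_address(deployer: str, salt: bytes, init_code_hash: bytes) -> str:
    # EIP-1014: keccak256(0xff ++ deployer ++ salt ++ init_code_hash), keep the low 20 bytes
    digest = keccak(b"\xff" + to_bytes(hexstr=deployer) + salt + init_code_hash)
    return to_checksum_address('0x' + digest[-20:].hex())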
@@ -1,4 +1,4 @@
-from .diff import DiffEntry, DiffItem, DELETE
+from .diff import DiffEntry, DiffItem, DELETE, UNLOAD
 from .state import BlockState, current_blockstate
 from .blockdata import DataType, BlockDict, BlockSet
 
@@ -1,15 +1,17 @@
 import logging
 from collections import defaultdict
 from enum import Enum
-from typing import TypeVar, Generic, Iterable, Union, Any
+from typing import TypeVar, Generic, Iterable, Union, Any, Iterator
 
-from dexorder import NARG, DELETE
+from dexorder import NARG, DELETE, UNLOAD
 from dexorder.base.fork import current_fork
 from .state import current_blockstate
 from dexorder.util import key2str as util_key2str, str2key as util_str2key
 
 log = logging.getLogger(__name__)
 T = TypeVar('T')
+K = TypeVar('K')
+V = TypeVar('V')
 
 
 class DataType(Enum):
@@ -61,6 +63,14 @@ class BlockData:
     def delitem(self, item, overwrite=True):
         self.setitem(item, DELETE, overwrite)
 
+    def unload(self, item):
+        """
+        forgets the entry in memory when the current block is finalized, but does not delete the key from the collection. may only be
+        used when lazy_getitem is set
+        """
+        assert self.lazy_getitem is not None
+        self.setitem(item, UNLOAD)
+
     def contains(self, item):
         try:
             self.getitem(item)
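The distinction between delitem and the new unload: DELETE removes the key from the collection (and, via DbState, from the database), while UNLOAD only drops the in-memory diff once its block is promoted to the root, leaving the persisted value in place for lazy_getitem to reload later. A hedged usage sketch against a lazily loaded BlockDict (assumes an active blockstate context; the series name and keys are illustrative):

# assumes an active blockstate context and that this series has a lazy_getitem loader, which unload() asserts
statuses = BlockDict('o', db='lazy', str2key=str)

del statuses['vault|3']      # DELETE token: the key disappears from the collection and its db row is removed
statuses.unload('vault|7')   # UNLOAD token: the in-memory diff is dropped at root promotion; the stored value remains loadable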
@@ -89,41 +99,41 @@ class BlockSet(Generic[T], Iterable[T], BlockData):
         super().__init__(DataType.SET, series, **tags)
         self.series = series
 
-    def add(self, item):
+    def add(self, item: T):
         """ set-like semantics. the item key is added with a value of None. """
         self.setitem(item, None, overwrite=False)
 
-    def __delitem__(self, item):
+    def remove(self, item: T):
         self.delitem(item, overwrite=False)
 
-    def __contains__(self, item):
+    def __contains__(self, item: T) -> bool:
         return self.contains(item)
 
-    def __iter__(self):
+    def __iter__(self) -> Iterator[T]:
         yield from (k for k,v in self.iter_items(self.series))
 
 
-class BlockDict(Generic[T], BlockData):
+class BlockDict(Generic[K,V], BlockData):
 
     def __init__(self, series: Any, **tags):
         super().__init__(DataType.DICT, series, **tags)
 
-    def __setitem__(self, item, value):
+    def __setitem__(self, item: K, value: V) -> None:
         self.setitem(item, value)
 
-    def __getitem__(self, item):
+    def __getitem__(self, item: K) -> V:
         return self.getitem(item)
 
-    def __delitem__(self, item):
+    def __delitem__(self, item: K) -> None:
         self.delitem(item)
 
-    def __contains__(self, item):
+    def __contains__(self, item: K) -> bool:
         return self.contains(item)
 
-    def items(self):
+    def items(self) -> Iterable[tuple[K,V]]:
         return self.iter_items(self.series)
 
-    def get(self, item, default=None):
+    def get(self, item: K, default: V = None) -> V:
         return self.getitem(item, default)
 
@@ -4,7 +4,7 @@ from typing import Iterable, Optional, Union, Any
 from . import DiffItem, BlockSet, BlockDict, DELETE, BlockState, current_blockstate, DataType
 from .blockdata import BlockData, SeriesCollection
 from .diff import DiffEntryItem
-from .. import db
+from .. import db, UNLOAD
 from ..base.chain import current_chain
 from ..base.fork import current_fork, Fork
 from ..database.model import SeriesSet, SeriesDict, Block
@@ -52,6 +52,8 @@ class DbState(SeriesCollection):
             if diff.value is DELETE:
                 Entity = SeriesSet if t == DataType.SET else SeriesDict if t == DataType.DICT else None
                 db.session.query(Entity).filter(Entity.chain==chain_id, Entity.series==diffseries, Entity.key==diffkey).delete()
+            elif diff.value is UNLOAD:
+                pass
             else:
                 # upsert
                 if t == DataType.SET:
@@ -1,12 +1,12 @@
 from dataclasses import dataclass
 from typing import Union, Any
 
-from dexorder import DELETE
+from dexorder import DELETE, UNLOAD
 
 
 @dataclass
 class DiffEntry:
-    value: Union[Any, DELETE]
+    value: Union[Any, DELETE, UNLOAD]
     height: int
     hash: bytes
 
@@ -18,7 +18,7 @@ class DiffItem:
     value: Any
 
     def __str__(self):
-        return f'{self.series}.{self.key}={"[DEL]" if self.value is DELETE else self.value}'
+        return f'{self.series}.{self.key}={"[DEL]" if self.value is DELETE else "[UNL]" if self.value is UNLOAD else self.value}'
 
 @dataclass
 class DiffEntryItem:
@@ -31,4 +31,5 @@ class DiffEntryItem:
         return self.entry.value
 
     def __str__(self):
-        return f'{self.entry.hash.hex()} {self.series}.{self.key}={"[DEL]" if self.entry.value is DELETE else self.entry.value}'
+        return (f'{self.entry.hash.hex()} {self.series}.{self.key}='
+                f'{"[DEL]" if self.entry.value is DELETE else "[UNL]" if self.value is UNLOAD else self.entry.value}')
@@ -6,7 +6,7 @@ from typing import Any, Optional, Union, Sequence, Reversible
 
 from sortedcontainers import SortedList
 
-from dexorder import NARG
+from dexorder import NARG, UNLOAD
 from dexorder.base.fork import Fork, DisjointFork
 from dexorder.database.model import Block
 from dexorder.util import hexstr
@@ -125,7 +125,7 @@ class BlockState:
 
     def _get_from_diffs(self, fork, diffs):
         for diff in reversed(diffs):
-            if diff.height <= self.root_block.height or fork is not None and diff in fork:
+            if diff.height <= self.root_block.height or fork is not None and diff in fork and diff.value is not UNLOAD:
                 if diff.value is DELETE:
                     break
                 else:
@@ -134,7 +134,6 @@ class BlockState:
                     return diff.value
         return DELETE
 
-
     def set(self, fork: Optional[Fork], series, key, value, overwrite=True):
         diffs = self.diffs_by_series[series][key]
         if overwrite or self._get_from_diffs(fork, diffs) != value:
@@ -162,6 +161,7 @@ class BlockState:
         # walk the by_height list to delete any aged-out block data
        # in order to prune diffs_by_series, updated_keys remembers all the keys that were touched by any aged-out block
         series_deletions = []
+        key_unloads: list[tuple[Any,Any]] = []
         updated_keys = set()
         while self.by_height and self.by_height[0].height <= block.height:
             dead = self.by_height.pop(0)
@@ -175,6 +175,8 @@ class BlockState:
             for d in block_diffs:
                 if d.key == BlockState._DELETE_SERIES_KEY and dead.hash in new_root_fork:
                     series_deletions.append(d.series)
+                elif d.value is UNLOAD and dead.hash in new_root_fork:
+                    key_unloads.append((d.series, d.key))
                 else:
                     updated_keys.add((d.series, d.key))
             del self.diffs_by_hash[dead.hash]
@@ -200,6 +202,11 @@ class BlockState:
             # if only one diff remains, and it's old, and it's a delete, then we can actually delete the diff list
             if not difflist or len(difflist) == 1 and difflist[0].value == DELETE and difflist[0].height <= new_root_fork.height:
                 del self.diffs_by_series[s][k]
+        for s,k in key_unloads:
+            try:
+                del self.diffs_by_series[s][k]
+            except KeyError:
+                pass
         for s in series_deletions:
             del self.diffs_by_series[s]
         self.root_block = block
@@ -1,7 +1,7 @@
 from .abi import abis
 from .contract_proxy import ContractProxy, Transaction
 from .pool_contract import UniswapV3Pool
-from .uniswap_contracts import uniswap
+from .uniswap_contracts import uniswapV3
 
 from eth_abi.codec import ABIDecoder, ABIEncoder
 from eth_abi.registry import registry as default_registry
@@ -66,7 +66,7 @@ class ContractProxy:
     def __init__(self, address: str = None, name=None, *, _contracts=None, _wrapper=call_wrapper, abi=None):
         """
         For regular contract use, either name or abi must be supplied. If abi is None, then name is used to find
-        the ABI in the project's contract/out/ directory. Otherwise, abi may be either a string or preparsed dict.
+        the ABI in the project's ../contract/out/ directory. Otherwise, abi may be either a string or preparsed dict.
         If address is not supplied, this proxy may still be used to construct a new contract via deploy(). After
         deploy() completes, the address member will be populated.
         """
@@ -1,4 +1,4 @@
-from dexorder.base.chain import Ethereum, Goerli, Polygon, Mumbai
+from dexorder.base.chain import Ethereum, Goerli, Polygon, Mumbai, Arbitrum, Mock
 from dexorder.contract.contract_proxy import ContractProxy
 from dexorder.blockchain import ByBlockchainDict
 
@@ -12,8 +12,8 @@ class _UniswapContracts (ByBlockchainDict[ContractProxy]):
             'quoter': ContractProxy('0xb27308f9F90D607463bb33eA1BeBb41C27CE5AB6', 'IQuoter'),
             'swap_router': ContractProxy('0xE592427A0AEce92De3Edee1F18E0157C05861564', 'ISwapRouter'),
         }
-        super().__init__({chain.chain_id:std for chain in (Ethereum, Polygon, Goerli, Mumbai)})
+        super().__init__({chain.chain_id:std for chain in (Ethereum, Polygon, Goerli, Mumbai, Arbitrum, Mock)})
 
 
-uniswap = _UniswapContracts()
+uniswapV3 = _UniswapContracts()
 
@@ -2,8 +2,9 @@ import logging
 
 from web3.types import EventData
 
-from dexorder import dec, current_pub, current_w3
+from dexorder import current_pub, current_w3
 from dexorder.base.chain import current_chain
+from dexorder.blockchain.uniswap import uniswap_price
 from dexorder.blockchain.util import vault_address, get_contract_event, get_factory, get_contract_data
 from dexorder.contract import VaultContract
 from dexorder.data import pool_prices, vault_owners, vault_tokens, underfunded_vaults
@@ -81,8 +82,7 @@ def handle_swap(swap: EventData):
     except KeyError:
         return
     addr = swap['address']
-    d = dec(sqrt_price)
-    price = d * d / dec(2 ** (96 * 2))
+    price = uniswap_price(sqrt_price)
     log.debug(f'pool {addr} {price}')
     pool_prices[addr] = price
 
@@ -4,20 +4,22 @@ from dataclasses import dataclass
 from enum import Enum
 from typing import Optional
 
-from dexorder.contract import abi_decoder, abi_encoder
+from dexorder import dec
+from dexorder.blockchain.uniswap import uniswap_pool_address, uniswap_price, uniswapV3_pool_address
+from dexorder.contract import abi_decoder, abi_encoder, uniswapV3
 
 log = logging.getLogger(__name__)
 
 
 # enum SwapOrderState {
-#     Open, Canceled, Filled, Template
+#     Open, Canceled, Filled, Expired
 # }
 
 class SwapOrderState (Enum):
     Open = 0
     Canceled = 1
     Filled = 2
-    Template = 3
+    Expired = 3
 
 class Exchange (Enum):
     UniswapV2 = 0
@@ -54,9 +56,16 @@ class SwapOrder:
         return (self.tokenIn, self.tokenOut, self.route.dump(), self.amount, self.amountIsInput,
                 self.outputDirectlyToOwner, self.chainOrder, [t.dump() for t in self.tranches])
 
+    @property
+    def pool_address(self):
+        if self.route.exchange == Exchange.UniswapV3:
+            return uniswapV3_pool_address( self.tokenIn, self.tokenOut, self.route.fee )
+        else:
+            raise NotImplementedError
+
 @dataclass
 class SwapStatus:
-    state: SwapOrderState # todo refactor into canceled flag
+    state: SwapOrderState
     start: int
     ocoGroup: Optional[int]
     filledIn: Optional[int] # if None then look in the order_filled blockstate
@@ -69,7 +78,7 @@ class SwapStatus:
 class SwapOrderStatus (SwapStatus):
     order: SwapOrder
 
-    def __init__(self, order, *swapstatus_args):
+    def __init__(self, order: SwapOrder, *swapstatus_args):
         """ init with order object first followed by the swap status args """
         super().__init__(*swapstatus_args)
         self.order = order
@@ -89,6 +98,8 @@ class SwapOrderStatus (SwapStatus):
     def dump(self):
         return self.order.dump(), self.state.value, self.start, self.ocoGroup, self.filledIn, self.filledOut, self.trancheFilledIn, self.trancheFilledOut
 
+    def copy(self):
+        return SwapOrderStatus.load(self.dump())
+
 NO_OCO = 18446744073709551615 # max uint64
 
@@ -117,20 +128,33 @@ class Constraint (ABC):
     def _dump(self, types, values):
         return self.mode, abi_encoder.encode(types, values)
 
 
+class PriceConstraint (Constraint, ABC):
+    @abstractmethod
+    def passes(self, old_price: dec, new_price: dec) -> bool:...
+
+
 @dataclass
-class PriceConstraint (Constraint):
+class LimitConstraint (PriceConstraint):
     isAbove: bool
     isRatio: bool
     valueSqrtX96: int
 
     TYPES = 'bool','bool','uint160'
 
+    def __init__(self, *args):
+        self.isAbove, self.isRatio, self.valueSqrtX96 = args
+        self.limit = uniswap_price(self.valueSqrtX96)
+
     def load(self, obj):
-        isAbove, isRatio, valueSqrtX96 = abi_decoder.decode(PriceConstraint.TYPES, obj)
-        return PriceConstraint(ConstraintMode.Limit, isAbove, isRatio, valueSqrtX96)
+        isAbove, isRatio, valueSqrtX96 = abi_decoder.decode(LimitConstraint.TYPES, obj)
+        return LimitConstraint(ConstraintMode.Limit, isAbove, isRatio, valueSqrtX96)
 
     def dump(self):
-        return self._dump(PriceConstraint.TYPES, (self.isAbove, self.isRatio, self.valueSqrtX96))
+        return self._dump(LimitConstraint.TYPES, (self.isAbove, self.isRatio, self.valueSqrtX96))
+
+    def passes(self, old_price: dec, new_price: dec) -> bool:
+        return self.isAbove and new_price >= self.limit or not self.isAbove and new_price <= self.limit
 
 
 @dataclass
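LimitConstraint converts its valueSqrtX96 once via uniswap_price and then compares only the new pool price against that limit: with isAbove it passes at or above the limit, otherwise at or below it. A small illustration of that predicate with made-up numbers (not from the repository):

limit = 1.05          # illustrative limit price
is_above = True
for new_price in (1.00, 1.04, 1.05, 1.10):
    passes = (is_above and new_price >= limit) or (not is_above and new_price <= limit)
    print(new_price, passes)  # prints False, False, True, True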
@@ -1,8 +1,9 @@
 import logging
 from dataclasses import dataclass
+from typing import overload
 
-from dexorder.blockstate import BlockDict
-from dexorder.order.orderlib import SwapStatus
+from dexorder.blockstate import BlockDict, BlockSet
+from dexorder.order.orderlib import SwapOrderStatus, SwapOrderState
 
 log = logging.getLogger(__name__)
 
@@ -33,33 +34,167 @@ class TrancheKey (OrderKey):
 
 
 @dataclass
-class Remaining:
-    isInput: bool  # True iff the remaining amount is in terms of the input token
-    remaining: int
+class Filled:
+    filled_in: int
+    filled_out: int
 
     @staticmethod
     def basic2remaining(basic):
-        return Remaining(*basic)
+        return Filled(*basic)
 
     def remaining2basic(self):
-        return self.isInput, self.remaining
+        return self.filled_in, self.filled_out
 
 
-# ORDER STATE
-# various blockstate fields hold different aspects of an order's state.
-
-# all order and status data: writes to db but lazy-loads
-orders = BlockDict[OrderKey,SwapStatus]('o', str2key=OrderKey.str2key, db='lazy') # todo lazy what's that about?
-
-# the set of unfilled, not-canceled orders
-active_orders = BlockDict[OrderKey]('ao', str2key=OrderKey.str2key, db=True, redis=True)
-
-# total remaining amount per order, for all unfilled, not-canceled orders
-order_remaining = BlockDict[OrderKey,Remaining](
-    'or', str2key=OrderKey.str2key, value2basic=Remaining.remaining2basic, basic2value=Remaining.basic2remaining, db=True, redis=True)
-
-# total remaining amount per tranche
-tranche_remaining = BlockDict[TrancheKey,Remaining](
-    'tr', str2key=TrancheKey.str2key, value2basic=Remaining.remaining2basic, basic2value=Remaining.basic2remaining, db=True, redis=True)
-
 # todo oco groups
 
+class Order:
+    """
+    represents the canonical internal representation of an order. some members are immutable like the order spec, and some are
+    represented in various blockstate structures. this class hides that complexity to provide a clean interface to orders.
+    """
+
+    instances: dict[OrderKey, 'Order'] = {}
+
+    @staticmethod
+    @overload
+    def of(key: OrderKey):...
+
+    @staticmethod
+    @overload
+    def of(vault: str, order_index: int):...
+
+    @staticmethod
+    def of(a, b=None):
+        return Order.instances[a if b is None else OrderKey(a,b)]
+
+
+    @staticmethod
+    def create(vault: str, order_index: int, status: SwapOrderStatus):
+        """ use when a brand new order is detected by the system """
+        key = OrderKey(vault, order_index)
+        Order._statuses[key] = status.copy()  # always copy the struct when setting. values in BlockData must be immutable
+        order = Order(key)
+        if order.is_open:
+            Order._open_keys.add(key)
+            Order._order_filled[key] = Filled(status.filledIn, status.filledOut)
+            for i, tk in enumerate(order.tranche_keys):
+                Order._tranche_filled[tk] = Filled(status.trancheFilledIn[i], status.trancheFilledOut[i])
+        return order
+
+    @overload
+    def __init__(self, key: OrderKey): ...
+
+    @overload
+    def __init__(self, vault: str, order_index: int): ...
+
+    def __init__(self, a, b=None):
+        """ references an existing Order in the system. to create a new order, use create() """
+        key = a if b is None else OrderKey(a,b)
+        assert key not in Order.instances
+        self.key = key
+        self.status: SwapOrderStatus = Order._statuses[key].copy()
+        self.pool_address: str = self.status.order.pool_address
+        self.tranche_keys = [TrancheKey(key.vault, key.order_index, i) for i in range(len(self.status.trancheFilledIn))]
+
+    @property
+    def state(self):
+        return self.status.state
+
+    @property
+    def order(self):
+        return self.status.order
+
+    @property
+    def amount(self):
+        return self.order.amount
+
+    @property
+    def remaining(self):
+        return self.amount - self.filled
+
+    @property
+    def filled_in(self):
+        return Order._order_filled[self.key].filled_in if self.is_open else self.status.filledIn
+
+    @property
+    def filled_out(self):
+        return Order._order_filled[self.key].filled_out if self.is_open else self.status.filledOut
+
+    def tranche_filled_in(self, tk: TrancheKey):
+        return Order._tranche_filled[tk].filled_in if self.is_open else self.status.trancheFilledIn[tk.tranche_index]
+
+    def tranche_filled_out(self, tk: TrancheKey):
+        return Order._tranche_filled[tk].filled_out if self.is_open else self.status.trancheFilledOut[tk.tranche_index]
+
+    def tranche_filled(self, tk: TrancheKey):
+        return self.tranche_filled_in(tk) if self.amount_is_input else self.tranche_filled_out(tk)
+
+    @property
+    def filled(self):
+        return self.filled_in if self.amount_is_input else self.filled_out
+
+    @property
+    def amount_is_input(self):
+        return self.order.amountIsInput
+
+    @property
+    def is_open(self):
+        return self.state is SwapOrderState.Open
+
+    def add_fill(self, tranche_index: int, filled_in: int, filled_out: int):
+        # order fill
+        old = Order._order_filled[self.key]
+        fin = old.filled_in + filled_in
+        fout = old.filled_out + filled_out
+        Order._order_filled[self.key] = Filled(fin, fout)
+        # tranche fill
+        tk = self.tranche_keys[tranche_index]
+        old = Order._tranche_filled[tk]
+        fin = old.filled_in + filled_in
+        fout = old.filled_out + filled_out
+        Order._tranche_filled[tk] = Filled(fin, fout)
+
+    def complete(self, final_state: SwapOrderState):
+        """ updates the static order record with its final values, then deletes all its dynamic blockstate and removes the Order from the actives list """
+        assert final_state is not SwapOrderState.Open
+        status = self.status
+        status.state = final_state
+        if self.is_open:
+            Order._open_keys.remove(self.key)
+            filled = Order._order_filled[self.key]
+            del Order._order_filled[self.key]
+            status.filledIn = filled.filled_in
+            status.filledOut = filled.filled_out
+            for i, tk in enumerate(self.tranche_keys):
+                filled = Order._tranche_filled[tk]
+                del Order._tranche_filled[tk]
+                status.trancheFilledIn[i] = filled.filled_in
+                status.trancheFilledOut[i] = filled.filled_out
+        final_status = status.copy()
+        Order._statuses[self.key] = final_status  # set the status in order to save it
+        Order._statuses.unload(self.key)  # but then unload from memory after root promotion
+
+    # ORDER STATE
+    # various blockstate fields hold different aspects of an order's state.
+
+    # this series holds "everything" about an order in the canonical format specified by the contract orderlib, except
+    # the filled amount fields for active orders are maintained in the _order_filled and _tranche_filled series.
+    _statuses: BlockDict[OrderKey, SwapOrderStatus] = BlockDict('o', db='lazy', str2key=OrderKey.str2key)
+
+    # open orders = the set of unfilled, not-canceled orders
+    _open_keys: BlockSet[OrderKey] = BlockSet('oo', db=True, redis=True, str2key=OrderKey.str2key)
+
+    # total remaining amount per order, for all unfilled, not-canceled orders
+    _order_filled: BlockDict[OrderKey, Filled] = BlockDict(
+        'of', db=True, redis=True, str2key=OrderKey.str2key, value2basic=Filled.remaining2basic, basic2value=Filled.basic2remaining)
+
+    # total remaining amount per tranche
+    _tranche_filled: BlockDict[TrancheKey, Filled] = BlockDict(
+        'tf', db=True, redis=True, str2key=TrancheKey.str2key, value2basic=Filled.remaining2basic, basic2value=Filled.basic2remaining)
+
+
+active_orders: dict[OrderKey,Order] = {}
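Order wraps the immutable SwapOrderStatus record plus the mutable fill counters kept in blockstate, so callers never touch the underlying series directly. A hedged sketch of the intended lifecycle, assuming an active blockstate context and an already decoded SwapOrderStatus named status (the vault address and amounts are illustrative):

# assumes an active blockstate context; '0x...' stands in for a real vault address
order = Order.create('0x...', 0, status)  # copies the status into Order._statuses and seeds the fill counters
order.add_fill(tranche_index=0, filled_in=10**18, filled_out=995 * 10**15)  # accumulate a fill on tranche 0
if order.remaining <= 0:
    order.complete(SwapOrderState.Filled)  # fold the final fills back into the status, then unload it from memory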
@@ -1,19 +1,23 @@
 import logging
+from enum import Enum
 from typing import Callable
 
-from dexorder.blockstate import BlockSet
-from .orderlib import SwapOrderStatus, TimeConstraint, PriceConstraint, ConstraintMode
+from dexorder.blockstate import BlockSet, BlockDict
+from .orderlib import TimeConstraint, LimitConstraint, ConstraintMode
 from dexorder.util import defaultdictk
+from .orderstate import TrancheKey, Order
+from ..database.model.block import current_block
 
 log = logging.getLogger(__name__)
 
 # todo time and price triggers should be BlockSortedSets that support range queries
-TimeTrigger = Callable[[int, int], None] # func(start_timestamp, end_timestamp)
+TimeTrigger = Callable[[int, int], None] # func(previous_timestamp, current_timestamp)
 time_triggers:BlockSet[TimeTrigger] = BlockSet('tt')
 
 PriceTrigger = Callable[[int, int], None] # pool previous price, pool new price
-price_triggers:dict[str, BlockSet[PriceTrigger]] = defaultdictk(BlockSet) # different BlockSet per pool address
+price_triggers:dict[str, BlockSet[PriceTrigger]] = defaultdictk(lambda addr:BlockSet(f'pt|{addr}')) # different BlockSet per pool address
+
+execution_requests:BlockDict[TrancheKey,int] = BlockDict('te') # value is block height of the request
 
 def intersect_ranges( a_low, a_high, b_low, b_high):
     low, high = max(a_low,b_low), min(a_high,b_high)
@@ -21,35 +25,90 @@ def intersect_ranges( a_low, a_high, b_low, b_high):
         low, high = None, None
     return low, high
 
+class TrancheStatus (Enum):
+    Early = 0    # first time trigger hasn't happened yet
+    Pricing = 1  # we are inside the time window and checking prices
+    Filled = 2   # tranche has no more available amount
+    Expired = 3  # time deadline has passed and this tranche cannot be filled
+
 class TrancheTrigger:
-    def __init__(self, vault: str, order_index:int, tranche_index: int):
-        self.series = f'{vault}|{order_index}|{tranche_index}|'
-        self.vault = vault
-        self.order_index = order_index
-        self.tranche_index = tranche_index
+    def __init__(self, order: Order, tranche_key: TrancheKey):
+        assert order.key.vault == tranche_key.vault and order.key.order_index == tranche_key.order_index
+        self.order = order
+        self.tk = tranche_key
+        self.status = TrancheStatus.Early
 
-    # todo refactor so we have things like tranche amount filled as blockstate, order amount remaining
+        tranche = order.order.tranches[self.tk.tranche_index]
+        tranche_amount = order.amount * tranche.fraction // 10**18
+        tranche_filled = order.tranche_filled(self.tk)
+        tranche_remaining = tranche_amount - tranche_filled
 
-    def enable(self, status: SwapOrderStatus):
-        tranche = status.order.tranches[self.tranche_index]
-        tranche_amount = status.order.amount * tranche.fraction // 10**18
-        tranche_filled = status.trancheFilledIn[self.tranche_index] if status.order.amountIsInput else status.trancheFilledOut[self.tranche_index]
-        order_filled = status.filledIn if status.order.amountIsInput else status.filledOut
-        remaining = min(tranche_amount - tranche_filled, status.order.amount - order_filled)
-        if remaining <= 0: # todo dust?
+        if tranche_remaining <= 0:
+            self.status = TrancheStatus.Filled
             return
 
-        time_constraint = None
-        price_constraints = []
-        if status.filledOut:
-            ...
+        self.time_constraint = time_constraint = None
+        self.price_constraints = []
         for c in tranche.constraints:
             if c.mode == ConstraintMode.Time:
                 c: TimeConstraint
                 time_constraint = (c.earliest, c.latest) if time_constraint is None else intersect_ranges(*time_constraint, c.earliest, c.latest)
             elif c.mode == ConstraintMode.Limit:
-                c: PriceConstraint
+                c: LimitConstraint
                 raise NotImplementedError
             else:
                 raise NotImplementedError
+        self.time_constraint = time_constraint
+        if time_constraint is None:
+            self.status = TrancheStatus.Pricing
+        else:
+            timestamp = current_block.get().timestamp
+            earliest, latest = time_constraint
+            self.status = TrancheStatus.Early if timestamp < earliest else TrancheStatus.Expired if timestamp > latest else TrancheStatus.Pricing
+            self.enable_time_trigger()
+        if self.status == TrancheStatus.Pricing:
+            self.enable_price_trigger()
+
+    def enable_time_trigger(self):
+        if self.time_constraint:
+            time_triggers.add(self.time_trigger)
+
+    def disable_time_trigger(self):
+        if self.time_constraint:
+            time_triggers.remove(self.time_trigger)
+
+    def time_trigger(self, _prev, now):
+        if now >= self.time_constraint[1]:
+            self.disable()
+            self.status = TrancheStatus.Expired
+        if self.status == TrancheStatus.Early and now >= self.time_constraint[0]:
+            self.status = TrancheStatus.Pricing
+            self.enable_price_trigger()
+
+    def enable_price_trigger(self):
+        price_triggers[self.order.pool_address].add(self.price_trigger)
+
+    def disable_price_trigger(self):
+        price_triggers[self.order.pool_address].remove(self.price_trigger)
+
+    def price_trigger(self, prev, cur):
+        if all(pc.passes(prev,cur) for pc in self.price_constraints):
+            self.execute()
+
+    def execute(self):
+        log.info(f'execution request for {self.tk}')
+        execution_requests[self.tk] = current_block.get().height
+
+    def disable(self):
+        self.disable_time_trigger()
+        self.disable_price_trigger()
+
+
+class OrderTriggers:
+    def __init__(self, order: Order):
+        self.order = order
+        self.triggers = [TrancheTrigger(order, tk) for tk in self.order.tranche_keys]
+
+    def disable(self):
+        for t in self.triggers:
+            t.disable()
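intersect_ranges computes the overlap of two closed time windows and is used to fold a tranche's multiple Time constraints into one effective window; disjoint inputs collapse to (None, None). A small worked example (values are illustrative):

assert intersect_ranges(10, 20, 15, 30) == (15, 20)       # overlap of [10, 20] and [15, 30]
assert intersect_ranges(10, 20, 25, 30) == (None, None)   # disjoint windows yield no valid window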