transaction management bugfixes

Tim Olson
2024-01-08 22:43:25 -04:00
parent 063ef3fc65
commit d9f0711ce8
7 changed files with 60 additions and 13 deletions

View File

@@ -57,6 +57,19 @@ class SwapOrder:
         return (self.tokenIn, self.tokenOut, self.route.dump(), str(self.amount), str(self.minFillAmount), self.amountIsInput,
                 self.outputDirectlyToOwner, self.chainOrder, [t.dump() for t in self.tranches])
 
+    def __str__(self):
+        msg = f'''
+SwapOrder
+in: {self.tokenIn}
+out: {self.tokenOut}
+exchange: {self.route.exchange, self.route.fee}
+amount: {"input" if self.amountIsInput else "output"} {self.amount} {"to owner" if self.outputDirectlyToOwner else ""}
+minFill: {self.minFillAmount}
+tranches:
+'''
+        for tranche in self.tranches:
+            msg += f' {tranche}\n'
+        return msg
 
 @dataclass
 class SwapStatus:
@@ -207,6 +220,20 @@ class Tranche:
             self.startTime, self.endTime, minB, minM, maxB, maxM,
         )
 
+    def __str__(self):
+        msg = f'{self.fraction/MAX_FRACTION:.1%} {self.startTime} to {self.endTime}'
+        if self.marketOrder:
+            msg += ' market order'
+        else:
+            if self.minIntercept or self.minSlope:
+                msg += f' >{self.minIntercept:.5g}'
+                if self.minSlope:
+                    msg += f'{self.minSlope:+.5g}'
+            if self.maxIntercept or self.maxSlope:
+                msg += f' <{self.maxIntercept:.5g}'
+                if self.maxSlope:
+                    msg += f'{self.maxSlope:+.5g}'
+        return msg
 
 @dataclass
 class PriceProof:

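For a quick sense of what the new Tranche.__str__ produces, here is a minimal standalone sketch that reproduces the same format strings with made-up values; MAX_FRACTION is assumed to be the fixed-point denominator for fraction, and the timestamps are illustrative unix seconds, none of which are taken from this diff.

    # Reproduces the new formatting outside the Tranche class, with hypothetical values.
    MAX_FRACTION = 65535                          # assumption, not from this commit
    fraction, start_time, end_time = 16384, 1704750000, 1704753600
    min_intercept, min_slope = 1.2345, 0.0001     # a minimum price line, no maximum

    msg = f'{fraction/MAX_FRACTION:.1%} {start_time} to {end_time}'
    msg += f' >{min_intercept:.5g}'               # intercept of the min line
    msg += f'{min_slope:+.5g}'                    # signed slope appended directly
    print(msg)                                    # 25.0% 1704750000 to 1704753600 >1.2345+0.0001
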
View File

@@ -7,7 +7,7 @@ from .contract_proxy import ContractProxy
 def get_contract_data(name):
     if name == "Vault" and os.path.exists(f'../contract/out/I{name}.sol/I{name}.json') :
-        logging.warning("getting abi from IVault.json instead of Vault.json")
+        # logging.debug("getting abi from IVault.json instead of Vault.json")
         name = "IVault" # Special case for proxy Vault
     with open(f'../contract/out/{name}.sol/{name}.json', 'rt') as file:
         return json.load(file)

View File

@@ -17,6 +17,12 @@ def call_wrapper(func):
 def transact_wrapper(func):
+    async def f(*args, **kwargs):
+        return await func(*args, **kwargs).transact()
+    return f
+
+
+def build_wrapper(func):
     async def f(*args, **kwargs):
         try:
             account = current_account.get()
@@ -78,6 +84,11 @@ class ContractProxy:
         # noinspection PyTypeChecker
         return ContractProxy(self.address, self._interface_name, _contracts=self._contracts, _wrapper=transact_wrapper, abi=self._abi)
 
+    @property
+    def build(self):
+        # noinspection PyTypeChecker
+        return ContractProxy(self.address, self._interface_name, _contracts=self._contracts, _wrapper=build_wrapper, abi=self._abi)
+
     def __getattr__(self, item):
         return self._wrapper(self.contract.constructor if item == 'constructor' else self.contract.functions[item])

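Read together with the TrancheExecutionHandler change further down, the intent appears to be that `.transact` still sends a contract call immediately while the new `.build` only constructs a ContractTransaction for the transaction-job pipeline to persist, sign and send later. A rough usage sketch; the `execute` call and its argument tuple are copied from that handler, while the local variable names are mine.

    dexorder = get_dexorder_contract()
    args = (req.vault, req.order_index, req.tranche_index, req.price_proof)

    # previous behavior: transact_wrapper calls .transact() on the bound function,
    # so the transaction is sent as part of this call
    await dexorder.transact.execute(job_id.bytes, args)

    # new behavior: build_wrapper returns the built ContractTransaction without sending;
    # create_transaction()/send_transactions() store and broadcast it later
    ctx = await dexorder.build.execute(job_id.bytes, args)
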
View File

@@ -115,7 +115,7 @@ async def handle_order_placed(event: EventData):
     log.debug(f'raw order status {obj}')
     order = Order.create(vault.address, index, obj)
     await activate_order(order)
-    log.debug(f'new order {order}')
+    log.debug(f'new order {order} {order.order}')
 
 
 def handle_swap_filled(event: EventData):
@@ -265,26 +265,27 @@ def process_active_tranches():
     for tk, proof in active_tranches.items():
         old_req = execution_requests.get(tk)
         height = current_block.get().height
-        if old_req is None or old_req.height <= height:
+        if old_req is None or old_req.height <= height: # <= used so proof is updated with more recent values
             log.info(f'execution request for {tk}')
             execution_requests[tk] = ExecutionRequest(height, proof)
 
 
 async def process_execution_requests():
     height = current_block.get().height
-    execs = [] # which requests to act on
+    execs = {} # which requests to act on
     for tk, er in execution_requests.items():
         tk: TrancheKey
         er: ExecutionRequest
         pending = inflight_execution_requests.get(tk)
-        if pending is None or pending > height or height-pending >= 30: # todo execution timeout => retry ; should we use timestamps? configure per-chain.
-            execs.append((tk,er))
+        log.debug(f'tranche key {tk} pending height: {pending}')
+        if pending is None or height-pending >= 30: # todo execution timeout => retry ; should we use timestamps? configure per-chain.
+            execs[tk] = er
         else:
             log.debug(f'tranche {tk} is pending execution')
     # execute the list
     # todo batch execution
-    for tk, er in execs:
+    for tk, er in execs.items():
         log.info(f'executing tranche {tk}')
         job = submit_transaction_request(new_tranche_execution_request(tk, er.proof))
         inflight_execution_requests[tk] = height
@@ -326,10 +327,14 @@ def finish_execution_request(req: TrancheExecutionRequest, error: str):
     elif error == 'TF':
         # Tranche Filled
         log.warning(f'tranche already filled {tk}')
-        triggers = OrderTriggers.instances[order.key]
-        tranche_trigger = triggers.triggers[tk.tranche_index]
-        tranche_trigger.status = TrancheStatus.Filled
-        tranche_trigger.disable()
+        try:
+            triggers = OrderTriggers.instances[order.key]
+            tranche_trigger = triggers.triggers[tk.tranche_index]
+        except KeyError:
+            pass
+        else:
+            tranche_trigger.status = TrancheStatus.Filled
+            tranche_trigger.disable()
     elif error == 'Too little received':
         # from UniswapV3 SwapRouter when not even 1 satoshi of output was gained
         log.debug('warning: de minimis liquidity in pool')

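Two things change in process_execution_requests: execs becomes a dict keyed by TrancheKey, so duplicate requests for the same tranche collapse into a single execution per pass, and an in-flight attempt is retried only once it is at least 30 blocks old (the old `pending > height` clause is gone). A small worked check of that retry window, using a hypothetical `eligible` helper and illustrative block heights:

    def eligible(pending, height, timeout=30):
        # pending is the block height at which the last execution was submitted,
        # or None if nothing is in flight for this tranche
        return pending is None or height - pending >= timeout

    assert eligible(None, 120)        # never submitted: execute now
    assert not eligible(95, 120)      # submitted 25 blocks ago: still pending, skip
    assert eligible(95, 125)          # 30 blocks later: timed out, retry
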
View File

@@ -19,7 +19,7 @@ class TrancheExecutionHandler (TransactionHandler):
     async def build_transaction(self, job_id: UUID, req: TrancheExecutionRequest) -> dict:
         # noinspection PyBroadException
         try:
-            return await get_dexorder_contract().transact.execute(job_id.bytes, (req.vault, req.order_index, req.tranche_index, req.price_proof))
+            return await get_dexorder_contract().build.execute(job_id.bytes, (req.vault, req.order_index, req.tranche_index, req.price_proof))
         except ContractPanicError as px:
             log.error(f'While executing job {job_id}: {px}')
             await self.complete_transaction(db.session.get(TransactionJob, job_id))

View File

@@ -327,6 +327,8 @@ class BlockStateRunner:
     async def handle_time_tick(self, blockhash):
+        if current_blockstate.get() is None:
+            return
         # similar to handle_head, but we only call the postprocess events, since there was only a time tick and no new block data
         block = self.state.by_hash[blockhash]
         fork = self.state.fork(block)

View File

@@ -67,9 +67,10 @@ async def create_transaction(job: TransactionJob):
     else:
         ctx: ContractTransaction = await handler.build_transaction(job.id, job.request)
         if ctx is None:
-            log.warning(f'unable to build transaction for job {job.id}')
+            log.warning(f'unable to send transaction for job {job.id}')
             return
         job.state = TransactionJobState.Signed # todo lazy signing
+        db.session.add(job)
         dbtx = DbTransaction(id=ctx.id_bytes, job=job, data=ctx.data, receipt=None)
         db.session.add(dbtx)
         log.info(f'servicing transaction request {job.request.__class__.__name__} {job.id} with tx {ctx.id}')
@@ -83,6 +84,7 @@ async def send_transactions():
             TransactionJob.chain == current_chain.get(),
             TransactionJob.state == TransactionJobState.Signed
     ):
+        log.debug(f'sending transaction for job {job.id}')
         sent = await w3.eth.send_raw_transaction(job.tx.data)
         assert sent == job.tx.id
         job.state = TransactionJobState.Sent
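
The extra db.session.add(job) presumably makes sure the Signed state change is written even if the job instance is no longer attached to the session; assuming db.session is a SQLAlchemy Session, add() is harmless for an already-tracked object and re-associates a detached one. A minimal sketch of that pattern:

    job.state = TransactionJobState.Signed    # plain attribute change on the ORM object
    db.session.add(job)                       # no-op if tracked, re-attaches if detached
    db.session.add(DbTransaction(id=ctx.id_bytes, job=job, data=ctx.data, receipt=None))
    # the later flush/commit (outside this hunk) then writes the Signed state and the
    # new transaction row together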