refactored watcher; produces logs

Author: Tim Olson
Date:   2023-09-01 18:58:04 -04:00
Parent: 97234c955f
Commit: 2e1d2aaa96
9 changed files with 238 additions and 68 deletions

abi.js

@@ -1,3 +1,9 @@
+import {readFile} from './misc.js'
+import {ethers} from "ethers";
+const ABI_BASE_PATH = '../contract/out'
 export const erc20Abi = [
     'function name() view returns (string)',
     'function symbol() view returns (string)',
@@ -26,6 +32,7 @@ const TimedOrderSpec = '(' +
     'bool amountIsInput' +
 ')'
 export const timedOrderAbi = [
     'event TimedOrderCreated (address owner, uint64 index, Spec spec)',
     'event TimedOrderFilled (address owner, uint64 index, uint256 amountIn, uint256 amountOut)',
@@ -35,8 +42,31 @@ export const timedOrderAbi = [
 ]
+const vaultDeployerAbi = [
+    'function deployVault(address owner) returns (address vault)',
+    'event VaultCreated( address deployer, address owner )',
+]
 export const abi = {
     'ERC20': erc20Abi,
     'TimedOrder': timedOrderAbi,
+    'VaultDeployer': vaultDeployerAbi,
 }
+export async function getAbi(className) {
+    let found = abi[className]
+    if (found === undefined) {
+        console.log('warning: loading ABI from filesystem for '+className)
+        const data = await readFile(ABI_BASE_PATH + `/${className}.sol/${className}.json`)
+        found = JSON.parse(data.toString())['abi']
+        abi[className] = found
+    }
+    return found
+}
+export async function getInterface(className) {
+    return new ethers.Interface(await getAbi(className))
+}
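
For context, a minimal usage sketch of the new helpers. The class names used here are ones already present in the abi map; the fallback path layout (../contract/out/<name>.sol/<name>.json) looks like a Foundry-style artifact tree, which is an inference rather than something stated in the commit:

    import {getAbi, getInterface} from './abi.js'

    // class names present in the abi map ('ERC20', 'TimedOrder', 'VaultDeployer') resolve in memory;
    // any other name is read once from ../contract/out/<name>.sol/<name>.json and then cached
    const erc20 = await getAbi('ERC20')
    const deployerIface = await getInterface('VaultDeployer')
    console.log(deployerIface.getEvent('VaultCreated').format())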

blockchain.js

@@ -1,15 +1,22 @@
 import {ethers} from "ethers";
-const providers = {} // indexed by chain id
+export const chains = {}
+const _chainInfo = [
+    {id:42161, name:'Arbitrum'},
+]
+for( const chain of _chainInfo )
+    chains[chain.id] = chain
+const providers = {} // indexed by chain id
 export function getProvider(chainId) {
     let result = providers[chainId]
     if( result === undefined ) {
-        const rpc_url = process.env['DEXORDER_RPC_URL_'+chainId]
+        let rpc_url = process.env['DEXORDER_RPC_URL_'+chainId]
         if( rpc_url === undefined ) {
-            console.error('No provider found for chainId',chainId)
-            return null
+            console.error(`WARNING: No provider found for chainId ${chainId}. Using localhost.`)
+            rpc_url = 'http://localhost:8545'
         }
         result = rpc_url.startsWith('ws') ?
             new ethers.WebSocketProvider(rpc_url, chainId) :
@@ -19,24 +26,28 @@ export function getProvider(chainId) {
     return result
 }
-const signers = {} // indexed by chain id, value is an array to be used in round-robin fashion
-const signerIndexes = {}
-export function signer(chainId) {
-    let chainSigners = signers[chainId]
-    if (chainSigners === undefined) {
-        chainSigners = []
-        const private_keys = process.env['DEXORDER_ACCOUNTS_' + chainId]
-        for (const match of private_keys.matchAll(/([^,]+),?/g))
-            chainSigners.push(new ethers.Wallet(match[1]))
-        signers[chainId] = chainSigners
-        signerIndexes[chainId] = 0
-    }
-    const result = chainSigners[signerIndexes[chainId]]
-    signerIndexes[chainId]++
-    if( signerIndexes[chainId] >= chainSigners.length )
-        signerIndexes[chainId] = 0
-    return result
+const signers = {} // indexed by chain id
+// todo multiple signers per chain, checked out of a pool
+export function getSigner(chainId) {
+    let signer = signers[chainId]
+    if (signer === undefined) {
+        const private_keys = process.env['DEXORDER_ACCOUNTS_' + chainId]
+        if( !private_keys ) {
+            console.log(`DEXORDER_ACCOUNTS_${chainId} not defined`)
+            return null // todo fatal
+        }
+        // for (const match of private_keys.matchAll(/([^,]+),?/g))
+        //     signer.push(new ethers.Wallet(match[1]))
+        signer = new ethers.Wallet(private_keys, getProvider(chainId))
+        signers[chainId] = signer
+    }
+    return signer
+    // const result = signer[signerIndexes[chainId]]
+    // signerIndexes[chainId]++
+    // if( signerIndexes[chainId] >= signer.length )
+    //     signerIndexes[chainId] = 0
+    // return result
 }
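
Both lookups are driven by per-chain environment variables. A minimal sketch of the expected configuration and calls, with placeholder values (the key and URL are not real; ws:// or wss:// URLs select WebSocketProvider, other schemes take the non-websocket branch outside the shown hunk):

    import {getProvider, getSigner} from './blockchain.js'

    // placeholder values for chain 42161 (Arbitrum)
    process.env['DEXORDER_RPC_URL_42161'] = 'http://localhost:8545'
    process.env['DEXORDER_ACCOUNTS_42161'] = '0x' + '11'.repeat(32)   // a single private key for now

    const provider = getProvider(42161)   // cached per chain id after the first call
    const signer = getSigner(42161)       // ethers.Wallet connected to that provider
    console.log(await signer.getAddress())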

cache.js (new file)

@@ -0,0 +1,19 @@
+// implement a cluster-wide cache which is in-memory for each instance
+export class Cache {
+    constructor(name) {
+        this.name = name
+        this.cache = {}
+    }
+    async get(key) {
+        return this.cache[key]
+    }
+    async set(key, value) {
+        this.cache[key] = value
+        // todo broadcast
+    }
+}
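
The class is deliberately per-instance memory, with the cross-instance broadcast left as a todo. One possible shape for that broadcast, sketched with Redis pub/sub; the redis dependency, REDIS_URL variable, and BroadcastCache name are assumptions for illustration, not part of this commit:

    import {createClient} from 'redis'
    import {Cache} from './cache.js'

    export class BroadcastCache extends Cache {
        constructor(name) {
            super(name)
            this.channel = 'cache:' + name
            this.pub = createClient({url: process.env.REDIS_URL}).connect()   // promise of a connected client
            // a second connection subscribes; set() calls from other instances update the local copy
            createClient({url: process.env.REDIS_URL}).connect().then(sub =>
                sub.subscribe(this.channel, message => {
                    const {key, value} = JSON.parse(message)
                    this.cache[key] = value
                }))
        }

        async set(key, value) {
            await super.set(key, value)
            await (await this.pub).publish(this.channel, JSON.stringify({key, value}))
        }
    }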

io.js (new file)

@@ -0,0 +1,8 @@
+import {createServer} from "http";
+import {Server} from "socket.io";
+const options = {}
+if( process.env.DEXORDER_CORS )
+    options['cors'] = {origin:process.env.DEXORDER_CORS}
+export const httpServer = createServer()
+export const io = new Server(httpServer, options)
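
For reference, a sketch of how the optional CORS origin is consumed from the browser side; the origin, port, and URL are placeholders:

    // server launched with, e.g.:  DEXORDER_CORS=https://app.example.test node main.js
    import {io as connect} from 'socket.io-client'

    // a page served from the origin above can then open a socket to the backend
    const socket = connect('http://localhost:3000')
    socket.on('connect', () => console.log('connected', socket.id))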

main.js

@@ -1,14 +1,25 @@
 import 'dotenv/config'
-import { createServer } from "http";
-import { Server } from "socket.io"
 import {lookupToken} from "./token.js";
+import {startWatcher} from "./watcher.js";
+import {chains} from "./blockchain.js";
+import {watchErc20Transfer, watchVaultCreated} from "./vault.js";
+import {httpServer, io} from "./io.js";
-const options = {}
-if( process.env.DEXORDER_CORS )
-    options['cors'] = {origin:process.env.DEXORDER_CORS}
-const httpServer = createServer()
-const io = new Server(httpServer, options)
+// setup watcher
+const filterCallbacks = [
+    // format is [[className, eventName, ...eventArgs], callback(provider, database, logInfo)]
+    [['VaultDeployer','VaultCreated', null, null], watchVaultCreated],
+    [['ERC20', 'Transfer'], watchErc20Transfer],
+]
+for( const chain of Object.values(chains) )
+    await startWatcher( chain.id, 1000, filterCallbacks )
+// setup socket.io
 io.on("connection", (socket) => {
     // initially, only anonymous messages are allowed
@@ -23,4 +34,3 @@ io.on("connection", (socket) => {
 const port = parseInt(process.env.DEXORDER_PORT) || 3000;
 httpServer.listen(port)
 console.log('Started server on port '+port)
-console.log(options)
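
The inline comment documents the shape of a filter entry. A sketch of one additional entry in that format; TimedOrder and its TimedOrderFilled event exist in abi.js, while the handler itself is illustrative:

    // hypothetical extra entry for filterCallbacks, following the documented format
    // [[className, eventName, ...eventArgs], callback(provider, database, logInfo)]
    async function watchTimedOrderFilled(provider, db, info) {
        // info is the parsed log; the watcher sets info.address before invoking the callback
        console.log('TimedOrderFilled', info.address, info.args)
    }
    const timedOrderFilter = [['TimedOrder', 'TimedOrderFilled'], watchTimedOrderFilled]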

(database migration file)

@@ -22,7 +22,7 @@ exports.up = async function (db) {
     await db.createTable('eoa', {
         chain: { type: 'int', primaryKey: true },
         address: { type: 'bytea', primaryKey: true},
-        vaulted: 'boolean',
+        // vaulted: 'boolean',
     })
     await db.createTable('tokenusage', {
         chain: { type: 'int', primaryKey: true },

misc.js (new file)

@@ -0,0 +1,4 @@
+import fs from "fs";
+import util from "util";
+export const readFile = (fileName) => util.promisify(fs.readFile)(fileName, 'utf8');

vault.js

@@ -1,5 +1,51 @@
+import {ethers} from "ethers";
+import {getAbi} from "./abi.js";
+import {getProvider, getSigner} from "./blockchain.js";
+import {Cache} from './cache.js';
+const DEPLOYER_ADDRESS = '0xF99aB16Bd8398EAf12407D05A0F8824316008E99'
+const VAULT_INIT_CODE_HASH = '0xbf043f7035d5aa3be2b3c94df5b256fbe24675689327af4ab71c48194c463031'
+const vaults = new Cache('vaults') // vault:owner
+const deployerAbi = await getAbi('VaultDeployer');
+function newVault(address, owner) {
+    return {address, owner, balances: {}}
+}
+export function vaultAddress(chainId, ownerAddress) {
+    const encoded = ethers.AbiCoder.defaultAbiCoder().encode(['address'], [ownerAddress])
+    const salt = ethers.keccak256(encoded)
+    return ethers.getCreate2Address(DEPLOYER_ADDRESS, salt, VAULT_INIT_CODE_HASH)
+}
 export function loginAddress(socket, chainId, address) {
     // todo check for existing vault
-    // todo send welcome with basic info
+    if (!vaults[address]) {
+        //
+    } else {
+        // todo send welcome with basic info and extra tokens
+    }
 }
+export async function ensureVault(socket, chainId, owner) {
+    const address = vaultAddress(chainId, owner)
+    if (!vaults[address]) {
+        const deployer = new ethers.Contract(DEPLOYER_ADDRESS, deployerAbi, getSigner(chainId))
+        await deployer.deployVault(owner)
+    }
+}
+export async function watchVaultCreated(provider, db, event) {
+    console.log(`vault created`, event)
+}
+export async function watchErc20Transfer(provider, db, event) {
+    console.log('Transfer', event)
+}
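
vaultAddress computes the vault's CREATE2 address from the fixed deployer address, a salt of keccak256(abi.encode(owner)), and the vault init code hash, so the address is known before deployment. A small sketch of that property (the owner address is a placeholder):

    import {vaultAddress} from './vault.js'

    const owner = '0x1111111111111111111111111111111111111111'   // placeholder EOA

    // deterministic: depends only on the owner plus the fixed deployer and init code hash,
    // so the backend can display or watch this address before ensureVault() ever deploys it
    const predicted = vaultAddress(42161, owner)
    console.log(`vault for ${owner} will be deployed at ${predicted}`)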

watcher.js

@@ -1,37 +1,46 @@
 import {getProvider} from "./blockchain.js";
-import {ethers} from "ethers";
-import {erc20Abi} from "./abi.js";
 import {pool} from "./db.js";
+import {getInterface} from "./abi.js";
 const BATCH_SIZE = 100 // the most blocks allowed per batch
-async function watchChain(chainId, filterCallbacks) {
+// watcher collects all events from the blockchain and then matches them against our local filters, invoking any
+// registered callbacks. we do the filtering/switching locally to prevent invoking a separate api call for
+// each type of event
+async function processLogs(chainId, topicEventCallbacks) {
+    // console.log('processLogs')
     const provider = getProvider(chainId)
-    const db = await pool.connect()
     const block = await provider.getBlockNumber()
+    const db = await pool.connect()
+    try {
         let fromBlock, toBlock
         const result = await db.query('select block from progress where chain=$1', [chainId])
-        if( result.rowCount === 0 ) {
+        if (result.rowCount === 0) {
             console.log('initializing chain', chainId)
             fromBlock = block
-            db.query('insert into progress values ($1,$2)', [chainId, block-1])
-        }
-        else if( result.rowCount === 1 ) {
+            db.query('insert into progress values ($1,$2)', [chainId, block - 1])
+        } else if (result.rowCount === 1) {
             fromBlock = result.rows[0].block + 1
-        }
-        else
+        } else
             throw Error(`Found ${result.rowCount} rows for progress table chain ${chainId}`)
+        if (fromBlock > block)
+            return
         try {
             do {
-                toBlock = Math.min(block, fromBlock + BATCH_SIZE-1) // toBlock is inclusive
-                const promises = []
+                toBlock = Math.min(block, fromBlock + BATCH_SIZE - 1) // toBlock is inclusive
                 await db.query('BEGIN')
-                for (const [filter, callback] of filterCallbacks) {
-                    filter.fromBlock = fromBlock
-                    filter.toBlock = toBlock
-                    console.log('filter', filter)
-                    const found = await provider.getLogs(filter)
-                    promises.push(callback(provider, db, found))
+                const logs = await provider.getLogs({fromBlock, toBlock})
+                // console.log(`logs for block range [${fromBlock},${toBlock}]`, logs)
+                const promises = []
+                for (const log of logs) {
+                    for (const topic of log.topics) {
+                        const cb = topicEventCallbacks[topic]
+                        if (cb !== undefined)
+                            promises.push(cb(provider, db, log))
+                    }
                 }
                 await Promise.all(promises)
                 db.query('update progress set block=$1 where chain=$2', [toBlock, chainId])
@@ -41,14 +50,47 @@ async function watchChain(chainId, filterCallbacks) {
         } catch (e) {
             await db.query('ROLLBACK')
             throw e
-        } finally {
+        }
+    }
+    finally {
         db.release()
     }
 }
-export function startWatcher(chainId, period, filterCallbacks) {
-    setInterval(async () => await watchChain(chainId, filterCallbacks), period)
+export async function startWatcher(chainId, period, filterCallbacks) {
+    const topicEventCallbacks = {} // topic: callback(log)
+    for (const [[className, eventName, ...args], callback] of filterCallbacks) {
+        const interf = await getInterface(className)
+        const event = interf.getEvent(eventName)
+        const topics = interf.encodeFilterTopics(event, args)
+        for( const topic of topics ) {
+            topicEventCallbacks[topic] = async (provider, db, log) => {
+                let info
+                try {
+                    // info = interf.decodeEventLog(event, log.data, log.topics)
+                    info = interf.parseLog(log)
+                }
+                catch (e) {
+                    // console.error(`could not decode log for topic ${topic}`, log)
+                    return
+                }
+                if( info !== null ) {
+                    info.address = log.address
+                    await callback(provider, db, info)
+                }
+            }
+        }
+    }
+    console.log('registered topics', Object.keys(topicEventCallbacks))
+    setInterval(async () => {
+        try {
+            await processLogs(chainId, topicEventCallbacks)
+        } catch (e) {
+            console.error('error during processLogs',e)
+        }
+    }, period)
 }
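
The refactor replaces one filtered getLogs call per registration with a single unfiltered getLogs per block range, dispatched locally by topic hash. A sketch of what the registration step boils down to for the ['ERC20', 'Transfer'] entry; this assumes the ERC20 ABI includes the standard Transfer event, which the registration in main.js implies:

    import {ethers} from 'ethers'
    import {getInterface} from './abi.js'

    // startWatcher turns each (className, eventName, ...args) into topic hashes up front
    const iface = await getInterface('ERC20')
    const topics = iface.encodeFilterTopics(iface.getEvent('Transfer'), [])
    console.log(topics[0])                                                      // the event's topic0
    console.log(topics[0] === ethers.id('Transfer(address,address,uint256)'))   // true: keccak256 of the signature

    // processLogs then looks up every topic of every fetched log in topicEventCallbacks
    // and, on a match, parses the log with the same Interface before calling the handler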