Compare commits

..

27 Commits

Author SHA1 Message Date
tim
bd37554b7d maxmind fix 2025-12-18 20:59:31 -04:00
tim
0a9b0cc631 maxmind fix 2025-12-18 20:47:59 -04:00
tim
eca99567d8 put app back on app.dexorder.com and corp site on dexorder.com with www redirecting to apex 2025-05-19 15:19:20 -04:00
tim
c95c26afd7 yarn upgrade 2025-05-06 22:27:38 -04:00
tim
008b6793d1 redis pub/sub fix 2025-05-06 22:27:14 -04:00
tim
7f4c301491 dotcom 2025-05-06 13:56:05 -04:00
tim
49dbd7f619 DEXORDER_BIND 2025-04-26 17:50:05 -04:00
tim
fbdb9703ea short share urls 2025-04-23 12:55:49 -04:00
tim
a560383ce0 share landing page touchup 2025-04-22 17:32:19 -04:00
tim
85b2e2dca7 share landing page bugfixes 2025-04-22 17:21:11 -04:00
tim
ff0d71054b order sharing 2025-04-22 16:15:14 -04:00
tim
0b29539e0a USD marks 2025-03-29 15:27:13 -04:00
tim
1a3638a087 excessive vault request squelching 2025-03-28 20:05:19 -04:00
tim
bd1fb627f2 US/PR unblocked 2025-03-26 17:18:15 -04:00
tim
bf31648a57 dead code cleanup 2025-02-24 19:12:48 -04:00
tim
5af7422b9d db records TOS acceptance 2025-01-30 12:25:14 -04:00
tim
2f5a626e5c approvals; vault creation by backend 2025-01-16 20:16:31 -04:00
tim
65c4e08e84 MaxMind IP database & region approvals 2024-12-19 20:18:56 -04:00
tim
8835ad5272 faucet doesn't mint MEH or USXD unless requested 2024-10-23 15:58:41 -04:00
tim
2856ce1001 bin/secret-push 2024-10-06 16:00:18 -04:00
tim
6525b40663 .env-mock has chain 1337 2024-10-04 21:29:02 -04:00
tim
ad693e24a2 vaultAddress bugfix 2024-09-29 21:20:47 -04:00
tim
3928fe4181 applyFills fix 2024-09-13 14:09:13 -04:00
tim
6d088c4f63 updated SwapOrderStatus with Andrew's changes 2024-08-26 17:27:42 -04:00
Tim
24090ce0d1 bugfix d'oh 2024-07-17 17:01:36 -04:00
Tim
2d6e066b98 bugfix d'oh 2024-07-17 16:59:31 -04:00
Tim
570a456d43 bugfix d'oh 2024-07-17 16:57:49 -04:00
23 changed files with 2778 additions and 316 deletions

View File

@@ -1,11 +0,0 @@
DEXORDER_PORT=3001
DEXORDER_CORS=*
DEXORDER_DB_URL=postgresql://dexorder:redroxed@postgres:5432/dexorder
DEXORDER_REDIS_URL=redis://redis:6379
# Dexorder Alpha chain
DEXORDER_DEPLOYMENT_1337=alpha
DEXORDER_RPC_URL_1337=http://alpharpc:8545
# dev account #2
DEXORDER_ACCOUNTS_1337=0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a

View File

@@ -1,9 +0,0 @@
DEXORDER_PORT=3001
DEXORDER_CORS=*
DEXORDER_DB_URL=postgresql://dexorder:redroxed@postgres:5432/dexorder
DEXORDER_REDIS_URL=redis://redis:6379
DEXORDER_DEPLOYMENT_421614=arbsep
DEXORDER_RPC_URL_421614=https://arb-sepolia.g.alchemy.com/v2/lEA3hXyI1G-8Jiv3OvNqpkDN0ATe38zn
DEXORDER_ACCOUNTS_421614=065b7ef20b68ff99e71774acae7759a873073ac3d3c502ceb90f01efb3c48079

View File

@@ -1,5 +1,6 @@
#DEXORDER_BIND=0.0.0.0
DEXORDER_PORT=3001 DEXORDER_PORT=3001
DEXORDER_CORS=http://localhost:3000 DEXORDER_APP_URL=http://localhost:3000
DEXORDER_DB_URL=postgresql://dexorder:redroxed@localhost:5432/dexorder DEXORDER_DB_URL=postgresql://dexorder:redroxed@localhost:5432/dexorder
DEXORDER_REDIS_URL=redis://localhost:6379 DEXORDER_REDIS_URL=redis://localhost:6379
@@ -11,5 +12,17 @@ DEXORDER_RPC_URL_42161=http://localhost:8545
# Mockchain # Mockchain
DEXORDER_DEPLOYMENT_31337=latest DEXORDER_DEPLOYMENT_31337=latest
DEXORDER_RPC_URL_31337=http://localhost:8545 DEXORDER_RPC_URL_31337=http://localhost:8545
# dev account #2
DEXORDER_ACCOUNTS_31337=0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a # Devchain
DEXORDER_DEPLOYMENT_1337=latest
DEXORDER_RPC_URL_1337=http://localhost:8545
# use this in the URL to bypass IP country check for development / debug
# http://localhost:3000/...?approval=...
DEXORDER_REGION_APPROVAL=6ehWWH98diqv39gWZcPo
DEXORDER_SNAPSHOT_S3_BUCKET_NAME=snapshot
DEXORDER_SNAPSHOT_S3_ACCESS_KEY_ID=5c73e3059596e6726d22e7aa5d2d23d6
DEXORDER_SNAPSHOT_S3_SECRET_ACCESS_KEY=a543189cea7d87c891e5abef7fabd7c1759b5063a667b3f29217a6a496a7342a
DEXORDER_SNAPSHOT_S3_ENDPOINT=https://3d7004962a10203cb8fecacc6a8ff502.r2.cloudflarestorage.com
DEXORDER_SNAPSHOT_BASE_URL=https://3d7004962a10203cb8fecacc6a8ff502.r2.cloudflarestorage.com/snapshot

4
.gitignore vendored
View File

@@ -1,7 +1,9 @@
/.idea /.idea
/.vscode /.vscode
/maxmind
/.env /.env
/.env-*
!/.env-mock
# File-based project format # File-based project format
*.iws *.iws

56
approval.js Normal file
View File

@@ -0,0 +1,56 @@
import {countryForIP} from "./maxmind.js";
import {clientIP} from "./misc.js";
import {sql} from "./db.js";
/**
 * ISO 3166-1 alpha-2 codes of countries denied service.
 * Currently the OFAC comprehensive-sanctions list; US/PR were previously
 * listed here but have been unblocked.
 */
const bannedCountries = [
    'CU', // Cuba
    'IR', // Iran
    'KP', // North Korea
    'SY', // Syria
    'RU', // Russia
    'BY', // Belarus
]
/**
 * Records a Terms-of-Service acceptance for the connecting client.
 * @param {object} socket - socket.io socket; used to resolve the client IP.
 * @param {number|string} time - client-reported acceptance timestamp.
 * @param {string|number} version - TOS version that was accepted.
 * @param {function(boolean):void} callback - acked true on success, false on failure.
 */
export async function approveTOS(socket, time, version, callback) {
    const ipAddress = clientIP(socket)
    try {
        await sql('insert into tosacceptance (ipaddr, time, version) values ($1,$2,$3)', ipAddress, time, version)
        callback(true)
    } catch (e) {
        // Without this catch a DB failure rejects a floating promise (the route
        // handler does not await it) and the client never gets an acknowledgement;
        // the unhandledRejection hook in init.js would then kill the process.
        console.error('approveTOS failed', e)
        callback(false)
    }
}
/**
 * Sanctions screen for a wallet address.
 * @param {string} walletAddress - 0x-style wallet address to screen.
 * @returns {boolean} always true until the OFAC list lookup is implemented.
 */
export function approveWallet(walletAddress) {
    // todo OFAC lookup
    const sanctioned = false
    return !sanctioned
}
/**
 * Decides whether an IP address may use the service based on its GeoIP country.
 * Rejects when the lookup throws, when no country can be resolved, or when the
 * country appears on the banned list.
 * @param {string} ipAddress
 * @returns {boolean} true when the address is approved.
 */
function approveIP(ipAddress) {
    let country = null
    try {
        country = countryForIP(ipAddress)
    } catch (err) {
        console.warn(`IP lookup failed for ${ipAddress}: ${err.message}`)
        return false
    }
    if (!country) return false
    const banned = bannedCountries.includes(country)
    console.debug(`IP ${ipAddress} from ${country} is ${banned ? 'rejected' : 'approved'}`)
    return !banned
}
/**
 * Emits an 'approvedRegion' event telling the client whether its region may
 * use the service. A development bypass token (DEXORDER_REGION_APPROVAL) may
 * be supplied by the client to skip the GeoIP check.
 * @param {object} socket - socket.io socket.
 * @param {string|undefined} bypass - optional bypass token from the client URL.
 */
export function approveRegion(socket, bypass) {
    const ipAddress = clientIP(socket)
    // BUGFIX: the bypass only counts when the env token is actually configured.
    // Previously an unset env var and an omitted bypass were both undefined,
    // matched each other, and approved every client.
    const secret = process.env.DEXORDER_REGION_APPROVAL
    const debug = !!secret && bypass === secret
    const approved = debug || approveIP(ipAddress)
    socket.emit('approvedRegion', approved)
    if (debug)
        console.info(`approved admin at ${ipAddress}`)
}

28
bin/secret-push Executable file
View File

@@ -0,0 +1,28 @@
#!/bin/bash
# secret-push: publish a local .env-{config} file into the cluster as a
# kubernetes generic secret (key ".env"), creating or updating it idempotently.
#
# usage: secret-push {config_name} [secret_name]
#   config_name  selects the env file:  ../.env-{config_name}
#   secret_name  optional suffix for the secret name: server-secret-{secret_name}
#                (defaults to "server-secret")

SCRIPT_DIR="$(dirname -- "$( readlink -f -- "$0"; )";)"
CONF_DIR="$SCRIPT_DIR/.."

if [ -z "$1" ]; then
    echo 'usage: secret-push {config_name} [secret_name]' >&2
    exit 1
fi
CONFIG=$1
shift

# Optional second argument picks a distinct secret name.
if [ -n "$1" ]; then
    KEY="server-secret-$1"
else
    KEY="server-secret"
fi

FILE="$CONF_DIR/.env-$CONFIG"
if [ ! -f "$FILE" ]; then
    echo "Could not find env file $FILE" >&2
    exit 1
fi

# --dry-run=client -o yaml piped into apply makes create-or-update idempotent.
kubectl create secret generic "$KEY" --from-file ".env=$FILE" --dry-run=client -o yaml | kubectl apply -f -

View File

@@ -1,12 +1,19 @@
import {createClient} from "redis"; import {createClient} from "redis";
export const redis = createClient({
url: process.env.DEXORDER_REDIS_URL || 'redis://localhost:6379',
returnBuffers: false,
})
redis.on('error', (err) => console.log('Redis Client Error', err)); async function createRedisClient() {
await redis.connect(); const client = createClient({
url: process.env.DEXORDER_REDIS_URL || 'redis://localhost:6379',
returnBuffers: false,
})
client.on('error', (err) => console.log('Redis Client Error', err));
await client.connect();
return client
}
export const redis = await createRedisClient()
export const redisSubscriber = await createRedisClient()
export class CacheSet { export class CacheSet {
@@ -37,6 +44,10 @@ export class CacheDict {
async contains(chain, key) { async contains(chain, key) {
return await redis.hExists(`${chain}|${this.series}`, key) return await redis.hExists(`${chain}|${this.series}`, key)
} }
async items(chain) {
return Object.entries(await redis.hGetAll(`${chain}|${this.series}`)).map(([k, v]) => [k, v === null ? null : '' + v])
}
} }
@@ -83,3 +94,4 @@ export const vaultOpenOrders = new CacheDict('voo')
export const vaultRecentlyClosedOrders = new CacheDict('vrco') export const vaultRecentlyClosedOrders = new CacheDict('vrco')
export const orderFilled = new CacheDict('of') export const orderFilled = new CacheDict('of')
export const ohlcs = new CacheDict('ohlc') export const ohlcs = new CacheDict('ohlc')
export const marks = new CacheDict('mark.usd')

View File

@@ -1,4 +1,19 @@
import fs from "fs"; import fs from "fs";
import {marks} from "./cache.js";
export const chainInfo = JSON.parse(fs.readFileSync('../contract/version.json')).chainInfo export const chainInfo = JSON.parse(fs.readFileSync('../contract/version.json')).chainInfo
console.log('chainInfo', chainInfo) console.log('chainInfo', chainInfo)
/**
 * Moves the socket into the room for the given chain and replays the cached
 * USD marks for that chain so the client starts with current prices.
 * Errors are logged and never propagated to the caller.
 * @param {object} socket - socket.io socket; its chainId property tracks the current room.
 * @param {number|string} chainId - chain room to join.
 */
export async function joinChain( socket, chainId ) {
    try {
        const previous = socket.chainId
        if (previous)
            socket.leave(previous)
        socket.join(chainId)
        socket.chainId = chainId
        // Replay every cached USD mark for this chain to the newly joined socket.
        for (const [token, mark] of await marks.items(chainId)) {
            socket.emit('mark.usd', chainId, token, mark)
        }
    } catch (e) {
        console.error('joinChain', e)
    }
}

9
db.js
View File

@@ -22,7 +22,12 @@ export async function withDb(cb) {
} }
export async function sql(query) { export function dbAddr(addr) {
return await withDb(async (db)=>await db.query(query) ) // format an 0x-style address into postgres bytes
return '\\' + addr.slice(1)
}
export async function sql(query, ...params) {
return await withDb(async (db)=>await db.query(query, params) )
} }

View File

@@ -1,10 +1,21 @@
import {ethers} from "ethers"; import {ethers} from "ethers";
import {getProvider} from "./blockchain.js"; import {getProvider, getSigner} from "./blockchain.js";
import {chainInfo} from "./chain.js"; import {chainInfo} from "./chain.js";
import {mockERC20Contract, newContract} from "./contract.js"; import {mockERC20Contract, newContract} from "./contract.js";
import {metadata} from "./metadata.js";
export async function gib( chainId, owner, vault, tokenAmounts ) { export async function gib( chainId, owner, vault, tokenAmounts ) {
try {
return await doGib( chainId, owner, vault, tokenAmounts )
}
catch (e) {
console.error('gib failed', e)
}
}
export async function doGib( chainId, owner, vault, tokenAmounts ) {
if (!owner || !vault) return if (!owner || !vault) return
if (chainId === 421614) { if (chainId === 421614) {
// Arbitrum-Sepolia // Arbitrum-Sepolia
@@ -12,8 +23,8 @@ export async function gib( chainId, owner, vault, tokenAmounts ) {
for (const t of metadata[421614].t) { for (const t of metadata[421614].t) {
if (t.s === 'USDC' && t.x.mock) { if (t.s === 'USDC' && t.x.mock) {
// print 10,000 USDC // print 10,000 USDC
const provider = getProvider(chainId); const signer = getSigner(421614);
const usdc = await newContract(t.a, 'MockERC20', provider) const usdc = await newContract(t.a, 'MockERC20', signer)
await usdc.mint(vault, 10_000_000000) await usdc.mint(vault, 10_000_000000)
console.log(`minted 10,000 USDC to ${vault}`) console.log(`minted 10,000 USDC to ${vault}`)
} }
@@ -52,12 +63,20 @@ export async function gib( chainId, owner, vault, tokenAmounts ) {
if (info.mockCoins) { if (info.mockCoins) {
const [coinAddr, usdAddr] = info.mockCoins const [coinAddr, usdAddr] = info.mockCoins
const signer = await provider.getSigner() const signer = await provider.getSigner()
console.log('faucet minting tokens to', vault) if (tokenAmounts['MEH'] > 0) {
await (await mockERC20Contract(coinAddr, signer)).mint(vault, 10n * 10n ** 18n) const amount = BigInt(tokenAmounts['MEH']);
await (await mockERC20Contract(usdAddr, signer)).mint(vault, 10_000n * 10n ** 6n) console.log(`faucet minting ${amount} MEH to`, vault)
const token = await mockERC20Contract(coinAddr, signer);
await token.mint(vault, amount * 10n ** 18n)
}
if (tokenAmounts['USXD'] > 0) {
const amount = BigInt(tokenAmounts['USXD']);
console.log(`faucet minting ${amount} USXD to`, vault)
const token = await mockERC20Contract(usdAddr, signer);
await token.mint(vault, amount * 10n ** 6n)
}
} }
} }
return
} }
} }

11
init.js Normal file
View File

@@ -0,0 +1,11 @@
/**
 * Installs last-resort process-level error handlers: any uncaught exception or
 * unhandled promise rejection is logged and terminates the process so an
 * orchestrator can restart it in a clean state.
 * @param {object} app - unused; kept for call-site compatibility.
 */
export function initLog(app) {
    const fatal = (...details) => {
        console.error(...details);
        process.exit(1); // Exit with code 1
    };
    process.on('uncaughtException', (err) => fatal('Uncaught Exception:', err));
    process.on('unhandledRejection', (reason, promise) => fatal('Unhandled Rejection at:', promise, 'reason:', reason));
}

45
io.js
View File

@@ -1,15 +1,42 @@
import {createServer} from "http"; import {createServer} from "http";
import {Server} from "socket.io"; import {Server} from "socket.io";
import { createAdapter } from "@socket.io/redis-adapter"; import { createAdapter } from "@socket.io/redis-adapter";
import {redis} from "./cache.js"; import {redis, redisSubscriber} from "./cache.js";
import {fileURLToPath} from "url";
import path from "path";
import express from "express";
import {engine} from "express-handlebars";
import {initSnapShare} from "./snapshare.js";
import cors from "cors";
const options = { const socketIoOptions = {
} }
if( process.env.DEXORDER_CORS ) if( process.env.DEXORDER_APP_URL )
options['cors'] = {origin:process.env.DEXORDER_CORS} socketIoOptions['cors'] = {origin:process.env.DEXORDER_APP_URL}
export const httpServer = createServer()
export const io = new Server(httpServer, options) // Setup Express
const pubClient = redis.duplicate(); const __filename = fileURLToPath(import.meta.url);
await pubClient.connect() const __dirname = path.dirname(__filename);
const adapter = createAdapter(pubClient, redis, {/*key:'socket.io'*/})
const app = express();
app.engine('handlebars', engine({
defaultLayout: false,
}
));
app.set('view engine', 'handlebars');
app.set('views', path.join(__dirname, 'views')); // Set the views directory
app.use(express.static(path.join(__dirname, 'public')));
app.use(cors())
app.use((err, req, res, next) => {
console.error(err.stack);
res.status(500).send('Something went wrong!');
});
initSnapShare(app)
export const httpServer = createServer(app)
export const io = new Server(httpServer, socketIoOptions)
const adapter = createAdapter(redis, redisSubscriber, {/*key:'socket.io'*/})
io.adapter(adapter) io.adapter(adapter)

37
main.js
View File

@@ -1,33 +1,14 @@
import 'dotenv/config' import 'dotenv/config'
import {httpServer} from "./io.js";
import {lookupToken} from "./token.js"; import {initIO} from "./route.js";
import {httpServer, io} from "./io.js"; import {initLog} from "./init.js";
import {ensureVault, loginAddress} from "./vault.js";
import {subOHLCs, subPools, unsubOHLCs, unsubPools} from "./pool.js";
import {gib} from "./faucet.js";
// setup socket.io initLog();
initIO();
io.on("connection", (socket) => {
socket.on('lookupToken', (chainId, address, callback) => {
lookupToken(chainId, address).then((result)=>callback(result)).catch(()=>callback(null))
})
socket.on('address', (chainId, address) => loginAddress(socket, chainId, address) )
socket.on('subPools', (chainId, addresses) => subPools(socket, chainId, addresses) )
socket.on('unsubPools', (chainId, addresses) => unsubPools(socket, chainId, addresses) )
socket.on('subOHLCs', async (chainId, poolPeriods) => await subOHLCs(socket, chainId, poolPeriods) )
socket.on('unsubOHLCs', (chainId, poolPeriods) => unsubOHLCs(socket, chainId, poolPeriods) )
socket.on('ensureVault', (chainId,owner,num) => ensureVault(socket, chainId, owner, num) )
socket.on('gib', async (chainId, owner, vault, tokenAmounts) => await gib(chainId, owner, vault, tokenAmounts))
socket.join('public')
});
// io.on("disconnection", (socket)=>{
// socket.leave('public') // todo isn't this automatic?
// todo unsub pools etc?
// })
const port = parseInt(process.env.DEXORDER_PORT) || 3001; const port = parseInt(process.env.DEXORDER_PORT) || 3001;
httpServer.listen(port) const bind = process.env.DEXORDER_BIND || 'localhost';
console.log('Started server on port '+port) httpServer.listen(port, bind, ()=>{
console.log(`Started server on ${bind}:${port}`)
})

189
maxmind.js Normal file
View File

@@ -0,0 +1,189 @@
import fs from 'fs';
import path from 'path';
import {extract} from "tar";
import {Reader} from '@maxmind/geoip2-node';
// Feature flag: when false, no database is ever downloaded or loaded and
// countryForIP() always returns null.
const ENABLE_MAXMIND = false

// Active GeoIP2 country reader, or null until setDbFile() succeeds.
let ipdb = null

/**
 * Loads a MaxMind .mmdb file into memory and makes it the active reader.
 * @param {string} file - path to the .mmdb database file.
 */
function setDbFile(file) {
    ipdb = Reader.openBuffer(fs.readFileSync(file));
}
/**
 * Downloads and extracts a MaxMind .tar.gz database from a given URL.
 * The archive is fetched with HTTP basic auth, unpacked into a temp directory,
 * and the contained .mmdb file is copied into outputDir. Temporary files are
 * always cleaned up, even on failure.
 * @param {string} url - The URL to download the file from.
 * @param {string} outputDir - The directory to place the final .mmdb file in.
 * @param {string} tempDir - The directory to use for temporary files.
 * @param {string} username - The username for HTTP basic auth.
 * @param {string} password - The password for HTTP basic auth.
 * @returns {Promise<string>} path of the .mmdb file placed in outputDir.
 * @throws when the download fails or the archive contains no .mmdb file.
 */
async function downloadAndExtractMaxmindData(url, outputDir, tempDir, username, password) {
    console.log('Downloading MaxMind database...');
    const tempFilePath = path.join(tempDir, `temp_${Date.now()}_${Math.random().toString(36).slice(2, 10)}.tar.gz`);
    const tempExtractDir = path.join(tempDir, `temp_extract_${Date.now()}_${Math.random().toString(36).slice(2, 10)}`);
    try {
        // Ensure the output directory exists
        if (!fs.existsSync(outputDir)) {
            fs.mkdirSync(outputDir, {recursive: true});
        }
        // Create a temporary extract directory
        if (!fs.existsSync(tempExtractDir)) {
            fs.mkdirSync(tempExtractDir, {recursive: true});
        }
        // Download the file with HTTP basic authentication and save it as a temporary tar.gz file
        const response = await fetch(url, {
            headers: {
                Authorization: 'Basic ' + Buffer.from(`${username}:${password}`).toString('base64')
            }
        });
        if (!response.ok) {
            throw new Error(`Failed to download file: ${response.statusText}`);
        }
        const fileStream = fs.createWriteStream(tempFilePath);
        const reader = response.body.getReader();
        await new Promise((resolve, reject) => {
            // BUGFIX: a write-stream failure (e.g. disk full) previously never
            // rejected this promise, so the download would hang forever.
            fileStream.on('error', reject);
            function processChunk({done, value}) {
                if (done) {
                    // BUGFIX: resolve from end()'s callback so the file is fully
                    // flushed before extraction reads it (was: end(); resolve()).
                    fileStream.end(resolve);
                    return;
                }
                fileStream.write(value, () => reader.read().then(processChunk).catch(reject));
            }
            reader.read().then(processChunk).catch(reject);
        });
        // Extract the tar.gz file into the temporary extract directory
        await extract({
            file: tempFilePath,
            cwd: tempExtractDir
        });
        // Find the .mmdb file in the top level or one directory down (MaxMind
        // archives wrap the database in a dated directory).
        const mmdbFile = fs.readdirSync(tempExtractDir, {withFileTypes: true})
            .map(dirent => {
                const fullPath = path.join(tempExtractDir, dirent.name);
                if (dirent.isDirectory()) {
                    const subFiles = fs.readdirSync(fullPath).map(subFile => path.join(fullPath, subFile));
                    return subFiles.find(file => file.endsWith('.mmdb')) || null;
                }
                return dirent.name.endsWith('.mmdb') ? fullPath : null;
            })
            .filter(Boolean)[0];
        if (!mmdbFile) {
            throw new Error('No .mmdb file found in the extracted contents.');
        }
        // Copy the .mmdb file to the output directory
        const dest = path.join(outputDir, path.basename(mmdbFile));
        fs.copyFileSync(mmdbFile, dest);
        console.log(`MaxMind database downloaded to ${dest}`);
        return dest
    } finally {
        // Clean up the temporary tar.gz file and temporary extract directory
        if (fs.existsSync(tempFilePath)) {
            fs.unlinkSync(tempFilePath);
        }
        if (fs.existsSync(tempExtractDir)) {
            fs.rmSync(tempExtractDir, {recursive: true, force: true});
        }
    }
}
/**
 * Ensures a reasonably fresh MaxMind database is loaded. If any .mmdb file in
 * outputDir was modified within the last 4 days it is loaded as-is; otherwise
 * a new copy is downloaded, extracted and loaded.
 *
 * @param {string} outputDir - Directory where MaxMind database files are stored.
 * @param {string} tempDir - Temporary directory used during the download and extraction process.
 * @param {string} url - URL to download the MaxMind data.
 * @param {string} username - MaxMind account id; falsy skips the update entirely.
 * @param {string} password - MaxMind license key; falsy skips the update entirely.
 * @return {Promise<void>} Resolves when the data check and optional update process is completed.
 */
async function checkAndUpdateMaxmindData(outputDir, tempDir, url, username, password) {
    // BUGFIX: unset env vars arrive as undefined, which the previous `=== ''`
    // comparison did not catch — the code then attempted a download with the
    // literal credentials "undefined:undefined".
    if (!username || !password) {
        console.log('No MaxMind credentials provided. Skipping MaxMind database update.');
        return
    }
    console.log('Checking for MaxMind database updates.');
    // Ensure the output directory exists
    if (!fs.existsSync(outputDir)) {
        fs.mkdirSync(outputDir, {recursive: true});
    }
    const mmdbFiles = fs.readdirSync(outputDir).filter(file => file.endsWith('.mmdb'));
    let shouldDownload = true;
    for (const mmdbFile of mmdbFiles) {
        const filePath = path.join(outputDir, mmdbFile);
        const stats = fs.statSync(filePath);
        const modifiedDate = new Date(stats.mtime);
        const fourDaysAgo = new Date(Date.now() - 4 * 24 * 60 * 60 * 1000);
        // A file touched within the last 4 days is considered current.
        if (modifiedDate > fourDaysAgo) {
            shouldDownload = false;
            console.log(`MaxMind database '${mmdbFile}' is recent.`);
            setDbFile(filePath)
            break;
        }
    }
    if (shouldDownload) {
        setDbFile(await downloadAndExtractMaxmindData(url, outputDir, tempDir, username, password));
    }
}
// MaxMind GeoIP2-Country download permalink; credentials come from the env vars below.
const url = 'https://download.maxmind.com/geoip/databases/GeoIP2-Country/download?suffix=tar.gz';
const outputDirectory = './maxmind';
// NOTE(review): real account credentials were previously committed here in a
// comment; even commented out they remain in git history — rotate the MaxMind
// license key.
const username = process.env.MAXMIND_ACCOUNT_NUMBER;
const password = process.env.MAXMIND_LICENSE_KEY;
// Module-load side effect: when the feature flag is on, refresh the database
// once at import time and then re-check daily. Currently ENABLE_MAXMIND is
// false, so neither branch runs.
if( ENABLE_MAXMIND ) {
await checkAndUpdateMaxmindData(outputDirectory, '/tmp', url, username, password);
setInterval(async () => {
try {
await checkAndUpdateMaxmindData(outputDirectory, '/tmp', url, username, password);
} catch (error) {
console.error('Error during MaxMind database update:', error);
}
}, 24 * 60 * 60 * 1000 + 1000); // 1 day + 1 second
}
/**
 * Resolves an IP address to a two-letter ISO country code using the loaded
 * MaxMind database.
 * @param {string} ipAddress
 * @returns {string|null} ISO country code, or null when no database is loaded
 *          or the address cannot be resolved.
 */
export function countryForIP(ipAddress) {
    if (ipdb === null) return null
    let code
    try {
        code = ipdb.country(ipAddress).country.isoCode
    } catch (e) {
        console.warn(`IP lookup failed for ${ipAddress}: ${e.message}`)
        return null
    }
    console.log(ipAddress, code)
    return code
}

View File

@@ -2,3 +2,8 @@ import util from "util";
import fs from "fs"; import fs from "fs";
export const readFile = (fileName) => util.promisify(fs.readFile)(fileName, 'utf8'); export const readFile = (fileName) => util.promisify(fs.readFile)(fileName, 'utf8');
/**
 * Best-effort client IP for a socket.io connection. Prefers the first
 * (originating) address in the X-Forwarded-For header when present, falling
 * back to the transport-level address.
 * NOTE(review): X-Forwarded-For is client-controlled unless a trusted proxy
 * sets it — confirm the deployment always runs behind such a proxy before
 * relying on this for anything security-sensitive.
 * @param {object} socket - socket.io socket.
 * @returns {string} IP address string.
 */
export function clientIP(socket) {
    // Entries may be padded with spaces ("a, b, c"); trim so DB inserts and
    // GeoIP lookups get a clean address, and so a whitespace-only entry falls
    // through to the transport address instead of being returned as truthy.
    const forwarded = socket.handshake.headers['x-forwarded-for']?.split(',')[0]?.trim();
    return forwarded || socket.handshake.address;
}

View File

@@ -10,6 +10,7 @@ export function sendVaultOrders( socket, chainId, vault ) {
]).then(async (got)=>{ ]).then(async (got)=>{
const [openIndexes, closedIndexes, recents] = got const [openIndexes, closedIndexes, recents] = got
const statuses = {} const statuses = {}
// noinspection JSCheckFunctionSignatures
const indexes = [...JSON.parse(openIndexes), ...JSON.parse(closedIndexes)] const indexes = [...JSON.parse(openIndexes), ...JSON.parse(closedIndexes)]
const proms = [] const proms = []
if( openIndexes !== null ) { if( openIndexes !== null ) {
@@ -55,10 +56,10 @@ export async function orderStatus( chainId, vault, orderIndex ) {
return status return status
} }
async function fillOrderStatus( chainId, orderKey, status ) { async function fillOrderStatus(chainId, orderKey, status ) {
const fills = await orderFilled.get(chainId, orderKey) const fills = await orderFilled.get(chainId, orderKey)
if (fills !== null) if (fills !== null)
applyFills(status, JSON.parse(fills)) applyFillsServer(status, JSON.parse(fills))
} }
export async function archivedOrders(chainId, vault, limit=100 ) { export async function archivedOrders(chainId, vault, limit=100 ) {
@@ -79,14 +80,43 @@ export async function archivedOrders(chainId, vault, limit=100 ) {
return result return result
} }
function applyFills( orderStatus, filled ) { export function applyFillsServer(orderStatus, orderFills) {
// console.log('apply fills', orderStatus, filled) // class ElaboratedTrancheStatus:
orderStatus[5] = filled[0][0] // filledIn: int
orderStatus[6] = filled[0][1] // filledOut: int
for( const i in filled[1] ) { // activationTime: int
const [filledIn, filledOut] = filled[1][i] // startTime: int
orderStatus[7][i] = filledIn // endTime: int
orderStatus[8][i] = filledOut // fills: list[Fill]
//
// class Fill:
// tx: str
// time: int
// filledIn: int
// filledOut: int
// fee: int
// console.log('apply fills OrderStatus', orderStatus)
// console.log('apply fills orderFills', orderFills)
const trancheStatus = orderStatus[9]
let orderIn = 0n
let orderOut = 0n
for (const i in orderFills) {
let filledIn = 0n
let filledOut = 0n
const [activationTime, fills] = orderFills[i];
for (const fill of fills) {
filledIn += BigInt(fill[2])
filledOut += BigInt(fill[3])
}
const old = trancheStatus[i]
const startTime = old[3]
const endTime = old[4]
trancheStatus[i] = [filledIn.toString(), filledOut.toString(), activationTime, startTime, endTime, fills]
orderIn += filledIn
orderOut += filledOut
} }
// console.log('applied fills', orderStatus) orderStatus[7] = orderIn.toString()
orderStatus[8] = orderOut.toString()
// console.log('apply fills final', orderStatus, orderStatus[1][8])
} }

View File

@@ -11,11 +11,18 @@
"author": "", "author": "",
"license": "unlicensed", "license": "unlicensed",
"dependencies": { "dependencies": {
"@aws-sdk/client-s3": "^3.787.0",
"@isaacs/ttlcache": "^1.4.1",
"@maxmind/geoip2-node": "^5.0.0",
"@socket.io/redis-adapter": "^8.2.1", "@socket.io/redis-adapter": "^8.2.1",
"cors": "^2.8.5",
"dotenv": "^16.3.1", "dotenv": "^16.3.1",
"ethers": "^6.7.1", "ethers": "^6.7.1",
"express": "^5.1.0",
"express-handlebars": "^8.0.2",
"pg": "^8.11.3", "pg": "^8.11.3",
"redis": "^4.6.10", "redis": "^4.6.10",
"socket.io": "^4.7.2" "socket.io": "^4.7.2",
"tar": "^7.4.3"
} }
} }

27
pool.js
View File

@@ -1,6 +1,6 @@
import {ohlcs, prices} from "./cache.js"; import {ohlcs, prices} from "./cache.js";
export function subPools( socket, chainId, addresses) { export function subPools( socket, chainId, addresses ) {
for(const address of addresses) { for(const address of addresses) {
const room = `${chainId}|${address}`; const room = `${chainId}|${address}`;
socket.join(room) socket.join(room)
@@ -19,17 +19,22 @@ export function unsubPools( socket, chainId, addresses ) {
export async function subOHLCs( socket, chainId, poolPeriods) { export async function subOHLCs( socket, chainId, poolPeriods) {
console.log('subOHLCs', chainId, poolPeriods) try {
for(const key of poolPeriods) { console.log('subOHLCs', chainId, poolPeriods)
const room = `${chainId}|${key}`; for (const key of poolPeriods) {
socket.join(room) const room = `${chainId}|${key}`;
let ohlc = await ohlcs.get(chainId,key); socket.join(room)
console.log('got ohlc', ohlc) let ohlc = await ohlcs.get(chainId, key);
if (typeof(ohlc)==='string') { console.log('got ohlc', ohlc)
ohlc = JSON.parse(ohlc) if (typeof (ohlc) === 'string') {
ohlc = JSON.parse(ohlc)
}
socket.emit('ohlc', chainId, key, ohlc)
console.log('joined room', room)
} }
socket.emit('ohlc', chainId, key, ohlc) }
console.log('joined room', room) catch (e) {
console.error('subOHLCs', e)
} }
} }

31
route.js Normal file
View File

@@ -0,0 +1,31 @@
import {io} from "./io.js";
import {lookupToken} from "./token.js";
import {requestVault, loginAddress} from "./vault.js";
import {subOHLCs, subPools, unsubOHLCs, unsubPools} from "./pool.js";
import {gib} from "./faucet.js";
import {approveRegion, approveTOS} from "./approval.js";
import {joinChain} from "./chain.js";
import {share, shared} from "./snapshare.js";
// Server route handling

/**
 * Wraps a socket event handler so that any thrown error or rejected promise is
 * logged instead of escaping as an unhandled rejection (which would terminate
 * the whole process via the handlers installed by initLog in init.js).
 * @param {string} name - event name, for the log message.
 * @param {Function} handler - the real handler; may be sync or async.
 * @returns {Function} a safe async handler.
 */
function guarded(name, handler) {
    return async (...args) => {
        try {
            await handler(...args)
        } catch (e) {
            console.error(`error handling '${name}'`, e)
        }
    }
}

/**
 * Registers all socket.io event routes. Call once at startup.
 */
export function initIO() {
    io.on("connection", (socket) => {
        socket.on('lookupToken', (chainId, address, respond) => {
            lookupToken(chainId, address).then((result) => respond(result)).catch(() => respond(null))
        })
        socket.on('address', guarded('address', (chainId, address) => loginAddress(socket, chainId, address)))
        socket.on('subPools', guarded('subPools', (chainId, addresses) => subPools(socket, chainId, addresses)))
        socket.on('unsubPools', guarded('unsubPools', (chainId, addresses) => unsubPools(socket, chainId, addresses)))
        socket.on('subOHLCs', guarded('subOHLCs', (chainId, poolPeriods) => subOHLCs(socket, chainId, poolPeriods)))
        socket.on('unsubOHLCs', guarded('unsubOHLCs', (chainId, poolPeriods) => unsubOHLCs(socket, chainId, poolPeriods)))
        socket.on('ensureVault', guarded('ensureVault', (chainId, owner, num) => requestVault(socket, chainId, owner, num)))
        socket.on('gib', guarded('gib', (chainId, owner, vault, tokenAmounts) => gib(chainId, owner, vault, tokenAmounts)))
        socket.on('approveTOS', guarded('approveTOS', (time, version, callback) => approveTOS(socket, time, version, callback)))
        socket.on('approveRegion', guarded('approveRegion', (bypass) => approveRegion(socket, bypass)))
        socket.on('chain', guarded('chain', (chainId) => joinChain(socket, chainId)))
        socket.on('share', guarded('share', (data, snapshot, respond) => share(socket, data, snapshot, respond)))
        socket.on('shared', guarded('shared', (code, respond) => shared(socket, code, respond)))
        socket.join('public')
    });
}

114
snapshare.js Normal file
View File

@@ -0,0 +1,114 @@
import {PutObjectCommand, S3Client} from "@aws-sdk/client-s3";
import cors from 'cors'
import crypto from "crypto";
import {sql} from "./db.js";
import {decodeBase62} from "../web/src/common.js";
// Public base URL of the frontend app (also used as the CORS origin for uploads).
const APP_URL = process.env.DEXORDER_APP_URL;
// Public base URL where stored snapshot images can be fetched by browsers.
const SNAPSHOT_URL = process.env.DEXORDER_SNAPSHOT_URL;
// S3-compatible storage for snapshot images (e.g. Cloudflare R2).
const S3_BUCKET = process.env.DEXORDER_SNAPSHOT_S3_BUCKET_NAME;
const S3_ACCESS_KEY_ID = process.env.DEXORDER_SNAPSHOT_S3_ACCESS_KEY_ID;
const S3_SECRET_ACCESS_KEY = process.env.DEXORDER_SNAPSHOT_S3_SECRET_ACCESS_KEY;
const S3_ENDPOINT = process.env.DEXORDER_SNAPSHOT_S3_ENDPOINT; // e.g., 'https://<ACCOUNT_ID>.r2.cloudflarestorage.com/'

// Module-level S3 client, constructed at import time from the env above.
const s3 = new S3Client({
region: "auto",
endpoint: S3_ENDPOINT,
credentials: {
accessKeyId: S3_ACCESS_KEY_ID,
secretAccessKey: S3_SECRET_ACCESS_KEY,
}
});
/** Object key for a share snapshot image, e.g. "share_<code>.png". */
function imageFilename(code) {
    return `share_${code}.png`;
}
/**
 * Uploads a PNG snapshot to the share bucket under the key for `code`.
 * @param {string} code - share code used to derive the object key.
 * @param {Buffer|Uint8Array} snapshot - PNG image bytes.
 */
async function saveSnapshot(code, snapshot) {
    const put = new PutObjectCommand({
        Bucket: S3_BUCKET,
        Key: imageFilename(code),
        Body: snapshot,
        ContentType: 'image/png',
        ACL: 'public-read', // or private, depending on your needs
    });
    await s3.send(put);
}
/**
 * Stores share data in the DB, acknowledges the client with the new share
 * code, then uploads the snapshot image in the background.
 * @param {object} socket - unused; kept for route-handler signature parity.
 * @param {*} data - opaque share payload persisted to the sharedata table.
 * @param {Buffer} snapshot - PNG image bytes for the share page.
 * @param {function} respond - ack callback; receives the share code or null.
 */
export async function share(socket, data, snapshot, respond) {
    try {
        const result = await sql('insert into sharedata (data) values ($1) returning id', data)
        if (result.rowCount !== 1) {
            console.error('insertion of sharedata failed', result)
            respond(null)
            // BUGFIX: without this return we fell through, threw on
            // result.rows[0], and called respond(null) a second time.
            return
        }
        const code = encodeURIComponent(result.rows[0].id)
        respond(code);
        // Fire-and-forget: the client already has its code; just log upload failures.
        saveSnapshot(code, snapshot).catch(e => console.error('save snapshot error', e))
    }
    catch (e) {
        console.error('share error', e)
        respond(null)
    }
}
/**
 * Looks up previously shared data by its share code.
 * @param {object} socket - unused; kept for route-handler signature parity.
 * @param {string} code - base62 share code from the share URL.
 * @param {function} respond - ack callback; receives the stored data or null.
 */
export async function shared(socket, code, respond) {
    try {
        const id = decodeBase62(code)
        const result = await sql('select data from sharedata where id = $1', id)
        if (result.rowCount !== 1) {
            console.error('could not find share data', code)
            respond(null)
            // BUGFIX: without this return we fell through, threw on
            // result.rows[0], and called respond(null) a second time.
            return
        }
        const data = result.rows[0].data
        console.log('shared data', data)
        respond(data)
    }
    catch (e) {
        console.error('shared error', e)
        respond(null)
    }
}
/**
 * Registers the HTTP endpoints that power order sharing:
 *   POST /sharecode   - raw PNG request body; stores the image in S3 and
 *                       responds with the generated filename.
 *   GET  /share/:code - renders the handlebars 'share' page showing the stored
 *                       snapshot, which then redirects into the app.
 * @param {object} app - express application to attach routes to.
 */
export function initSnapShare(app) {
// this URL is called by the frontend to upload order data and a snapshot image for use on a share page
app.post('/sharecode', cors({origin: process.env.DEXORDER_APP_URL}),
(req, res) => {
// Collect the raw request body (binary PNG) chunk by chunk.
const chunks = [];
req.on('data', chunk => chunks.push(chunk))
req.on('error', (err) => res.status(500).send('Error reading body'))
req.on('end', () => {
// Random UUID filename — note this endpoint does NOT use the share_<code>
// naming scheme; the caller receives the filename back.
const filename = crypto.randomUUID() + '.png';
const body = Buffer.concat(chunks);
s3.send(new PutObjectCommand({
Bucket: S3_BUCKET,
Key: filename,
Body: body,
ContentType: 'image/png',
ACL: 'public-read', // or private, depending on your needs
})).then(sent => {
res.send(filename)
}).catch(err => {
console.log('upload error', err)
res.status(500).send('error')
});
});
});
// this link returns a "share page" that shows the snapshot of the trade setup then redirects
// to the order page with the trade data loaded from the URL
app.get('/share/:code', (req, res) => {
const code = req.params.code;
// NOTE(review): assumes DEXORDER_SNAPSHOT_URL and DEXORDER_APP_URL are set in
// the environment — otherwise these links render as "undefined/..."; confirm
// deployment config.
const data = {
imageUrl: SNAPSHOT_URL + '/' + imageFilename(code),
redirectUrl: APP_URL + '/shared/' + code
};
res.render('share', data);
});
}

View File

@@ -1,16 +1,20 @@
import {ethers} from "ethers"; import {ethers} from "ethers";
import {getProvider, getSigner} from "./blockchain.js";
import {vaultBalances, vaults} from './cache.js'; import {vaultBalances, vaults} from './cache.js';
import {chainInfo} from "./chain.js"; import {chainInfo} from "./chain.js";
import {sendVaultOrders} from "./order.js"; import {sendVaultOrders} from "./order.js";
import {newContract} from "./contract.js"; import {approveWallet} from "./approval.js";
import {sql} from "./db.js";
import {clientIP} from "./misc.js";
import TTLCache from "@isaacs/ttlcache";
export function vaultAddress(chainId, owner, num=0) { export function vaultAddress(chainId, owner, num=0) {
if (!chainInfo[chainId].factory)
return null
try { try {
const salt = ethers.solidityPackedKeccak256(['address','uint8'],[owner,num]) const salt = ethers.solidityPackedKeccak256(['address','uint8'],[owner,num])
const result = ethers.getCreate2Address(chainInfo[chainId].factory, salt, chainInfo[chainId].vaultInitCodeHash) const result = ethers.getCreate2Address(chainInfo[chainId].factory, salt, chainInfo[chainId].vaultInitCodeHash)
// console.log('vaultAddress', chainId, owner, num, chainInfo[chainId].factory, salt, VAULT_INIT_CODE_HASH, result) // console.log('vaultAddress', chainId, owner, num, chainInfo[chainId].factory, chainInfo[chainId].vaultInitCodeHash, result)
return result return result
} }
catch (e) { catch (e) {
@@ -45,76 +49,33 @@ async function sendVaultInfo(socket, chainId, owner) {
} }
export async function loginAddress(socket, chainId, address) { export async function loginAddress(socket, chainId, address) {
if( socket.user_room !== undefined) if( socket.user_room !== undefined) {
socket.leave(socket.user_room) socket.leave(socket.user_room)
console.log('left user room', socket.user_room)
}
if( address ) { if( address ) {
socket.user_room = `${chainId}|${address}` const approved = approveWallet(address)
socket.join(socket.user_room) socket.emit('approvedWallet', approved)
console.log('joined user room', socket.user_room) if( approved ) {
await sendVaultInfo(socket, chainId, address); socket.user_room = `${chainId}|${address}`
socket.join(socket.user_room)
console.log('joined user room', socket.user_room)
await sendVaultInfo(socket, chainId, address)
}
} }
} }
const ensuring = {} const requestVaultCooldown = new TTLCache({ttl:5 * 1000, checkAgeOnGet: true})
export async function ensureVault(socket, chainId, owner, num) { export async function requestVault(socket, chainId, owner, num) {
const key = [chainId, owner, num] const key = [chainId, owner, num].join('|')
if( key in ensuring ) { if (requestVaultCooldown.has(key))
console.log('squelching ensureVault since one is in-progress')
return return
} requestVaultCooldown.set(key, true)
ensuring[key] = true const ipAddress = clientIP(socket)
console.log('ensureVault', chainId, owner, num) const time = new Date().toISOString();
if( chainId in chainInfo ) { const query = `insert into vaultcreationrequest (chain, owner, num, time, ipaddr) values (${chainId}, '${owner}', ${num}, '${time}', '${ipAddress}') ON CONFLICT DO NOTHING`;
const address = vaultAddress(chainId, owner, num) console.log('query:', query)
console.log('vault addr', address) await sql(query)
if (!await vaults.contains(chainId,address)) {
try {
const vault = await createVault(chainId, owner, num)
if (vault !== null) {
console.log('created vault', vault)
socket.emit('vaults', chainId, owner, [vault])
await emitBalances(socket, chainId, vault)
}
else
console.error('got null vault for chainId', chainId)
} catch {
}
}
else
console.log('ensureVault', owner, 'exists:', address)
}
await sendVaultInfo(socket, chainId, owner)
delete ensuring[key]
} }
async function createVault(chainId, owner, num) {
const signer = getSigner(chainId);
const factory = chainInfo[chainId].factory;
console.log('createVault', chainId, owner, num, factory, chainInfo[chainId].vaultInitCodeHash )
const deployer = await newContract(factory, 'IVaultFactory', signer)
const vaultAddr = vaultAddress(chainId, owner, num)
console.log(' ==> vault addr', vaultAddr )
try {
const tx = await deployer['deployVault(address,uint8)'](owner, num) // must specify which deployVault() to call
console.log(`deploying vault for ${owner} #${num} with tx ${tx.hash}`)
const result = await tx.wait()
if (result.status !== 1) {
// noinspection ExceptionCaughtLocallyJS
throw Error(`Vault deployment reverted. tx ${tx.hash}`)
}
}
catch (e) {
const vault = await newContract(vaultAddr, 'IVault', getProvider(chainId))
try {
const ver = await vault.version()
console.log(`vault already deployed at ${vaultAddr} with version ${ver}`)
}
catch (e2) {
console.error('could not deploy vault:', e)
return null
}
}
return vaultAddr
}

19
views/share.handlebars Normal file
View File

@@ -0,0 +1,19 @@
{{!-- Social-share landing page: link-preview crawlers read the Open Graph /
      Twitter card tags below, while real browsers are redirected straight to
      the app via the inline script. The <img> is a fallback for clients with
      scripting disabled. --}}
<!DOCTYPE html>
<html lang="en-US">
<head>
    <meta charset="utf-8">
    <title>Dexorder Trade Setup</title>
    <meta property="og:title" content="Dexorder Trade Setup">
    <meta property="og:description" content="Trade this setup on Dexorder">
    <meta property="og:image" content="{{{imageUrl}}}">
    <meta property="og:url" content="https://app.dexorder.com/">
    <meta property="og:type" content="website">
    <meta name="twitter:card" content="summary_large_image">
    <meta name="twitter:title" content="Dexorder Trade Setup">
    <meta name="twitter:description" content="Trade this setup on Dexorder">
    <meta name="twitter:image" content="{{{imageUrl}}}">
</head>
<body>
<script>window.location='{{{redirectUrl}}}'</script>
<img src="{{imageUrl}}" alt="Chart" width="100%"/>
</body>
</html>

2252
yarn.lock

File diff suppressed because it is too large Load Diff