server works with backend via redis, serves chainInfo, more

Tim Olson
2023-10-04 03:42:21 -04:00
parent e36f7812eb
commit c05c9b9c16
12 changed files with 404 additions and 209 deletions

.gitignore (new file)

@@ -0,0 +1,194 @@
/.env
### JetBrains template
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
cmake-build-*/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
### Node template
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
.env.test
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*

abi.js

@@ -19,30 +19,7 @@ export const erc20Abi = [
 ]
-const TimedOrderSpec = '(' +
-    'address tokenIn,' +
-    'address tokenOut,' +
-    'uint24 fee,' +
-    'uint32 deadline,' +
-    'uint32 leeway,' +
-    'uint160 minSqrtPriceX96,' +
-    'uint160 maxSqrtPriceX96,' +
-    'uint8 numTranches,' +
-    'uint256 amount,' +
-    'bool amountIsInput' +
-    ')'
-export const timedOrderAbi = [
-    'event TimedOrderCreated (address owner, uint64 index, Spec spec)',
-    'event TimedOrderFilled (address owner, uint64 index, uint256 amountIn, uint256 amountOut)',
-    'event TimedOrderCompleted (address owner, uint64 index)',
-    'event TimedOrderError (address owner, uint64 index, string reason)',
-    `timedOrder(${TimedOrderSpec}) returns (uint64 index)`,
-]
-const vaultDeployerAbi = [
+const factoryAbi = [
     'function deployVault(address owner) returns (address vault)',
     'event VaultCreated( address deployer, address owner )',
 ]
@@ -50,8 +27,7 @@ const vaultDeployerAbi = [
 export const abi = {
     'ERC20': erc20Abi,
-    'TimedOrder': timedOrderAbi,
-    'VaultDeployer': vaultDeployerAbi,
+    'Factory': factoryAbi,
 }
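
Note: the remaining fragments are ethers v6 human-readable ABI, which an Interface parses directly. A minimal standalone sketch (repeating the two Factory fragments so it runs on its own) deriving the VaultCreated topic hash a log filter would use:

import {ethers} from "ethers";

const factoryAbi = [
    'function deployVault(address owner) returns (address vault)',
    'event VaultCreated( address deployer, address owner )',
]
const iface = new ethers.Interface(factoryAbi)
console.log(iface.getEvent('VaultCreated').topicHash) // topic0 for getLogs filtering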

blockchain.js

@@ -1,23 +1,13 @@
 import {ethers} from "ethers";
-export const chains = {}
-const _chainInfo = [
-    {id:42161, name:'Arbitrum'},
-]
-for( const chain of _chainInfo )
-    chains[chain.id] = chain
 const providers = {} // indexed by chain id
 export function getProvider(chainId) {
     let result = providers[chainId]
     if( result === undefined ) {
         let rpc_url = process.env['DEXORDER_RPC_URL_'+chainId]
-        if( rpc_url === undefined ) {
-            console.error(`WARNING: No provider found for chainId ${chainId}. Using localhost.`)
-            rpc_url = 'http://localhost:8545'
-        }
+        if( rpc_url === undefined )
+            throw Error(`WARNING: No provider found for chainId ${chainId}. Using localhost.`)
         result = rpc_url.startsWith('ws') ?
             new ethers.WebSocketProvider(rpc_url, chainId) :
             new ethers.JsonRpcProvider(rpc_url, chainId)
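
getProvider resolves one RPC endpoint per chain from the environment. A minimal usage sketch, assuming a hypothetical DEXORDER_RPC_URL_42161 entry (any http(s) or ws(s) endpoint; ws URLs get a WebSocketProvider). Worth noting: the thrown message still says "Using localhost" even though the localhost fallback was removed.

// hypothetical .env entry read by getProvider:
// DEXORDER_RPC_URL_42161=wss://arbitrum.example/ws
import {getProvider} from "./blockchain.js";

const provider = getProvider(42161)   // 'ws' prefix -> WebSocketProvider
console.log('head block', await provider.getBlockNumber())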

cache.js

@@ -1,19 +1,53 @@
 // implement a cluster-wide cache which is in-memory for each instance
+import {createClient} from "redis";
-export class Cache {
-    constructor(name) {
-        this.name = name
-        this.cache = {}
-    }
-    async get(key) {
-        return this.cache[key]
-    }
-    async set(key, value) {
-        this.cache[key] = value
-        // todo broadcast
-    }
-}
+export const redis = createClient({url: process.env.DEXORDER_REDIS_URL || 'redis://localhost:6379'})
+redis.on('error', (err) => console.log('Redis Client Error', err));
+await redis.connect();
+export class CacheSet {
+    constructor(series) {
+        this.series = series
+    }
+    async contains(chain, key) {
+        return await redis.sIsMember(`${chain}|${this.series}`, key)
+    }
+}
+export class CacheDict {
+    constructor(series) {
+        this.series = series
+    }
+    async get(chain, key) {
+        return await redis.hGet(`${chain}|${this.series}`, key)
+    }
+}
+export class CacheObject {
+    constructor(series) {
+        this.series = series
+    }
+    async get(chain) {
+        return await redis.json.get(`${chain}|${this.series}`)
+    }
+}
+const cache_blocks = {
+    '1338': new CacheObject('1338|latest_block'),
+}
+async function latestBlock(chain) {
+    return await cache_blocks[chain].get()
+}
+export const vaults = new CacheSet('v')
+export const vaultTokens = new CacheDict('vt')
+export const prices = new CacheDict('p')
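
Each series lives under a per-chain Redis key of the form `${chain}|${series}`: a SET for CacheSet ('v'), a HASH for CacheDict ('vt', 'p'), and a JSON document for CacheObject (which requires the RedisJSON module on the server). A sketch of a hypothetical backend writer feeding these caches; the addresses are placeholders:

import {redis, vaults, vaultTokens} from "./cache.js";

const vault = '0x0000000000000000000000000000000000000001' // placeholder
const token = '0x0000000000000000000000000000000000000002' // placeholder

await redis.sAdd('42161|v', vault)          // backend registers a vault in the 'v' set
await redis.hSet('42161|vt', vault, token)  // and records one of its tokens in the 'vt' hash

console.log(await vaults.contains(42161, vault))  // true
console.log(await vaultTokens.get(42161, vault))  // the token address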

io.js

@@ -1,8 +1,15 @@
 import {createServer} from "http";
 import {Server} from "socket.io";
+import { createAdapter } from "@socket.io/redis-adapter";
+import {redis} from "./cache.js";
-const options = {}
+const options = {
+}
 if( process.env.DEXORDER_CORS )
     options['cors'] = {origin:process.env.DEXORDER_CORS}
 export const httpServer = createServer()
 export const io = new Server(httpServer, options)
+const pubClient = redis.duplicate();
+await pubClient.connect()
+const adapter = createAdapter(pubClient, redis, {/*key:'public|'*/})
+io.adapter(adapter)
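
One caveat: createAdapter's second argument is the subscribing connection, and a node-redis client in subscriber mode can no longer serve normal commands, so passing the shared cache client here will likely conflict with cache.js. The documented pattern duplicates both connections; a sketch of that variant (not what this commit does):

import {createAdapter} from "@socket.io/redis-adapter";
import {redis} from "./cache.js";
import {io} from "./io.js";

// dedicated pub and sub connections, leaving the shared cache client untouched
const pubClient = redis.duplicate()
const subClient = redis.duplicate()
await Promise.all([pubClient.connect(), subClient.connect()])
io.adapter(createAdapter(pubClient, subClient))

// an emit from any instance now reaches 'public' sockets on every instance
io.to('public').emit('announcement', {msg: 'visible cluster-wide'})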

main.js

@@ -1,22 +1,9 @@
import 'dotenv/config'
 import {lookupToken} from "./token.js";
-import {startWatcher} from "./watcher.js";
-import {chains} from "./blockchain.js";
-import {watchErc20Transfer, watchVaultCreated} from "./vault.js";
 import {httpServer, io} from "./io.js";
-// setup watcher
-const filterCallbacks = [
-    // format is [[className, eventName, ...eventArgs], callback(provider, database, logInfo)]
-    [['VaultDeployer','VaultCreated', null, null], watchVaultCreated],
-    [['ERC20', 'Transfer'], watchErc20Transfer],
-]
-for( const chain of Object.values(chains) )
-    await startWatcher( chain.id, 1000, filterCallbacks )
+import {ensureVault, loginAddress} from "./vault.js";
+import {chainInfo} from "./misc.js";
 // setup socket.io
@@ -27,10 +14,16 @@ io.on("connection", (socket) => {
         lookupToken(chainId, address).then((result)=>callback(result)).catch(()=>callback(null))
     })
     socket.on('address', (chainId, address) => loginAddress(socket, chainId, address) )
-    // todo send known tokens or other initial config
-    socket.emit('welcome', {})
+    socket.on('ensureVault', ensureVault )
+    socket.emit('chainInfo', chainInfo)
+    socket.join('public')
+    io.to('room').emit('event', {foo:'bar'})
 });
+io.on("disconnection", (socket)=>{
+    socket.leave('public')
+})
 const port = parseInt(process.env.DEXORDER_PORT) || 3000;
 httpServer.listen(port)
 console.log('Started server on port '+port)
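
Two observations on the handshake: a client receives chainInfo immediately on connect, and the io.to('room') emit reads like leftover example code. Also, socket.io fires 'disconnect' on each socket rather than 'disconnection' on the server (and rooms are left automatically), so the disconnection block likely never runs. A hypothetical client-side counterpart (URL and owner address are placeholders):

import {io as connect} from "socket.io-client";

const socket = connect('http://localhost:3000') // DEXORDER_PORT defaults to 3000
socket.on('chainInfo', (info) => {
    // per-chain entries shaped like {id: 42161, name: 'Arbitrum', factory, helper}
    console.log('supported chains', Object.keys(info))
})
socket.emit('address', 42161, '0x0000000000000000000000000000000000000001')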

misc.js

@@ -1,6 +1,45 @@
 import fs from "fs";
 import util from "util";
+import {keccak256} from "ethers";
 export const readFile = (fileName) => util.promisify(fs.readFile)(fileName, 'utf8');
 export const ALL_CHAINS = [42161,]
+const vaultCode = JSON.parse(fs.readFileSync('./contract/out/Vault.sol/Vault.json').toString())
+export const VAULT_INIT_CODE_HASH = keccak256(vaultCode.bytecode.object)
+export const chainInfo = {}
+const _chains = [
+    {id:42161, name:'Arbitrum'},
+    {id:1338, name:'Mock'},
+]
+function _setChainInfo(c, k, v) {
+    c.update(v)
+    chainInfo[c][k] = v
+}
+for (const chain of _chains) {
+    const path = `contract/broadcast/Deploy.sol/${chain.id}/run-${process.env['DEXORDER_DEPLOYMENT_' + chain.id]}.json`;
+    let deployment
+    try {
+        deployment = JSON.parse(fs.readFileSync(path, 'utf8')) // synchronous is ok, we only do this once on init
+    }
+    catch {
+        continue
+    }
+    for (const tx of deployment.transactions) {
+        if (tx.contractName === 'Factory')
+            chain.factory = tx.contractAddress
+        else if (tx.contractName === 'QueryHelper')
+            chain.helper = tx.contractAddress
+        else if (tx.contractName === 'MockEnv') {
+            // todo set up mock coins, etc
+        }
+    }
+    if (chain.factory === undefined)
+        throw Error(`No Factory deployment found for chainId ${chain.id} ${path}`)
+    if (chain.helper === undefined)
+        throw Error(`No QueryHelper deployment found for chainId ${chain.id} ${path}`)
+    chainInfo[chain.id] = chain
+}
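
The loop reads Foundry broadcast artifacts: DEXORDER_DEPLOYMENT_<chainId> selects which run file to load per chain, and chains without a readable artifact are silently skipped. (_setChainInfo appears unused, and as written would index chainInfo by the chain object rather than its id.) A sketch of the wiring this assumes; the env value and addresses are placeholders, field names follow Foundry's broadcast format:

// hypothetical: DEXORDER_DEPLOYMENT_42161=latest
//   -> contract/broadcast/Deploy.sol/42161/run-latest.json
// minimal shape of the fields the loop reads:
const exampleDeployment = {
    transactions: [
        {contractName: 'Factory', contractAddress: '0x0000000000000000000000000000000000000010'},
        {contractName: 'QueryHelper', contractAddress: '0x0000000000000000000000000000000000000011'},
    ],
}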

package.json

@@ -11,9 +11,11 @@
   "author": "",
   "license": "unlicensed",
   "dependencies": {
+    "@socket.io/redis-adapter": "^8.2.1",
     "dotenv": "^16.3.1",
     "ethers": "^6.7.1",
     "pg": "^8.11.3",
+    "redis": "^4.6.10",
     "socket.io": "^4.7.2"
   }
 }

token.js

@@ -6,8 +6,10 @@ import {ethers} from "ethers";
 import {erc20Abi} from "./abi.js";
 import {getProvider} from "./blockchain.js";
+const std_arbitrum_tokens = {}
 const tokens = {
-    42161: {}
+    42161: std_arbitrum_tokens,
+    1338: std_arbitrum_tokens,
 }
 export async function lookupToken(chainId, address) {

vault.js

@@ -1,62 +1,43 @@
 import {ethers, keccak256} from "ethers";
 import {getAbi} from "./abi.js";
 import {getSigner} from "./blockchain.js";
-import {Cache} from './cache.js';
-import vaultCode from 'contract/out/Vault.sol/Vault.json'
-import assert from "assert";
-import fs from 'fs';
-import {ALL_CHAINS} from "./misc.js";
+import {vaults} from './cache.js';
-const VAULT_INIT_CODE_HASH = keccak256(vaultCode.bytecode.object)
-console.log(VAULT_INIT_CODE_HASH)
-const deployerAddresses = {}
-for (const chainId of ALL_CHAINS) {
-    const path = `contract/broadcast/Deploy.sol/${chainId}/run-${process.env['DEXORDER_DEPLOYMENT_' + chainId]}.json`;
-    const deployment = JSON.parse(fs.readFileSync(path, 'utf8')) // synchronous is ok we only do this once on init
-    assert(deployment.transactions[0].contractName === 'VaultDeployer')
-    deployerAddresses[chainId] = deployment.transactions[0].contractAddress
-}
-const vaults = new Cache('vaults') // vault:owner
 // Vault
 //     address owner
 //     balances { tokenAddress: amount }
 //     recentOrders []
-const deployerAbi = await getAbi('VaultDeployer');
+const deployerAbi = await getAbi('Factory');
 function newVault(address, owner) {
     return {address, owner, tokens: {}} // internal json version
 }
-export function vaultAddress(chainId, ownerAddress) {
-    const encoded = ethers.AbiCoder.defaultAbiCoder().encode(['address'], [ownerAddress])
+export function vaultAddress(chainId, ownerAddress, num=0) {
+    const encoded = ethers.AbiCoder.defaultAbiCoder().encode(['address','uint8'], [ownerAddress,num])
     const salt = ethers.keccak256(encoded)
     return ethers.getCreate2Address(getDeployerAddress(chainId), salt, VAULT_INIT_CODE_HASH)
 }
-export function loginAddress(socket, chainId, address) {
-    // todo check for existing vault
-    if (!vaults[address]) {
-        //
-    } else {
-    }
-}
+export async function loginAddress(socket, chainId, address) {
+    // todo send known tokens
+    if (await vaults.contains(chainId,address)) {
+        // todo send welcome with basic info and extra tokens
+        socket.send('welcome',{})
+    }
+}
-export async function ensureVault(socket, chainId, owner) {
-    const address = vaultAddress(chainId, owner)
-    if (!vaults[address]) {
-        const deployer = new ethers.Contract(DEPLOYER_ADDRESS, deployerAbi, getSigner(chainId))
-        await deployer.deployVault(owner)
-    }
-}
+export async function ensureVault(chainId, owner, num) {
+    const address = vaultAddress(chainId, owner, num)
+    if (!await vaults.contains(chainId,address))
+        await createVault(chainId, owner, num)
+}
+async function createVault(chainId, owner, num) {
+    // todo create vault request for backend to pick up
+    const deployer = new ethers.Contract(factoryAddresses[chainId], deployerAbi, getSigner(chainId))
+    await deployer.deployVault(owner, num)
+}
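
Because vaults deploy via CREATE2, an address is known before deployment: salt = keccak256(abi.encode(owner, num)), combined with the per-chain Factory address and the Vault init-code hash. (getDeployerAddress and factoryAddresses aren't defined in this hunk; presumably they resolve from chainInfo.) A standalone sketch with placeholder inputs; the real init-code hash is VAULT_INIT_CODE_HASH from misc.js:

import {ethers} from "ethers";

const factory = '0x0000000000000000000000000000000000000020' // placeholder Factory address
const initCodeHash = ethers.ZeroHash                         // placeholder for VAULT_INIT_CODE_HASH
const owner = '0x0000000000000000000000000000000000000001'   // placeholder owner

const salt = ethers.keccak256(
    ethers.AbiCoder.defaultAbiCoder().encode(['address', 'uint8'], [owner, 0]))
console.log('vault #0 deploys at', ethers.getCreate2Address(factory, salt, initCodeHash))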

watcher.js (deleted)

@@ -1,103 +0,0 @@
import {getProvider} from "./blockchain.js";
import {pool} from "./db.js";
import {getInterface} from "./abi.js";

const BATCH_SIZE = 100 // the most blocks allowed per batch

// watcher collects all events from the blockchain and then matches them against our local filters, invoking any
// registered callbacks. we do the filtering/switching locally to prevent invoking a separate api call for
// each type of event

async function processLogs(chainId, topicEventCallbacks) {
    // console.log('processLogs')
    const provider = getProvider(chainId)
    const block = await provider.getBlockNumber()
    const db = await pool.connect()
    try {
        let fromBlock, toBlock
        const result = await db.query('select block from progress where chain=$1', [chainId])
        if (result.rowCount === 0) {
            console.log('initializing chain', chainId)
            fromBlock = block
            db.query('insert into progress values ($1,$2)', [chainId, block - 1])
        } else if (result.rowCount === 1) {
            fromBlock = result.rows[0].block + 1
        } else
            throw Error(`Found ${result.rowCount} rows for progress table chain ${chainId}`)
        if (fromBlock > block)
            return
        try {
            do {
                toBlock = Math.min(block, fromBlock + BATCH_SIZE - 1) // toBlock is inclusive
                await db.query('BEGIN')
                const logs = await provider.getLogs({fromBlock, toBlock})
                // console.log(`logs for block range [${fromBlock},${toBlock}]`, logs)
                const promises = []
                for (const log of logs) {
                    for (const topic of log.topics) {
                        const cb = topicEventCallbacks[topic]
                        if (cb !== undefined)
                            promises.push(cb(provider, db, log))
                    }
                }
                await Promise.all(promises)
                db.query('update progress set block=$1 where chain=$2', [toBlock, chainId])
                db.query('COMMIT')
                fromBlock = toBlock + 1
            } while (toBlock > block)
        } catch (e) {
            await db.query('ROLLBACK')
            throw e
        }
    }
    finally {
        db.release()
    }
}

export async function startWatcher(chainId, period, filterCallbacks) {
    const topicEventCallbacks = {} // topic: callback(log)
    for (const [[className, eventName, ...args], callback] of filterCallbacks) {
        const interf = await getInterface(className)
        const event = interf.getEvent(eventName)
        const topics = interf.encodeFilterTopics(event, args)
        for( const topic of topics ) {
            topicEventCallbacks[topic] = async (provider, db, log) => {
                let info
                try {
                    // info = interf.decodeEventLog(event, log.data, log.topics)
                    info = interf.parseLog(log)
                }
                catch (e) {
                    // console.error(`could not decode log for topic ${topic}`, log)
                    return
                }
                if( info !== null ) {
                    info.address = log.address
                    await callback(provider, db, info)
                }
            }
        }
    }
    console.log('registered topics', Object.keys(topicEventCallbacks))
    setInterval(async () => {
        try {
            await processLogs(chainId, topicEventCallbacks)
        } catch (e) {
            console.error('error during processLogs',e)
        }
    }, period)
}

// test
// const erc20 = new ethers.Interface(erc20Abi);
// const filter = erc20.encodeFilterTopics('Transfer', [null, null])
// async function handle(provider, db, events) {
//     console.log(`got ${events.length} Transfers`)
// }
// startWatcher(42161, 1000, [[filter, handle]])
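
For the record, main.js previously drove this watcher with [[className, eventName, ...eventArgs], callback] entries; a minimal registration sketch in the removed API's terms (the callback is a placeholder):

import {startWatcher} from "./watcher.js";

async function onVaultCreated(provider, db, info) {
    console.log('VaultCreated', info.args) // info is the parsed LogDescription plus .address
}
await startWatcher(42161, 1000, [
    [['VaultDeployer', 'VaultCreated', null, null], onVaultCreated],
])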

yarn.lock

@@ -17,11 +17,54 @@
resolved "https://registry.yarnpkg.com/@noble/secp256k1/-/secp256k1-1.7.1.tgz#b251c70f824ce3ca7f8dc3df08d58f005cc0507c"
integrity sha512-hOUk6AyBFmqVrv7k5WAw/LpszxVbj9gGN4JRkIX52fdFAj1UA61KXmZDvqVEm+pOyec3+fIeZB02LYa/pWOArw==
"@redis/bloom@1.2.0":
version "1.2.0"
resolved "https://registry.yarnpkg.com/@redis/bloom/-/bloom-1.2.0.tgz#d3fd6d3c0af3ef92f26767b56414a370c7b63b71"
integrity sha512-HG2DFjYKbpNmVXsa0keLHp/3leGJz1mjh09f2RLGGLQZzSHpkmZWuwJbAvo3QcRY8p80m5+ZdXZdYOSBLlp7Cg==
"@redis/client@1.5.11":
version "1.5.11"
resolved "https://registry.yarnpkg.com/@redis/client/-/client-1.5.11.tgz#5ee8620fea56c67cb427228c35d8403518efe622"
integrity sha512-cV7yHcOAtNQ5x/yQl7Yw1xf53kO0FNDTdDU6bFIMbW6ljB7U7ns0YRM+QIkpoqTAt6zK5k9Fq0QWlUbLcq9AvA==
dependencies:
cluster-key-slot "1.1.2"
generic-pool "3.9.0"
yallist "4.0.0"
"@redis/graph@1.1.0":
version "1.1.0"
resolved "https://registry.yarnpkg.com/@redis/graph/-/graph-1.1.0.tgz#cc2b82e5141a29ada2cce7d267a6b74baa6dd519"
integrity sha512-16yZWngxyXPd+MJxeSr0dqh2AIOi8j9yXKcKCwVaKDbH3HTuETpDVPcLujhFYVPtYrngSco31BUcSa9TH31Gqg==
"@redis/json@1.0.6":
version "1.0.6"
resolved "https://registry.yarnpkg.com/@redis/json/-/json-1.0.6.tgz#b7a7725bbb907765d84c99d55eac3fcf772e180e"
integrity sha512-rcZO3bfQbm2zPRpqo82XbW8zg4G/w4W3tI7X8Mqleq9goQjAGLL7q/1n1ZX4dXEAmORVZ4s1+uKLaUOg7LrUhw==
"@redis/search@1.1.5":
version "1.1.5"
resolved "https://registry.yarnpkg.com/@redis/search/-/search-1.1.5.tgz#682b68114049ff28fdf2d82c580044dfb74199fe"
integrity sha512-hPP8w7GfGsbtYEJdn4n7nXa6xt6hVZnnDktKW4ArMaFQ/m/aR7eFvsLQmG/mn1Upq99btPJk+F27IQ2dYpCoUg==
"@redis/time-series@1.0.5":
version "1.0.5"
resolved "https://registry.yarnpkg.com/@redis/time-series/-/time-series-1.0.5.tgz#a6d70ef7a0e71e083ea09b967df0a0ed742bc6ad"
integrity sha512-IFjIgTusQym2B5IZJG3XKr5llka7ey84fw/NOYqESP5WUfQs9zz1ww/9+qoz4ka/S6KcGBodzlCeZ5UImKbscg==
"@socket.io/component-emitter@~3.1.0":
version "3.1.0"
resolved "https://registry.yarnpkg.com/@socket.io/component-emitter/-/component-emitter-3.1.0.tgz#96116f2a912e0c02817345b3c10751069920d553"
integrity sha512-+9jVqKhRSpsc591z5vX+X5Yyw+he/HCB4iQ/RYxw35CEPaY1gnsNE43nf9n9AaYjAQrTiI/mOwKUKdUs9vf7Xg==
"@socket.io/redis-adapter@^8.2.1":
version "8.2.1"
resolved "https://registry.yarnpkg.com/@socket.io/redis-adapter/-/redis-adapter-8.2.1.tgz#36f75afc518d0e1fa4fa7c29e6d042f53ee7563b"
integrity sha512-6Dt7EZgGSBP0qvXeOKGx7NnSr2tPMbVDfDyL97zerZo+v69hMfL99skMCL3RKZlWVqLyRme2T0wcy3udHhtOsg==
dependencies:
debug "~4.3.1"
notepack.io "~3.0.1"
uid2 "1.0.0"
"@types/cookie@^0.4.1":
version "0.4.1"
resolved "https://registry.yarnpkg.com/@types/cookie/-/cookie-0.4.1.tgz#bfd02c1f2224567676c1545199f87c3a861d878d"
@@ -67,6 +110,11 @@ buffer-writer@2.0.0:
resolved "https://registry.yarnpkg.com/buffer-writer/-/buffer-writer-2.0.0.tgz#ce7eb81a38f7829db09c873f2fbb792c0c98ec04"
integrity sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==
cluster-key-slot@1.1.2:
version "1.1.2"
resolved "https://registry.yarnpkg.com/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz#88ddaa46906e303b5de30d3153b7d9fe0a0c19ac"
integrity sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==
cookie@~0.4.1:
version "0.4.2"
resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.2.tgz#0e41f24de5ecf317947c82fc789e06a884824432"
@@ -126,6 +174,11 @@ ethers@^6.7.1:
tslib "2.4.0"
ws "8.5.0"
generic-pool@3.9.0:
version "3.9.0"
resolved "https://registry.yarnpkg.com/generic-pool/-/generic-pool-3.9.0.tgz#36f4a678e963f4fdb8707eab050823abc4e8f5e4"
integrity sha512-hymDOu5B53XvN4QT9dBmZxPX4CWhBPPLguTZ9MMFeFa/Kg0xWVfylOVNlJji/E7yTZWFd/q9GO5TxDLq156D7g==
mime-db@1.52.0:
version "1.52.0"
resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70"
@@ -148,6 +201,11 @@ negotiator@0.6.3:
resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd"
integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==
notepack.io@~3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/notepack.io/-/notepack.io-3.0.1.tgz#2c2c9de1bd4e64a79d34e33c413081302a0d4019"
integrity sha512-TKC/8zH5pXIAMVQio2TvVDTtPRX+DJPHDqjRbxogtFiByHyzKmy96RA0JtCQJ+WouyyL4A10xomQzgbUT+1jCg==
object-assign@^4:
version "4.1.1"
resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
@@ -238,6 +296,18 @@ postgres-interval@^1.1.0:
dependencies:
xtend "^4.0.0"
redis@^4.6.10:
version "4.6.10"
resolved "https://registry.yarnpkg.com/redis/-/redis-4.6.10.tgz#07f6ea2b2c5455b098e76d1e8c9b3376114e9458"
integrity sha512-mmbyhuKgDiJ5TWUhiKhBssz+mjsuSI/lSZNPI9QvZOYzWvYGejtb+W3RlDDf8LD6Bdl5/mZeG8O1feUGhXTxEg==
dependencies:
"@redis/bloom" "1.2.0"
"@redis/client" "1.5.11"
"@redis/graph" "1.1.0"
"@redis/json" "1.0.6"
"@redis/search" "1.1.5"
"@redis/time-series" "1.0.5"
socket.io-adapter@~2.5.2:
version "2.5.2"
resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-2.5.2.tgz#5de9477c9182fdc171cd8c8364b9a8894ec75d12"
@@ -276,6 +346,11 @@ tslib@2.4.0:
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3"
integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==
uid2@1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/uid2/-/uid2-1.0.0.tgz#ef8d95a128d7c5c44defa1a3d052eecc17a06bfb"
integrity sha512-+I6aJUv63YAcY9n4mQreLUt0d4lvwkkopDNmpomkAUz0fAkEMV9pRWxN0EjhW1YfRhcuyHg2v3mwddCDW1+LFQ==
vary@^1:
version "1.1.2"
resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc"
@@ -295,3 +370,8 @@ xtend@^4.0.0:
version "4.0.2"
resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54"
integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==
yallist@4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72"
integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==