dexorder
149
lib_openzeppelin_contracts/test/utils/structs/BitMap.test.js
Normal file
@@ -0,0 +1,149 @@
const { ethers } = require('hardhat');
const { expect } = require('chai');
const { loadFixture } = require('@nomicfoundation/hardhat-network-helpers');

async function fixture() {
  const bitmap = await ethers.deployContract('$BitMaps');
  return { bitmap };
}

describe('BitMap', function () {
  const keyA = 7891n;
  const keyB = 451n;
  const keyC = 9592328n;

  beforeEach(async function () {
    Object.assign(this, await loadFixture(fixture));
  });

  it('starts empty', async function () {
    expect(await this.bitmap.$get(0, keyA)).to.be.false;
    expect(await this.bitmap.$get(0, keyB)).to.be.false;
    expect(await this.bitmap.$get(0, keyC)).to.be.false;
  });

  describe('setTo', function () {
    it('set a key to true', async function () {
      await this.bitmap.$setTo(0, keyA, true);
      expect(await this.bitmap.$get(0, keyA)).to.be.true;
      expect(await this.bitmap.$get(0, keyB)).to.be.false;
      expect(await this.bitmap.$get(0, keyC)).to.be.false;
    });

    it('set a key to false', async function () {
      await this.bitmap.$setTo(0, keyA, true);
      await this.bitmap.$setTo(0, keyA, false);
      expect(await this.bitmap.$get(0, keyA)).to.be.false;
      expect(await this.bitmap.$get(0, keyB)).to.be.false;
      expect(await this.bitmap.$get(0, keyC)).to.be.false;
    });

    it('set several consecutive keys', async function () {
      await this.bitmap.$setTo(0, keyA + 0n, true);
      await this.bitmap.$setTo(0, keyA + 1n, true);
      await this.bitmap.$setTo(0, keyA + 2n, true);
      await this.bitmap.$setTo(0, keyA + 3n, true);
      await this.bitmap.$setTo(0, keyA + 4n, true);
      await this.bitmap.$setTo(0, keyA + 2n, false);
      await this.bitmap.$setTo(0, keyA + 4n, false);
      expect(await this.bitmap.$get(0, keyA + 0n)).to.be.true;
      expect(await this.bitmap.$get(0, keyA + 1n)).to.be.true;
      expect(await this.bitmap.$get(0, keyA + 2n)).to.be.false;
      expect(await this.bitmap.$get(0, keyA + 3n)).to.be.true;
      expect(await this.bitmap.$get(0, keyA + 4n)).to.be.false;
    });
  });

  describe('set', function () {
    it('adds a key', async function () {
      await this.bitmap.$set(0, keyA);
      expect(await this.bitmap.$get(0, keyA)).to.be.true;
      expect(await this.bitmap.$get(0, keyB)).to.be.false;
      expect(await this.bitmap.$get(0, keyC)).to.be.false;
    });

    it('adds several keys', async function () {
      await this.bitmap.$set(0, keyA);
      await this.bitmap.$set(0, keyB);
      expect(await this.bitmap.$get(0, keyA)).to.be.true;
      expect(await this.bitmap.$get(0, keyB)).to.be.true;
      expect(await this.bitmap.$get(0, keyC)).to.be.false;
    });

    it('adds several consecutive keys', async function () {
      await this.bitmap.$set(0, keyA + 0n);
      await this.bitmap.$set(0, keyA + 1n);
      await this.bitmap.$set(0, keyA + 3n);
      expect(await this.bitmap.$get(0, keyA + 0n)).to.be.true;
      expect(await this.bitmap.$get(0, keyA + 1n)).to.be.true;
      expect(await this.bitmap.$get(0, keyA + 2n)).to.be.false;
      expect(await this.bitmap.$get(0, keyA + 3n)).to.be.true;
      expect(await this.bitmap.$get(0, keyA + 4n)).to.be.false;
    });
  });

  describe('unset', function () {
    it('removes added keys', async function () {
      await this.bitmap.$set(0, keyA);
      await this.bitmap.$set(0, keyB);
      await this.bitmap.$unset(0, keyA);
      expect(await this.bitmap.$get(0, keyA)).to.be.false;
      expect(await this.bitmap.$get(0, keyB)).to.be.true;
      expect(await this.bitmap.$get(0, keyC)).to.be.false;
    });

    it('removes consecutive added keys', async function () {
      await this.bitmap.$set(0, keyA + 0n);
      await this.bitmap.$set(0, keyA + 1n);
      await this.bitmap.$set(0, keyA + 3n);
      await this.bitmap.$unset(0, keyA + 1n);
      expect(await this.bitmap.$get(0, keyA + 0n)).to.be.true;
      expect(await this.bitmap.$get(0, keyA + 1n)).to.be.false;
      expect(await this.bitmap.$get(0, keyA + 2n)).to.be.false;
      expect(await this.bitmap.$get(0, keyA + 3n)).to.be.true;
      expect(await this.bitmap.$get(0, keyA + 4n)).to.be.false;
    });

    it('adds and removes multiple keys', async function () {
      // []

      await this.bitmap.$set(0, keyA);
      await this.bitmap.$set(0, keyC);

      // [A, C]

      await this.bitmap.$unset(0, keyA);
      await this.bitmap.$unset(0, keyB);

      // [C]

      await this.bitmap.$set(0, keyB);

      // [C, B]

      await this.bitmap.$set(0, keyA);
      await this.bitmap.$unset(0, keyC);

      // [A, B]

      await this.bitmap.$set(0, keyA);
      await this.bitmap.$set(0, keyB);

      // [A, B]

      await this.bitmap.$set(0, keyC);
      await this.bitmap.$unset(0, keyA);

      // [B, C]

      await this.bitmap.$set(0, keyA);
      await this.bitmap.$unset(0, keyB);

      // [A, C]

      expect(await this.bitmap.$get(0, keyA)).to.be.true;
      expect(await this.bitmap.$get(0, keyB)).to.be.false;
      expect(await this.bitmap.$get(0, keyC)).to.be.true;
    });
  });
});
332
lib_openzeppelin_contracts/test/utils/structs/Checkpoints.t.sol
Normal file
@@ -0,0 +1,332 @@
// SPDX-License-Identifier: MIT
// This file was procedurally generated from scripts/generate/templates/Checkpoints.t.js.

pragma solidity ^0.8.20;

import {Test} from "@forge-std/Test.sol";
import {SafeCast} from "@openzeppelin/contracts/utils/math/SafeCast.sol";
import {Checkpoints} from "@openzeppelin/contracts/utils/structs/Checkpoints.sol";

contract CheckpointsTrace224Test is Test {
    using Checkpoints for Checkpoints.Trace224;

    // Maximum gap between keys used during the fuzzing tests: the `_prepareKeys` function will make sure that
    // key#n+1 is in the [key#n, key#n + _KEY_MAX_GAP] range.
    uint8 internal constant _KEY_MAX_GAP = 64;

    Checkpoints.Trace224 internal _ckpts;

    // helpers
    function _boundUint32(uint32 x, uint32 min, uint32 max) internal pure returns (uint32) {
        return SafeCast.toUint32(bound(uint256(x), uint256(min), uint256(max)));
    }

    function _prepareKeys(uint32[] memory keys, uint32 maxSpread) internal pure {
        uint32 lastKey = 0;
        for (uint256 i = 0; i < keys.length; ++i) {
            uint32 key = _boundUint32(keys[i], lastKey, lastKey + maxSpread);
            keys[i] = key;
            lastKey = key;
        }
    }

    function _assertLatestCheckpoint(bool exist, uint32 key, uint224 value) internal {
        (bool _exist, uint32 _key, uint224 _value) = _ckpts.latestCheckpoint();
        assertEq(_exist, exist);
        assertEq(_key, key);
        assertEq(_value, value);
    }

    // tests
    function testPush(uint32[] memory keys, uint224[] memory values, uint32 pastKey) public {
        vm.assume(values.length > 0 && values.length <= keys.length);
        _prepareKeys(keys, _KEY_MAX_GAP);

        // initial state
        assertEq(_ckpts.length(), 0);
        assertEq(_ckpts.latest(), 0);
        _assertLatestCheckpoint(false, 0, 0);

        uint256 duplicates = 0;
        for (uint256 i = 0; i < keys.length; ++i) {
            uint32 key = keys[i];
            uint224 value = values[i % values.length];
            if (i > 0 && key == keys[i - 1]) ++duplicates;

            // push
            _ckpts.push(key, value);

            // check length & latest
            assertEq(_ckpts.length(), i + 1 - duplicates);
            assertEq(_ckpts.latest(), value);
            _assertLatestCheckpoint(true, key, value);
        }

        if (keys.length > 0) {
            uint32 lastKey = keys[keys.length - 1];
            if (lastKey > 0) {
                pastKey = _boundUint32(pastKey, 0, lastKey - 1);

                vm.expectRevert();
                this.push(pastKey, values[keys.length % values.length]);
            }
        }
    }

    // used to test reverts
    function push(uint32 key, uint224 value) external {
        _ckpts.push(key, value);
    }

    function testLookup(uint32[] memory keys, uint224[] memory values, uint32 lookup) public {
        vm.assume(values.length > 0 && values.length <= keys.length);
        _prepareKeys(keys, _KEY_MAX_GAP);

        uint32 lastKey = keys.length == 0 ? 0 : keys[keys.length - 1];
        lookup = _boundUint32(lookup, 0, lastKey + _KEY_MAX_GAP);

        uint224 upper = 0;
        uint224 lower = 0;
        uint32 lowerKey = type(uint32).max;
        for (uint256 i = 0; i < keys.length; ++i) {
            uint32 key = keys[i];
            uint224 value = values[i % values.length];

            // push
            _ckpts.push(key, value);

            // track expected result of lookups
            if (key <= lookup) {
                upper = value;
            }
            // find the first key that is not smaller than the lookup key
            if (key >= lookup && (i == 0 || keys[i - 1] < lookup)) {
                lowerKey = key;
            }
            if (key == lowerKey) {
                lower = value;
            }
        }

        // check lookup
        assertEq(_ckpts.lowerLookup(lookup), lower);
        assertEq(_ckpts.upperLookup(lookup), upper);
        assertEq(_ckpts.upperLookupRecent(lookup), upper);
    }
}

contract CheckpointsTrace208Test is Test {
    using Checkpoints for Checkpoints.Trace208;

    // Maximum gap between keys used during the fuzzing tests: the `_prepareKeys` function will make sure that
    // key#n+1 is in the [key#n, key#n + _KEY_MAX_GAP] range.
    uint8 internal constant _KEY_MAX_GAP = 64;

    Checkpoints.Trace208 internal _ckpts;

    // helpers
    function _boundUint48(uint48 x, uint48 min, uint48 max) internal pure returns (uint48) {
        return SafeCast.toUint48(bound(uint256(x), uint256(min), uint256(max)));
    }

    function _prepareKeys(uint48[] memory keys, uint48 maxSpread) internal pure {
        uint48 lastKey = 0;
        for (uint256 i = 0; i < keys.length; ++i) {
            uint48 key = _boundUint48(keys[i], lastKey, lastKey + maxSpread);
            keys[i] = key;
            lastKey = key;
        }
    }

    function _assertLatestCheckpoint(bool exist, uint48 key, uint208 value) internal {
        (bool _exist, uint48 _key, uint208 _value) = _ckpts.latestCheckpoint();
        assertEq(_exist, exist);
        assertEq(_key, key);
        assertEq(_value, value);
    }

    // tests
    function testPush(uint48[] memory keys, uint208[] memory values, uint48 pastKey) public {
        vm.assume(values.length > 0 && values.length <= keys.length);
        _prepareKeys(keys, _KEY_MAX_GAP);

        // initial state
        assertEq(_ckpts.length(), 0);
        assertEq(_ckpts.latest(), 0);
        _assertLatestCheckpoint(false, 0, 0);

        uint256 duplicates = 0;
        for (uint256 i = 0; i < keys.length; ++i) {
            uint48 key = keys[i];
            uint208 value = values[i % values.length];
            if (i > 0 && key == keys[i - 1]) ++duplicates;

            // push
            _ckpts.push(key, value);

            // check length & latest
            assertEq(_ckpts.length(), i + 1 - duplicates);
            assertEq(_ckpts.latest(), value);
            _assertLatestCheckpoint(true, key, value);
        }

        if (keys.length > 0) {
            uint48 lastKey = keys[keys.length - 1];
            if (lastKey > 0) {
                pastKey = _boundUint48(pastKey, 0, lastKey - 1);

                vm.expectRevert();
                this.push(pastKey, values[keys.length % values.length]);
            }
        }
    }

    // used to test reverts
    function push(uint48 key, uint208 value) external {
        _ckpts.push(key, value);
    }

    function testLookup(uint48[] memory keys, uint208[] memory values, uint48 lookup) public {
        vm.assume(values.length > 0 && values.length <= keys.length);
        _prepareKeys(keys, _KEY_MAX_GAP);

        uint48 lastKey = keys.length == 0 ? 0 : keys[keys.length - 1];
        lookup = _boundUint48(lookup, 0, lastKey + _KEY_MAX_GAP);

        uint208 upper = 0;
        uint208 lower = 0;
        uint48 lowerKey = type(uint48).max;
        for (uint256 i = 0; i < keys.length; ++i) {
            uint48 key = keys[i];
            uint208 value = values[i % values.length];

            // push
            _ckpts.push(key, value);

            // track expected result of lookups
            if (key <= lookup) {
                upper = value;
            }
            // find the first key that is not smaller than the lookup key
            if (key >= lookup && (i == 0 || keys[i - 1] < lookup)) {
                lowerKey = key;
            }
            if (key == lowerKey) {
                lower = value;
            }
        }

        // check lookup
        assertEq(_ckpts.lowerLookup(lookup), lower);
        assertEq(_ckpts.upperLookup(lookup), upper);
        assertEq(_ckpts.upperLookupRecent(lookup), upper);
    }
}

contract CheckpointsTrace160Test is Test {
    using Checkpoints for Checkpoints.Trace160;

    // Maximum gap between keys used during the fuzzing tests: the `_prepareKeys` function will make sure that
    // key#n+1 is in the [key#n, key#n + _KEY_MAX_GAP] range.
    uint8 internal constant _KEY_MAX_GAP = 64;

    Checkpoints.Trace160 internal _ckpts;

    // helpers
    function _boundUint96(uint96 x, uint96 min, uint96 max) internal pure returns (uint96) {
        return SafeCast.toUint96(bound(uint256(x), uint256(min), uint256(max)));
    }

    function _prepareKeys(uint96[] memory keys, uint96 maxSpread) internal pure {
        uint96 lastKey = 0;
        for (uint256 i = 0; i < keys.length; ++i) {
            uint96 key = _boundUint96(keys[i], lastKey, lastKey + maxSpread);
            keys[i] = key;
            lastKey = key;
        }
    }

    function _assertLatestCheckpoint(bool exist, uint96 key, uint160 value) internal {
        (bool _exist, uint96 _key, uint160 _value) = _ckpts.latestCheckpoint();
        assertEq(_exist, exist);
        assertEq(_key, key);
        assertEq(_value, value);
    }

    // tests
    function testPush(uint96[] memory keys, uint160[] memory values, uint96 pastKey) public {
        vm.assume(values.length > 0 && values.length <= keys.length);
        _prepareKeys(keys, _KEY_MAX_GAP);

        // initial state
        assertEq(_ckpts.length(), 0);
        assertEq(_ckpts.latest(), 0);
        _assertLatestCheckpoint(false, 0, 0);

        uint256 duplicates = 0;
        for (uint256 i = 0; i < keys.length; ++i) {
            uint96 key = keys[i];
            uint160 value = values[i % values.length];
            if (i > 0 && key == keys[i - 1]) ++duplicates;

            // push
            _ckpts.push(key, value);

            // check length & latest
            assertEq(_ckpts.length(), i + 1 - duplicates);
            assertEq(_ckpts.latest(), value);
            _assertLatestCheckpoint(true, key, value);
        }

        if (keys.length > 0) {
            uint96 lastKey = keys[keys.length - 1];
            if (lastKey > 0) {
                pastKey = _boundUint96(pastKey, 0, lastKey - 1);

                vm.expectRevert();
                this.push(pastKey, values[keys.length % values.length]);
            }
        }
    }

    // used to test reverts
    function push(uint96 key, uint160 value) external {
        _ckpts.push(key, value);
    }

    function testLookup(uint96[] memory keys, uint160[] memory values, uint96 lookup) public {
        vm.assume(values.length > 0 && values.length <= keys.length);
        _prepareKeys(keys, _KEY_MAX_GAP);

        uint96 lastKey = keys.length == 0 ? 0 : keys[keys.length - 1];
        lookup = _boundUint96(lookup, 0, lastKey + _KEY_MAX_GAP);

        uint160 upper = 0;
        uint160 lower = 0;
        uint96 lowerKey = type(uint96).max;
        for (uint256 i = 0; i < keys.length; ++i) {
            uint96 key = keys[i];
            uint160 value = values[i % values.length];

            // push
            _ckpts.push(key, value);

            // track expected result of lookups
            if (key <= lookup) {
                upper = value;
            }
            // find the first key that is not smaller than the lookup key
            if (key >= lookup && (i == 0 || keys[i - 1] < lookup)) {
                lowerKey = key;
            }
            if (key == lowerKey) {
                lower = value;
            }
        }

        // check lookup
        assertEq(_ckpts.lowerLookup(lookup), lower);
        assertEq(_ckpts.upperLookup(lookup), upper);
        assertEq(_ckpts.upperLookupRecent(lookup), upper);
    }
}
@@ -0,0 +1,146 @@
const { ethers } = require('hardhat');
const { expect } = require('chai');
const { loadFixture } = require('@nomicfoundation/hardhat-network-helpers');

const { VALUE_SIZES } = require('../../../scripts/generate/templates/Checkpoints.opts');

describe('Checkpoints', function () {
  for (const length of VALUE_SIZES) {
    describe(`Trace${length}`, function () {
      const fixture = async () => {
        const mock = await ethers.deployContract('$Checkpoints');
        const methods = {
          at: (...args) => mock.getFunction(`$at_Checkpoints_Trace${length}`)(0, ...args),
          latest: (...args) => mock.getFunction(`$latest_Checkpoints_Trace${length}`)(0, ...args),
          latestCheckpoint: (...args) => mock.getFunction(`$latestCheckpoint_Checkpoints_Trace${length}`)(0, ...args),
          length: (...args) => mock.getFunction(`$length_Checkpoints_Trace${length}`)(0, ...args),
          push: (...args) => mock.getFunction(`$push(uint256,uint${256 - length},uint${length})`)(0, ...args),
          lowerLookup: (...args) => mock.getFunction(`$lowerLookup(uint256,uint${256 - length})`)(0, ...args),
          upperLookup: (...args) => mock.getFunction(`$upperLookup(uint256,uint${256 - length})`)(0, ...args),
          upperLookupRecent: (...args) =>
            mock.getFunction(`$upperLookupRecent(uint256,uint${256 - length})`)(0, ...args),
        };

        return { mock, methods };
      };

      beforeEach(async function () {
        Object.assign(this, await loadFixture(fixture));
      });

      describe('without checkpoints', function () {
        it('at zero reverts', async function () {
          // Reverts with array out of bound access, which is unspecified
          await expect(this.methods.at(0)).to.be.reverted;
        });

        it('returns zero as latest value', async function () {
          expect(await this.methods.latest()).to.equal(0n);

          const ckpt = await this.methods.latestCheckpoint();
          expect(ckpt[0]).to.be.false;
          expect(ckpt[1]).to.equal(0n);
          expect(ckpt[2]).to.equal(0n);
        });

        it('lookup returns 0', async function () {
          expect(await this.methods.lowerLookup(0)).to.equal(0n);
          expect(await this.methods.upperLookup(0)).to.equal(0n);
          expect(await this.methods.upperLookupRecent(0)).to.equal(0n);
        });
      });

      describe('with checkpoints', function () {
        beforeEach('pushing checkpoints', async function () {
          this.checkpoints = [
            { key: 2n, value: 17n },
            { key: 3n, value: 42n },
            { key: 5n, value: 101n },
            { key: 7n, value: 23n },
            { key: 11n, value: 99n },
          ];
          for (const { key, value } of this.checkpoints) {
            await this.methods.push(key, value);
          }
        });

        it('at keys', async function () {
          for (const [index, { key, value }] of this.checkpoints.entries()) {
            const at = await this.methods.at(index);
            expect(at._value).to.equal(value);
            expect(at._key).to.equal(key);
          }
        });

        it('length', async function () {
          expect(await this.methods.length()).to.equal(this.checkpoints.length);
        });

        it('returns latest value', async function () {
          const latest = this.checkpoints.at(-1);
          expect(await this.methods.latest()).to.equal(latest.value);
          expect(await this.methods.latestCheckpoint()).to.deep.equal([true, latest.key, latest.value]);
        });

        it('cannot push values in the past', async function () {
          await expect(this.methods.push(this.checkpoints.at(-1).key - 1n, 0n)).to.be.revertedWithCustomError(
            this.mock,
            'CheckpointUnorderedInsertion',
          );
        });

        it('can update last value', async function () {
          const newValue = 42n;

          // check length before the update
          expect(await this.methods.length()).to.equal(this.checkpoints.length);

          // update last key
          await this.methods.push(this.checkpoints.at(-1).key, newValue);
          expect(await this.methods.latest()).to.equal(newValue);

          // check that length did not change
          expect(await this.methods.length()).to.equal(this.checkpoints.length);
        });

        it('lower lookup', async function () {
          for (let i = 0; i < 14; ++i) {
            const value = this.checkpoints.find(x => i <= x.key)?.value || 0n;

            expect(await this.methods.lowerLookup(i)).to.equal(value);
          }
        });

        it('upper lookup & upperLookupRecent', async function () {
          for (let i = 0; i < 14; ++i) {
            const value = this.checkpoints.findLast(x => i >= x.key)?.value || 0n;

            expect(await this.methods.upperLookup(i)).to.equal(value);
            expect(await this.methods.upperLookupRecent(i)).to.equal(value);
          }
        });

        it('upperLookupRecent with more than 5 checkpoints', async function () {
          const moreCheckpoints = [
            { key: 12n, value: 22n },
            { key: 13n, value: 131n },
            { key: 17n, value: 45n },
            { key: 19n, value: 31452n },
            { key: 21n, value: 0n },
          ];
          const allCheckpoints = [].concat(this.checkpoints, moreCheckpoints);

          for (const { key, value } of moreCheckpoints) {
            await this.methods.push(key, value);
          }

          for (let i = 0; i < 25; ++i) {
            const value = allCheckpoints.findLast(x => i >= x.key)?.value || 0n;
            expect(await this.methods.upperLookup(i)).to.equal(value);
            expect(await this.methods.upperLookupRecent(i)).to.equal(value);
          }
        });
      });
    });
  }
});
@@ -0,0 +1,79 @@
const { ethers } = require('hardhat');
const { expect } = require('chai');
const { loadFixture } = require('@nomicfoundation/hardhat-network-helpers');
const { PANIC_CODES } = require('@nomicfoundation/hardhat-chai-matchers/panic');

const { generators } = require('../../helpers/random');

const LENGTH = 4;

async function fixture() {
  const mock = await ethers.deployContract('$CircularBuffer');
  await mock.$setup(0, LENGTH);
  return { mock };
}

describe('CircularBuffer', function () {
  beforeEach(async function () {
    Object.assign(this, await loadFixture(fixture));
  });

  it('starts empty', async function () {
    expect(await this.mock.$count(0)).to.equal(0n);
    expect(await this.mock.$length(0)).to.equal(LENGTH);
    expect(await this.mock.$includes(0, ethers.ZeroHash)).to.be.false;
    await expect(this.mock.$last(0, 0)).to.be.revertedWithPanic(PANIC_CODES.ARRAY_ACCESS_OUT_OF_BOUNDS);
  });

  it('push', async function () {
    const values = Array.from({ length: LENGTH + 3 }, generators.bytes32);

    for (const [i, value] of values.map((v, i) => [i, v])) {
      // push value
      await this.mock.$push(0, value);

      // view of the values
      const pushed = values.slice(0, i + 1);
      const stored = pushed.slice(-LENGTH);
      const dropped = pushed.slice(0, -LENGTH);

      // check count
      expect(await this.mock.$length(0)).to.equal(LENGTH);
      expect(await this.mock.$count(0)).to.equal(stored.length);

      // check last
      for (const j in stored) {
        expect(await this.mock.$last(0, j)).to.equal(stored.at(-j - 1));
      }
      await expect(this.mock.$last(0, stored.length + 1)).to.be.revertedWithPanic(
        PANIC_CODES.ARRAY_ACCESS_OUT_OF_BOUNDS,
      );

      // check included and non-included values
      for (const v of stored) {
        expect(await this.mock.$includes(0, v)).to.be.true;
      }
      for (const v of dropped) {
        expect(await this.mock.$includes(0, v)).to.be.false;
      }
      expect(await this.mock.$includes(0, ethers.ZeroHash)).to.be.false;
    }
  });

  it('clear', async function () {
    const value = generators.bytes32();
    await this.mock.$push(0, value);

    expect(await this.mock.$count(0)).to.equal(1n);
    expect(await this.mock.$length(0)).to.equal(LENGTH);
    expect(await this.mock.$includes(0, value)).to.be.true;
    await this.mock.$last(0, 0); // not revert

    await this.mock.$clear(0);

    expect(await this.mock.$count(0)).to.equal(0n);
    expect(await this.mock.$length(0)).to.equal(LENGTH);
    expect(await this.mock.$includes(0, value)).to.be.false;
    await expect(this.mock.$last(0, 0)).to.be.revertedWithPanic(PANIC_CODES.ARRAY_ACCESS_OUT_OF_BOUNDS);
  });
});
@@ -0,0 +1,102 @@
const { ethers } = require('hardhat');
const { expect } = require('chai');
const { loadFixture } = require('@nomicfoundation/hardhat-network-helpers');
const { PANIC_CODES } = require('@nomicfoundation/hardhat-chai-matchers/panic');

async function fixture() {
  const mock = await ethers.deployContract('$DoubleEndedQueue');

  /** Rebuild the content of the deque as a JS array. */
  const getContent = () =>
    mock.$length(0).then(length => Promise.all(Array.from({ length: Number(length) }, (_, i) => mock.$at(0, i))));

  return { mock, getContent };
}

describe('DoubleEndedQueue', function () {
  const coder = ethers.AbiCoder.defaultAbiCoder();
  const bytesA = coder.encode(['uint256'], [0xdeadbeef]);
  const bytesB = coder.encode(['uint256'], [0x0123456789]);
  const bytesC = coder.encode(['uint256'], [0x42424242]);
  const bytesD = coder.encode(['uint256'], [0x171717]);

  beforeEach(async function () {
    Object.assign(this, await loadFixture(fixture));
  });

  describe('when empty', function () {
    it('getters', async function () {
      expect(await this.mock.$empty(0)).to.be.true;
      expect(await this.getContent()).to.have.ordered.members([]);
    });

    it('reverts on accesses', async function () {
      await expect(this.mock.$popBack(0)).to.be.revertedWithPanic(PANIC_CODES.POP_ON_EMPTY_ARRAY);
      await expect(this.mock.$popFront(0)).to.be.revertedWithPanic(PANIC_CODES.POP_ON_EMPTY_ARRAY);
      await expect(this.mock.$back(0)).to.be.revertedWithPanic(PANIC_CODES.ARRAY_ACCESS_OUT_OF_BOUNDS);
      await expect(this.mock.$front(0)).to.be.revertedWithPanic(PANIC_CODES.ARRAY_ACCESS_OUT_OF_BOUNDS);
    });
  });

  describe('when not empty', function () {
    beforeEach(async function () {
      await this.mock.$pushBack(0, bytesB);
      await this.mock.$pushFront(0, bytesA);
      await this.mock.$pushBack(0, bytesC);
      this.content = [bytesA, bytesB, bytesC];
    });

    it('getters', async function () {
      expect(await this.mock.$empty(0)).to.be.false;
      expect(await this.mock.$length(0)).to.equal(this.content.length);
      expect(await this.mock.$front(0)).to.equal(this.content[0]);
      expect(await this.mock.$back(0)).to.equal(this.content[this.content.length - 1]);
      expect(await this.getContent()).to.have.ordered.members(this.content);
    });

    it('out of bounds access', async function () {
      await expect(this.mock.$at(0, this.content.length)).to.be.revertedWithPanic(
        PANIC_CODES.ARRAY_ACCESS_OUT_OF_BOUNDS,
      );
    });

    describe('push', function () {
      it('front', async function () {
        await this.mock.$pushFront(0, bytesD);
        this.content.unshift(bytesD); // add element at the beginning

        expect(await this.getContent()).to.have.ordered.members(this.content);
      });

      it('back', async function () {
        await this.mock.$pushBack(0, bytesD);
        this.content.push(bytesD); // add element at the end

        expect(await this.getContent()).to.have.ordered.members(this.content);
      });
    });

    describe('pop', function () {
      it('front', async function () {
        const value = this.content.shift(); // remove first element
        await expect(this.mock.$popFront(0)).to.emit(this.mock, 'return$popFront').withArgs(value);

        expect(await this.getContent()).to.have.ordered.members(this.content);
      });

      it('back', async function () {
        const value = this.content.pop(); // remove last element
        await expect(this.mock.$popBack(0)).to.emit(this.mock, 'return$popBack').withArgs(value);

        expect(await this.getContent()).to.have.ordered.members(this.content);
      });
    });

    it('clear', async function () {
      await this.mock.$clear(0);

      expect(await this.mock.$empty(0)).to.be.true;
      expect(await this.getContent()).to.have.ordered.members([]);
    });
  });
});
@@ -0,0 +1,151 @@
const { ethers } = require('hardhat');
const { expect } = require('chai');

const zip = (array1, array2) => array1.map((item, index) => [item, array2[index]]);

function shouldBehaveLikeMap() {
  async function expectMembersMatch(methods, keys, values) {
    expect(keys.length).to.equal(values.length);
    expect(await methods.length()).to.equal(keys.length);
    expect([...(await methods.keys())]).to.have.members(keys);

    for (const [key, value] of zip(keys, values)) {
      expect(await methods.contains(key)).to.be.true;
      expect(await methods.get(key)).to.equal(value);
    }

    expect(await Promise.all(keys.map((_, index) => methods.at(index)))).to.have.deep.members(zip(keys, values));
  }

  it('starts empty', async function () {
    expect(await this.methods.contains(this.keyA)).to.be.false;

    await expectMembersMatch(this.methods, [], []);
  });

  describe('set', function () {
    it('adds a key', async function () {
      await expect(this.methods.set(this.keyA, this.valueA)).to.emit(this.mock, this.events.setReturn).withArgs(true);

      await expectMembersMatch(this.methods, [this.keyA], [this.valueA]);
    });

    it('adds several keys', async function () {
      await this.methods.set(this.keyA, this.valueA);
      await this.methods.set(this.keyB, this.valueB);

      await expectMembersMatch(this.methods, [this.keyA, this.keyB], [this.valueA, this.valueB]);
      expect(await this.methods.contains(this.keyC)).to.be.false;
    });

    it('returns false when adding keys already in the set', async function () {
      await this.methods.set(this.keyA, this.valueA);

      await expect(this.methods.set(this.keyA, this.valueA)).to.emit(this.mock, this.events.setReturn).withArgs(false);

      await expectMembersMatch(this.methods, [this.keyA], [this.valueA]);
    });

    it('updates values for keys already in the set', async function () {
      await this.methods.set(this.keyA, this.valueA);
      await this.methods.set(this.keyA, this.valueB);

      await expectMembersMatch(this.methods, [this.keyA], [this.valueB]);
    });
  });

  describe('remove', function () {
    it('removes added keys', async function () {
      await this.methods.set(this.keyA, this.valueA);

      await expect(this.methods.remove(this.keyA)).to.emit(this.mock, this.events.removeReturn).withArgs(true);

      expect(await this.methods.contains(this.keyA)).to.be.false;
      await expectMembersMatch(this.methods, [], []);
    });

    it('returns false when removing keys not in the set', async function () {
      await expect(await this.methods.remove(this.keyA))
        .to.emit(this.mock, this.events.removeReturn)
        .withArgs(false);

      expect(await this.methods.contains(this.keyA)).to.be.false;
    });

    it('adds and removes multiple keys', async function () {
      // []

      await this.methods.set(this.keyA, this.valueA);
      await this.methods.set(this.keyC, this.valueC);

      // [A, C]

      await this.methods.remove(this.keyA);
      await this.methods.remove(this.keyB);

      // [C]

      await this.methods.set(this.keyB, this.valueB);

      // [C, B]

      await this.methods.set(this.keyA, this.valueA);
      await this.methods.remove(this.keyC);

      // [A, B]

      await this.methods.set(this.keyA, this.valueA);
      await this.methods.set(this.keyB, this.valueB);

      // [A, B]

      await this.methods.set(this.keyC, this.valueC);
      await this.methods.remove(this.keyA);

      // [B, C]

      await this.methods.set(this.keyA, this.valueA);
      await this.methods.remove(this.keyB);

      // [A, C]

      await expectMembersMatch(this.methods, [this.keyA, this.keyC], [this.valueA, this.valueC]);

      expect(await this.methods.contains(this.keyA)).to.be.true;
      expect(await this.methods.contains(this.keyB)).to.be.false;
      expect(await this.methods.contains(this.keyC)).to.be.true;
    });
  });

  describe('read', function () {
    beforeEach(async function () {
      await this.methods.set(this.keyA, this.valueA);
    });

    describe('get', function () {
      it('existing value', async function () {
        expect(await this.methods.get(this.keyA)).to.equal(this.valueA);
      });

      it('missing value', async function () {
        await expect(this.methods.get(this.keyB))
          .to.be.revertedWithCustomError(this.mock, 'EnumerableMapNonexistentKey')
          .withArgs(ethers.AbiCoder.defaultAbiCoder().encode([this.keyType], [this.keyB]));
      });
    });

    describe('tryGet', function () {
      it('existing value', async function () {
        expect(await this.methods.tryGet(this.keyA)).to.have.ordered.members([true, this.valueA]);
      });

      it('missing value', async function () {
        expect(await this.methods.tryGet(this.keyB)).to.have.ordered.members([false, this.zeroValue]);
      });
    });
  });
}

module.exports = {
  shouldBehaveLikeMap,
};
@@ -0,0 +1,65 @@
const { ethers } = require('hardhat');
const { loadFixture } = require('@nomicfoundation/hardhat-network-helpers');

const { mapValues } = require('../../helpers/iterate');
const { generators } = require('../../helpers/random');
const { TYPES, formatType } = require('../../../scripts/generate/templates/EnumerableMap.opts');

const { shouldBehaveLikeMap } = require('./EnumerableMap.behavior');

// Add Bytes32ToBytes32Map that must be tested but is not part of the generated types.
TYPES.unshift(formatType('bytes32', 'bytes32'));

async function fixture() {
  const mock = await ethers.deployContract('$EnumerableMap');
  const env = Object.fromEntries(
    TYPES.map(({ name, keyType, valueType }) => [
      name,
      {
        keyType,
        keys: Array.from({ length: 3 }, generators[keyType]),
        values: Array.from({ length: 3 }, generators[valueType]),
        zeroValue: generators[valueType].zero,
        methods: mapValues(
          {
            set: `$set(uint256,${keyType},${valueType})`,
            get: `$get_EnumerableMap_${name}(uint256,${keyType})`,
            tryGet: `$tryGet_EnumerableMap_${name}(uint256,${keyType})`,
            remove: `$remove_EnumerableMap_${name}(uint256,${keyType})`,
            length: `$length_EnumerableMap_${name}(uint256)`,
            at: `$at_EnumerableMap_${name}(uint256,uint256)`,
            contains: `$contains_EnumerableMap_${name}(uint256,${keyType})`,
            keys: `$keys_EnumerableMap_${name}(uint256)`,
          },
          fnSig =>
            (...args) =>
              mock.getFunction(fnSig)(0, ...args),
        ),
        events: {
          setReturn: `return$set_EnumerableMap_${name}_${keyType}_${valueType}`,
          removeReturn: `return$remove_EnumerableMap_${name}_${keyType}`,
        },
      },
    ]),
  );

  return { mock, env };
}

describe('EnumerableMap', function () {
  beforeEach(async function () {
    Object.assign(this, await loadFixture(fixture));
  });

  for (const { name } of TYPES) {
    describe(name, function () {
      beforeEach(async function () {
        Object.assign(this, this.env[name]);
        [this.keyA, this.keyB, this.keyC] = this.keys;
        [this.valueA, this.valueB, this.valueC] = this.values;
      });

      shouldBehaveLikeMap();
    });
  }
});
@@ -0,0 +1,116 @@
const { expect } = require('chai');
const { PANIC_CODES } = require('@nomicfoundation/hardhat-chai-matchers/panic');

function shouldBehaveLikeSet() {
  async function expectMembersMatch(methods, values) {
    expect(await methods.length()).to.equal(values.length);
    for (const value of values) expect(await methods.contains(value)).to.be.true;

    expect(await Promise.all(values.map((_, index) => methods.at(index)))).to.have.deep.members(values);
    expect([...(await methods.values())]).to.have.deep.members(values);
  }

  it('starts empty', async function () {
    expect(await this.methods.contains(this.valueA)).to.be.false;

    await expectMembersMatch(this.methods, []);
  });

  describe('add', function () {
    it('adds a value', async function () {
      await expect(this.methods.add(this.valueA)).to.emit(this.mock, this.events.addReturn).withArgs(true);

      await expectMembersMatch(this.methods, [this.valueA]);
    });

    it('adds several values', async function () {
      await this.methods.add(this.valueA);
      await this.methods.add(this.valueB);

      await expectMembersMatch(this.methods, [this.valueA, this.valueB]);
      expect(await this.methods.contains(this.valueC)).to.be.false;
    });

    it('returns false when adding values already in the set', async function () {
      await this.methods.add(this.valueA);

      await expect(this.methods.add(this.valueA)).to.emit(this.mock, this.events.addReturn).withArgs(false);

      await expectMembersMatch(this.methods, [this.valueA]);
    });
  });

  describe('at', function () {
    it('reverts when retrieving non-existent elements', async function () {
      await expect(this.methods.at(0)).to.be.revertedWithPanic(PANIC_CODES.ARRAY_ACCESS_OUT_OF_BOUNDS);
    });

    it('retrieves existing element', async function () {
      await this.methods.add(this.valueA);
      expect(await this.methods.at(0)).to.equal(this.valueA);
    });
  });

  describe('remove', function () {
    it('removes added values', async function () {
      await this.methods.add(this.valueA);

      await expect(this.methods.remove(this.valueA)).to.emit(this.mock, this.events.removeReturn).withArgs(true);

      expect(await this.methods.contains(this.valueA)).to.be.false;
      await expectMembersMatch(this.methods, []);
    });

    it('returns false when removing values not in the set', async function () {
      await expect(this.methods.remove(this.valueA)).to.emit(this.mock, this.events.removeReturn).withArgs(false);

      expect(await this.methods.contains(this.valueA)).to.be.false;
    });

    it('adds and removes multiple values', async function () {
      // []

      await this.methods.add(this.valueA);
      await this.methods.add(this.valueC);

      // [A, C]

      await this.methods.remove(this.valueA);
      await this.methods.remove(this.valueB);

      // [C]

      await this.methods.add(this.valueB);

      // [C, B]

      await this.methods.add(this.valueA);
      await this.methods.remove(this.valueC);

      // [A, B]

      await this.methods.add(this.valueA);
      await this.methods.add(this.valueB);

      // [A, B]

      await this.methods.add(this.valueC);
      await this.methods.remove(this.valueA);

      // [B, C]

      await this.methods.add(this.valueA);
      await this.methods.remove(this.valueB);

      // [A, C]

      await expectMembersMatch(this.methods, [this.valueA, this.valueC]);

      expect(await this.methods.contains(this.valueB)).to.be.false;
    });
  });
}

module.exports = {
  shouldBehaveLikeSet,
};
@@ -0,0 +1,61 @@
const { ethers } = require('hardhat');
const { loadFixture } = require('@nomicfoundation/hardhat-network-helpers');

const { mapValues } = require('../../helpers/iterate');
const { generators } = require('../../helpers/random');
const { TYPES } = require('../../../scripts/generate/templates/EnumerableSet.opts');

const { shouldBehaveLikeSet } = require('./EnumerableSet.behavior');

const getMethods = (mock, fnSigs) => {
  return mapValues(
    fnSigs,
    fnSig =>
      (...args) =>
        mock.getFunction(fnSig)(0, ...args),
  );
};

async function fixture() {
  const mock = await ethers.deployContract('$EnumerableSet');

  const env = Object.fromEntries(
    TYPES.map(({ name, type }) => [
      type,
      {
        values: Array.from({ length: 3 }, generators[type]),
        methods: getMethods(mock, {
          add: `$add(uint256,${type})`,
          remove: `$remove(uint256,${type})`,
          contains: `$contains(uint256,${type})`,
          length: `$length_EnumerableSet_${name}(uint256)`,
          at: `$at_EnumerableSet_${name}(uint256,uint256)`,
          values: `$values_EnumerableSet_${name}(uint256)`,
        }),
        events: {
          addReturn: `return$add_EnumerableSet_${name}_${type}`,
          removeReturn: `return$remove_EnumerableSet_${name}_${type}`,
        },
      },
    ]),
  );

  return { mock, env };
}

describe('EnumerableSet', function () {
  beforeEach(async function () {
    Object.assign(this, await loadFixture(fixture));
  });

  for (const { type } of TYPES) {
    describe(type, function () {
      beforeEach(function () {
        Object.assign(this, this.env[type]);
        [this.valueA, this.valueB, this.valueC] = this.values;
      });

      shouldBehaveLikeSet();
    });
  }
});
100
lib_openzeppelin_contracts/test/utils/structs/MerkleTree.test.js
Normal file
@@ -0,0 +1,100 @@
const { ethers } = require('hardhat');
const { expect } = require('chai');
const { loadFixture } = require('@nomicfoundation/hardhat-network-helpers');
const { PANIC_CODES } = require('@nomicfoundation/hardhat-chai-matchers/panic');
const { StandardMerkleTree } = require('@openzeppelin/merkle-tree');

const { generators } = require('../../helpers/random');

const makeTree = (leafs = [ethers.ZeroHash]) =>
  StandardMerkleTree.of(
    leafs.map(leaf => [leaf]),
    ['bytes32'],
    { sortLeaves: false },
  );

const hashLeaf = leaf => makeTree().leafHash([leaf]);

const DEPTH = 4n; // 16 slots
const ZERO = hashLeaf(ethers.ZeroHash);

async function fixture() {
  const mock = await ethers.deployContract('MerkleTreeMock');
  await mock.setup(DEPTH, ZERO);
  return { mock };
}

describe('MerkleTree', function () {
  beforeEach(async function () {
    Object.assign(this, await loadFixture(fixture));
  });

  it('sets initial values at setup', async function () {
    const merkleTree = makeTree(Array.from({ length: 2 ** Number(DEPTH) }, () => ethers.ZeroHash));

    expect(await this.mock.root()).to.equal(merkleTree.root);
    expect(await this.mock.depth()).to.equal(DEPTH);
    expect(await this.mock.nextLeafIndex()).to.equal(0n);
  });

  describe('push', function () {
    it('tree is correctly updated', async function () {
      const leafs = Array.from({ length: 2 ** Number(DEPTH) }, () => ethers.ZeroHash);

      // for each leaf slot
      for (const i in leafs) {
        // generate random leaf and hash it
        const hashedLeaf = hashLeaf((leafs[i] = generators.bytes32()));

        // update leaf list and rebuild tree.
        const tree = makeTree(leafs);

        // push value to tree
        await expect(this.mock.push(hashedLeaf)).to.emit(this.mock, 'LeafInserted').withArgs(hashedLeaf, i, tree.root);

        // check tree
        expect(await this.mock.root()).to.equal(tree.root);
        expect(await this.mock.nextLeafIndex()).to.equal(BigInt(i) + 1n);
      }
    });

    it('revert when tree is full', async function () {
      await Promise.all(Array.from({ length: 2 ** Number(DEPTH) }).map(() => this.mock.push(ethers.ZeroHash)));

      await expect(this.mock.push(ethers.ZeroHash)).to.be.revertedWithPanic(PANIC_CODES.TOO_MUCH_MEMORY_ALLOCATED);
    });
  });

  it('reset', async function () {
    // empty tree
    const zeroLeafs = Array.from({ length: 2 ** Number(DEPTH) }, () => ethers.ZeroHash);
    const zeroTree = makeTree(zeroLeafs);

    // tree with one element
    const leafs = Array.from({ length: 2 ** Number(DEPTH) }, () => ethers.ZeroHash);
    const hashedLeaf = hashLeaf((leafs[0] = generators.bytes32())); // fill first leaf and hash it
    const tree = makeTree(leafs);

    // root should be that of a zero tree
    expect(await this.mock.root()).to.equal(zeroTree.root);
    expect(await this.mock.nextLeafIndex()).to.equal(0n);

    // push leaf and check root
    await expect(this.mock.push(hashedLeaf)).to.emit(this.mock, 'LeafInserted').withArgs(hashedLeaf, 0, tree.root);

    expect(await this.mock.root()).to.equal(tree.root);
    expect(await this.mock.nextLeafIndex()).to.equal(1n);

    // reset tree
    await this.mock.setup(DEPTH, ZERO);

    expect(await this.mock.root()).to.equal(zeroTree.root);
    expect(await this.mock.nextLeafIndex()).to.equal(0n);

    // re-push leaf and check root
    await expect(this.mock.push(hashedLeaf)).to.emit(this.mock, 'LeafInserted').withArgs(hashedLeaf, 0, tree.root);

    expect(await this.mock.root()).to.equal(tree.root);
    expect(await this.mock.nextLeafIndex()).to.equal(1n);
  });
});