dexorder
This commit is contained in:
20
lib_openzeppelin_contracts/scripts/checks/compare-layout.js
Normal file
20
lib_openzeppelin_contracts/scripts/checks/compare-layout.js
Normal file
@@ -0,0 +1,20 @@
|
||||
const fs = require('fs');
const { getStorageUpgradeReport } = require('@openzeppelin/upgrades-core/dist/storage');

// CLI flags: --ref <file> is the storage-layout snapshot of the reference
// branch, --head <file> the snapshot of the current branch.
const { ref, head } = require('yargs').argv;

const oldLayout = JSON.parse(fs.readFileSync(ref));
const newLayout = JSON.parse(fs.readFileSync(head));

// Compare every contract present in the reference layout against the current
// one. A contract missing from the current branch is only a warning; an
// incompatible storage layout marks the run as failed (non-zero exit code).
for (const name in oldLayout) {
  if (name in newLayout) {
    const report = getStorageUpgradeReport(oldLayout[name], newLayout[name], {});
    if (!report.ok) {
      // Fix: corrected typo "incompatilibity" -> "incompatibility" in the message.
      console.log(`Storage layout incompatibility found in ${name}:`);
      console.log(report.explain());
      process.exitCode = 1;
    }
  } else {
    console.log(`WARNING: ${name} is missing from the current branch`);
  }
}
|
||||
247
lib_openzeppelin_contracts/scripts/checks/compareGasReports.js
Executable file
247
lib_openzeppelin_contracts/scripts/checks/compareGasReports.js
Executable file
@@ -0,0 +1,247 @@
|
||||
#!/usr/bin/env node

// Compares two hardhat-gas-reporter JSON outputs and prints the differences,
// either as a colored shell table or as a markdown table (for CI comments).

const fs = require('fs');
const chalk = require('chalk');
// CLI options (also readable from environment variables thanks to .env()):
//   --style shell|markdown  output format (default: shell)
//   --hideEqual             hide rows whose gas usage did not change (default: true)
//   --strictTesting         only compare methods with identical call counts (default: false)
const { argv } = require('yargs')
  .env()
  .options({
    style: {
      type: 'string',
      choices: ['shell', 'markdown'],
      default: 'shell',
    },
    hideEqual: {
      type: 'boolean',
      default: true,
    },
    strictTesting: {
      type: 'boolean',
      default: false,
    },
  });

// Deduce base tx cost from the percentage denominator
const BASE_TX_COST = 21000;
|
||||
|
||||
// Utilities

/** Sum of all numeric arguments (0 when called with none). */
function sum(...values) {
  let total = 0;
  for (const value of values) {
    total += value;
  }
  return total;
}

/** Arithmetic mean of all numeric arguments (NaN when called with none). */
function average(...values) {
  return sum(...values) / values.length;
}
|
||||
|
||||
/**
 * Describes how `current` differs from `previous`:
 * - value: the current measurement
 * - delta: absolute change (current - previous)
 * - prcnt: relative change in percent, measured against `previous` minus
 *   `offset` (used to exclude the base transaction cost from the denominator)
 */
function variation(current, previous, offset = 0) {
  const delta = current - previous;
  return {
    value: current,
    delta,
    prcnt: (100 * delta) / (previous - offset),
  };
}
|
||||
|
||||
// Report class
class Report {
  // Read report file
  // Parses a hardhat-gas-reporter JSON output file.
  static load(filepath) {
    return JSON.parse(fs.readFileSync(filepath, 'utf8'));
  }

  // Compare two reports
  // Returns a flat, alphabetically sorted list of rows
  // ({ contract, method, min?, max?, avg }) — deployment rows first, then
  // per-method rows. Rows with no change are dropped when opts.hideEqual is set.
  static compare(update, ref, opts = { hideEqual: true, strictTesting: false }) {
    // Comparing runs produced with different compiler settings is unreliable.
    if (JSON.stringify(update.options?.solcInfo) !== JSON.stringify(ref.options?.solcInfo)) {
      console.warn('WARNING: Reports produced with non matching metadata');
    }

    // gasReporter 1.0.0 uses ".info", but 2.0.0 uses ".data"
    const updateInfo = update.info ?? update.data;
    const refInfo = ref.info ?? ref.data;

    // Deployment rows: bytecode size and construction cost per contract,
    // restricted to contracts that have gas data in both reports.
    const deployments = updateInfo.deployments
      .map(contract =>
        Object.assign(contract, { previousVersion: refInfo.deployments.find(({ name }) => name === contract.name) }),
      )
      .filter(contract => contract.gasData?.length && contract.previousVersion?.gasData?.length)
      .flatMap(contract => [
        {
          contract: contract.name,
          method: '[bytecode length]',
          // hex string length / 2 - 1 -> byte count (assumes a "0x" prefix — TODO confirm)
          avg: variation(contract.bytecode.length / 2 - 1, contract.previousVersion.bytecode.length / 2 - 1),
        },
        {
          contract: contract.name,
          method: '[construction cost]',
          // Average deployment gas of each report, percentage relative to
          // cost above the base transaction cost.
          avg: variation(
            ...[contract.gasData, contract.previousVersion.gasData].map(x => Math.round(average(...x))),
            BASE_TX_COST,
          ),
        },
      ])
      .sort((a, b) => `${a.contract}:${a.method}`.localeCompare(`${b.contract}:${b.method}`));

    // Method rows: min/max/avg gas for every method that was called in the
    // updated run and exists in the reference run.
    const methods = Object.keys(updateInfo.methods)
      .filter(key => refInfo.methods[key])
      .filter(key => updateInfo.methods[key].numberOfCalls > 0)
      .filter(
        // In strict mode, only compare methods exercised the same number of times.
        key => !opts.strictTesting || updateInfo.methods[key].numberOfCalls === refInfo.methods[key].numberOfCalls,
      )
      .map(key => ({
        contract: refInfo.methods[key].contract,
        method: refInfo.methods[key].fnSig,
        min: variation(...[updateInfo, refInfo].map(x => Math.min(...x.methods[key].gasData)), BASE_TX_COST),
        max: variation(...[updateInfo, refInfo].map(x => Math.max(...x.methods[key].gasData)), BASE_TX_COST),
        avg: variation(...[updateInfo, refInfo].map(x => Math.round(average(...x.methods[key].gasData))), BASE_TX_COST),
      }))
      .sort((a, b) => `${a.contract}:${a.method}`.localeCompare(`${b.contract}:${b.method}`));

    return []
      .concat(deployments, methods)
      .filter(row => !opts.hideEqual || row.min?.delta || row.max?.delta || row.avg?.delta);
  }
}
|
||||
|
||||
// Display

/** Pads `text` with spaces on both sides so it sits centered in a `length`-wide column. */
function center(text, length) {
  const leftPadded = text.padStart((text.length + length) / 2);
  return leftPadded.padEnd(length);
}
|
||||
|
||||
/** Returns '+' for strictly positive numbers, '' otherwise (including 0 and NaN). */
function plusSign(num) {
  if (num > 0) {
    return '+';
  }
  return '';
}
|
||||
|
||||
/**
 * Renders one variation cell as three 8-character right-aligned columns
 * (value, signed delta, signed percent), colored red for increases and green
 * for decreases. Non-finite entries (missing cell, NaN, Infinity) render as '-'.
 */
function formatCellShell(cell) {
  let color = 'reset';
  if (cell?.delta > 0) {
    color = 'red';
  } else if (cell?.delta < 0) {
    color = 'green';
  }
  const format = chalk[color];

  const valueTxt = isFinite(cell?.value) ? cell.value.toString() : '-';
  const deltaTxt = isFinite(cell?.delta) ? plusSign(cell.delta) + cell.delta.toString() : '-';
  const prcntTxt = isFinite(cell?.prcnt) ? plusSign(cell.prcnt) + cell.prcnt.toFixed(2) + '%' : '-';

  return [format(valueTxt.padStart(8)), format(deltaTxt.padStart(8)), format(prcntTxt.padStart(8))];
}
|
||||
|
||||
/**
 * Renders comparison rows as a fixed-width shell table with Min/Max/Avg
 * sections (each expanded into value/delta/percent by formatCellShell).
 */
function formatCmpShell(rows) {
  // Column widths adapt to the longest contract/method name, with a floor.
  const contractLength = Math.max(8, ...rows.map(({ contract }) => contract.length));
  const methodLength = Math.max(7, ...rows.map(({ method }) => method.length));

  // Empty txt/length-0 entries produce the leading/trailing '|' borders.
  const COLS = [
    { txt: '', length: 0 },
    { txt: 'Contract', length: contractLength },
    { txt: 'Method', length: methodLength },
    { txt: 'Min', length: 30 },
    { txt: 'Max', length: 30 },
    { txt: 'Avg', length: 30 },
    { txt: '', length: 0 },
  ];
  const HEADER = COLS.map(entry => chalk.bold(center(entry.txt, entry.length || 0)))
    .join(' | ')
    .trim();
  const SEPARATOR = COLS.map(({ length }) => (length > 0 ? '-'.repeat(length + 2) : ''))
    .join('|')
    .trim();

  // Header and every data row are separated by a horizontal rule line.
  return [
    '',
    HEADER,
    ...rows.map(entry =>
      [
        '',
        chalk.grey(entry.contract.padEnd(contractLength)),
        entry.method.padEnd(methodLength),
        ...formatCellShell(entry.min),
        ...formatCellShell(entry.max),
        ...formatCellShell(entry.avg),
        '',
      ]
        .join(' | ')
        .trim(),
    ),
    '',
  ]
    .join(`\n${SEPARATOR}\n`)
    .trim();
}
|
||||
|
||||
/**
 * Markdown table alignment marker for a column separator row.
 * 'left' is the default; unknown values yield undefined.
 */
function alignPattern(align) {
  if (align === undefined || align === 'left') {
    return ':-';
  }
  if (align === 'right') {
    return '-:';
  }
  if (align === 'center') {
    return ':-:';
  }
}
|
||||
|
||||
/** Emoji trend marker: an increase is bad (:x:), a decrease good, zero (or NaN) neutral. */
function trend(value) {
  if (value > 0) {
    return ':x:';
  }
  if (value < 0) {
    return ':heavy_check_mark:';
  }
  return ':heavy_minus_sign:';
}
|
||||
|
||||
/**
 * Renders one variation cell as three markdown columns: value, signed delta,
 * and signed percentage followed by a trend emoji (keyed on the delta's sign).
 * Non-finite entries render as '-'.
 */
function formatCellMarkdown(cell) {
  const value = isFinite(cell?.value) ? cell.value.toString() : '-';
  const delta = isFinite(cell?.delta) ? plusSign(cell.delta) + cell.delta.toString() : '-';
  const prcnt = isFinite(cell?.prcnt) ? plusSign(cell.prcnt) + cell.prcnt.toFixed(2) + '% ' + trend(cell.delta) : '-';
  return [value, delta, prcnt];
}
|
||||
|
||||
/**
 * Renders comparison rows as a markdown table (for posting on PRs), with
 * Min/Max/Avg triplets expanded by formatCellMarkdown.
 */
function formatCmpMarkdown(rows) {
  // Empty txt entries produce the leading/trailing '|' borders.
  const COLS = [
    { txt: '' },
    { txt: 'Contract', align: 'left' },
    { txt: 'Method', align: 'left' },
    { txt: 'Min', align: 'right' },
    { txt: '(+/-)', align: 'right' },
    { txt: '%', align: 'right' },
    { txt: 'Max', align: 'right' },
    { txt: '(+/-)', align: 'right' },
    { txt: '%', align: 'right' },
    { txt: 'Avg', align: 'right' },
    { txt: '(+/-)', align: 'right' },
    { txt: '%', align: 'right' },
    { txt: '' },
  ];
  const HEADER = COLS.map(entry => entry.txt)
    .join(' | ')
    .trim();
  const SEPARATOR = COLS.map(entry => (entry.txt ? alignPattern(entry.align) : ''))
    .join('|')
    .trim();

  return [
    '# Changes to gas costs',
    '',
    HEADER,
    SEPARATOR,
    rows
      .map(entry =>
        [
          '',
          entry.contract,
          entry.method,
          ...formatCellMarkdown(entry.min),
          ...formatCellMarkdown(entry.max),
          ...formatCellMarkdown(entry.avg),
          '',
        ]
          .join(' | ')
          .trim(),
      )
      .join('\n'),
    '',
  ]
    .join('\n')
    .trim();
}
|
||||
|
||||
// MAIN
// Positional arguments: argv._[0] is the updated report, argv._[1] the reference.
// argv itself doubles as the options object (hideEqual / strictTesting).
const report = Report.compare(Report.load(argv._[0]), Report.load(argv._[1]), argv);

switch (argv.style) {
  case 'markdown':
    console.log(formatCmpMarkdown(report));
    break;
  case 'shell':
  default:
    console.log(formatCmpShell(report));
    break;
}
|
||||
38
lib_openzeppelin_contracts/scripts/checks/extract-layout.js
Normal file
38
lib_openzeppelin_contracts/scripts/checks/extract-layout.js
Normal file
@@ -0,0 +1,38 @@
|
||||
const fs = require('fs');
const { findAll, astDereferencer, srcDecoder } = require('solidity-ast/utils');
const { extractStorageLayout } = require('@openzeppelin/upgrades-core/dist/storage/extract');

// Positional arguments: paths to hardhat build-info JSON files.
const { _ } = require('yargs').argv;

// Sources and contract kinds that do not need a storage layout snapshot.
const skipPath = ['contracts/mocks/', 'contracts-exposed/'];
const skipKind = ['interface', 'library'];

/**
 * Extracts the storage layout of every relevant contract in a hardhat
 * build-info file, keyed by contract name.
 */
function extractLayouts(path) {
  const layout = {};
  const { input, output } = JSON.parse(fs.readFileSync(path));

  const decoder = srcDecoder(input, output);
  const deref = astDereferencer(output);

  for (const src in output.contracts) {
    if (skipPath.some(prefix => src.startsWith(prefix))) {
      continue;
    }

    for (const contractDef of findAll('ContractDefinition', output.sources[src].ast)) {
      if (skipKind.includes(contractDef.contractKind)) {
        continue;
      }

      layout[contractDef.name] = extractStorageLayout(
        contractDef,
        decoder,
        deref,
        output.contracts[src][contractDef.name].storageLayout,
      );
    }
  }
  return layout;
}

// Merge the layouts of all provided artifacts and print them as JSON.
// Fix: seed Object.assign with an empty target so the script does not throw a
// TypeError when invoked without arguments, and wrap the callback so that
// Array#map's extra (index, array) arguments are not forwarded.
console.log(JSON.stringify(Object.assign({}, ..._.map(file => extractLayouts(file)))));
|
||||
6
lib_openzeppelin_contracts/scripts/checks/generation.sh
Executable file
6
lib_openzeppelin_contracts/scripts/checks/generation.sh
Executable file
@@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env bash

# Fail fast: abort on any error, unset variable, or failed pipeline stage.
set -euo pipefail

# Re-run code generation, then fail if it changed anything — i.e. the
# committed generated files are out of date. The reversed diff (-R) makes the
# printed patch show what to apply to fix the checkout.
npm run generate
git diff -R --exit-code
|
||||
54
lib_openzeppelin_contracts/scripts/checks/inheritance-ordering.js
Executable file
54
lib_openzeppelin_contracts/scripts/checks/inheritance-ordering.js
Executable file
@@ -0,0 +1,54 @@
|
||||
#!/usr/bin/env node

// Checks that contracts across all provided build artifacts list their base
// contracts in a mutually consistent order. Exits non-zero on any conflict.

const path = require('path');
const graphlib = require('graphlib');
const { findAll } = require('solidity-ast/utils');
const { _: artifacts } = require('yargs').argv;

for (const artifact of artifacts) {
  const { output: solcOutput } = require(path.resolve(__dirname, '../..', artifact));

  // Directed graph of pairwise ordering constraints between contract AST ids.
  const graph = new graphlib.Graph({ directed: true });
  const names = {};
  const linearized = [];

  for (const source in solcOutput.contracts) {
    // Mocks and exposed contracts are not part of the public API surface.
    if (['contracts-exposed/', 'contracts/mocks/'].some(pattern => source.startsWith(pattern))) {
      continue;
    }

    for (const contractDef of findAll('ContractDefinition', solcOutput.sources[source].ast)) {
      names[contractDef.id] = contractDef.name;
      linearized.push(contractDef.linearizedBaseContracts);

      // Record the relative order of every pair in this contract's linearization.
      contractDef.linearizedBaseContracts.forEach((c1, i, contracts) =>
        contracts.slice(i + 1).forEach(c2 => {
          graph.setEdge(c1, c2);
        }),
      );
    }
  }

  /// graphlib.alg.findCycles will not find minimal cycles.
  /// We are only interested in cycles of length 2 (needs proof)
  graph.nodes().forEach((x, i, nodes) =>
    nodes
      .slice(i + 1)
      .filter(y => graph.hasEdge(x, y) && graph.hasEdge(y, x))
      .forEach(y => {
        console.log(`Conflict between ${names[x]} and ${names[y]} detected in the following dependency chains:`);
        linearized
          // graph node keys are strings; linearizations hold numeric ids.
          .filter(chain => chain.includes(parseInt(x)) && chain.includes(parseInt(y)))
          .forEach(chain => {
            const comp = chain.indexOf(parseInt(x)) < chain.indexOf(parseInt(y)) ? '>' : '<';
            console.log(`- ${names[x]} ${comp} ${names[y]} in ${names[chain.find(Boolean)]}`);
            // console.log(`- ${names[x]} ${comp} ${names[y]}: ${chain.reverse().map(id => names[id]).join(', ')}`);
          });
        process.exitCode = 1;
      }),
  );
}

if (!process.exitCode) {
  console.log('Contract ordering is consistent.');
}
|
||||
41
lib_openzeppelin_contracts/scripts/gen-nav.js
Normal file
41
lib_openzeppelin_contracts/scripts/gen-nav.js
Normal file
@@ -0,0 +1,41 @@
|
||||
#!/usr/bin/env node

// Generates the navigation list for the API docs: one xref entry per .adoc
// file found under the directory passed as the first CLI argument.

const path = require('path');
const glob = require('glob');
const startCase = require('lodash.startcase');

const baseDir = process.argv[2];

// All doc pages, as paths relative to baseDir.
const files = glob.sync(baseDir + '/**/*.adoc').map(f => path.relative(baseDir, f));

console.log('.API');
|
||||
|
||||
/**
 * Human-readable navigation title for a documentation page name. A couple of
 * directories have hand-picked titles; everything else is start-cased.
 */
function getPageTitle(directory) {
  if (directory === 'metatx') {
    return 'Meta Transactions';
  }
  if (directory === 'common') {
    return 'Common (Tokens)';
  }
  return startCase(directory);
}
|
||||
|
||||
// Build one xref line per doc file, keeping the bare file name for sorting.
const links = files.map(file => {
  // NOTE(review): `files` entries are already relative to baseDir (see the
  // glob mapping above), so this replace looks like a no-op safety net — confirm.
  const doc = file.replace(baseDir, '');
  const title = path.parse(file).name;

  return {
    xref: `* xref:${doc}[${getPageTitle(title)}]`,
    title,
  };
});

// Case-insensitive sort based on titles (so 'token/ERC20' gets sorted as 'erc20')
const sortedLinks = links.sort(function (a, b) {
  return a.title.toLowerCase().localeCompare(b.title.toLowerCase(), undefined, { numeric: true });
});

for (const link of sortedLinks) {
  console.log(link.xref);
}
|
||||
16
lib_openzeppelin_contracts/scripts/generate/format-lines.js
Normal file
16
lib_openzeppelin_contracts/scripts/generate/format-lines.js
Normal file
@@ -0,0 +1,16 @@
|
||||
function formatLines(...lines) {
|
||||
return [...indentEach(0, lines)].join('\n') + '\n';
|
||||
}
|
||||
|
||||
function* indentEach(indent, lines) {
|
||||
for (const line of lines) {
|
||||
if (Array.isArray(line)) {
|
||||
yield* indentEach(indent + 1, line);
|
||||
} else {
|
||||
const padding = ' '.repeat(indent);
|
||||
yield* line.split('\n').map(subline => (subline === '' ? '' : padding + subline));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = formatLines;
|
||||
53
lib_openzeppelin_contracts/scripts/generate/run.js
Executable file
53
lib_openzeppelin_contracts/scripts/generate/run.js
Executable file
@@ -0,0 +1,53 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
const cp = require('child_process');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const format = require('./format-lines');
|
||||
|
||||
/**
 * Reads the "last updated" OpenZeppelin version banner from a previously
 * generated file. Returns null when the file is missing or has no banner.
 */
function getVersion(filepath) {
  try {
    const content = fs.readFileSync(filepath, 'utf8');
    return content.match(/\/\/ OpenZeppelin Contracts \(last updated v[^)]+\)/)[0];
  } catch (err) {
    return null;
  }
}
|
||||
|
||||
/**
 * Renders `template` (a JS module exporting formatted Solidity source) into
 * `outputPrefix`/`file`, prefixed with the SPDX header, the version banner
 * preserved from the previous generated file (if any), and a provenance note;
 * then formats the result with prettier.
 */
function generateFromTemplate(file, template, outputPrefix = '') {
  // Path of this script relative to the repo root, used to derive a
  // repo-relative template path for the provenance comment.
  const script = path.relative(path.join(__dirname, '../..'), __filename);
  const input = path.join(path.dirname(script), template);
  const output = path.join(outputPrefix, file);
  const version = getVersion(output);
  const content = format(
    '// SPDX-License-Identifier: MIT',
    // Keep the previous "last updated" banner, re-tagged with the file name.
    ...(version ? [version + ` (${file})`] : []),
    `// This file was procedurally generated from ${input}.`,
    '',
    require(template),
  );

  fs.writeFileSync(output, content);
  cp.execFileSync('prettier', ['--write', output]);
}
|
||||
|
||||
// Contracts
// Map of generated contract file -> template module that produces it.
for (const [file, template] of Object.entries({
  'utils/math/SafeCast.sol': './templates/SafeCast.js',
  'utils/structs/EnumerableSet.sol': './templates/EnumerableSet.js',
  'utils/structs/EnumerableMap.sol': './templates/EnumerableMap.js',
  'utils/structs/Checkpoints.sol': './templates/Checkpoints.js',
  'utils/SlotDerivation.sol': './templates/SlotDerivation.js',
  'utils/StorageSlot.sol': './templates/StorageSlot.js',
  'utils/Arrays.sol': './templates/Arrays.js',
  'mocks/StorageSlotMock.sol': './templates/StorageSlotMock.js',
})) {
  generateFromTemplate(file, template, './contracts/');
}

// Tests
// Map of generated test file -> template module that produces it.
for (const [file, template] of Object.entries({
  'utils/structs/Checkpoints.t.sol': './templates/Checkpoints.t.js',
  'utils/SlotDerivation.t.sol': './templates/SlotDerivation.t.js',
})) {
  generateFromTemplate(file, template, './test/');
}
|
||||
385
lib_openzeppelin_contracts/scripts/generate/templates/Arrays.js
Normal file
385
lib_openzeppelin_contracts/scripts/generate/templates/Arrays.js
Normal file
@@ -0,0 +1,385 @@
|
||||
const format = require('../format-lines');
|
||||
const { capitalize } = require('../../helpers');
|
||||
const { TYPES } = require('./Arrays.opts');
|
||||
|
||||
const header = `\
|
||||
pragma solidity ^0.8.20;
|
||||
|
||||
import {SlotDerivation} from "./SlotDerivation.sol";
|
||||
import {StorageSlot} from "./StorageSlot.sol";
|
||||
import {Math} from "./math/Math.sol";
|
||||
|
||||
/**
|
||||
* @dev Collection of functions related to array types.
|
||||
*/
|
||||
`;
|
||||
|
||||
const sort = type => `\
|
||||
/**
|
||||
* @dev Sort an array of ${type} (in memory) following the provided comparator function.
|
||||
*
|
||||
* This function does the sorting "in place", meaning that it overrides the input. The object is returned for
|
||||
* convenience, but that returned value can be discarded safely if the caller has a memory pointer to the array.
|
||||
*
|
||||
* NOTE: this function's cost is \`O(n · log(n))\` in average and \`O(n²)\` in the worst case, with n the length of the
|
||||
* array. Using it in view functions that are executed through \`eth_call\` is safe, but one should be very careful
|
||||
* when executing this as part of a transaction. If the array being sorted is too large, the sort operation may
|
||||
* consume more gas than is available in a block, leading to potential DoS.
|
||||
*/
|
||||
function sort(
|
||||
${type}[] memory array,
|
||||
function(${type}, ${type}) pure returns (bool) comp
|
||||
) internal pure returns (${type}[] memory) {
|
||||
${
|
||||
type === 'bytes32'
|
||||
? '_quickSort(_begin(array), _end(array), comp);'
|
||||
: 'sort(_castToBytes32Array(array), _castToBytes32Comp(comp));'
|
||||
}
|
||||
return array;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Variant of {sort} that sorts an array of ${type} in increasing order.
|
||||
*/
|
||||
function sort(${type}[] memory array) internal pure returns (${type}[] memory) {
|
||||
${type === 'bytes32' ? 'sort(array, _defaultComp);' : 'sort(_castToBytes32Array(array), _defaultComp);'}
|
||||
return array;
|
||||
}
|
||||
`;
|
||||
|
||||
const quickSort = `
|
||||
/**
|
||||
* @dev Performs a quick sort of a segment of memory. The segment sorted starts at \`begin\` (inclusive), and stops
|
||||
* at end (exclusive). Sorting follows the \`comp\` comparator.
|
||||
*
|
||||
* Invariant: \`begin <= end\`. This is the case when initially called by {sort} and is preserved in subcalls.
|
||||
*
|
||||
* IMPORTANT: Memory locations between \`begin\` and \`end\` are not validated/zeroed. This function should
|
||||
* be used only if the limits are within a memory array.
|
||||
*/
|
||||
function _quickSort(uint256 begin, uint256 end, function(bytes32, bytes32) pure returns (bool) comp) private pure {
|
||||
unchecked {
|
||||
if (end - begin < 0x40) return;
|
||||
|
||||
// Use first element as pivot
|
||||
bytes32 pivot = _mload(begin);
|
||||
// Position where the pivot should be at the end of the loop
|
||||
uint256 pos = begin;
|
||||
|
||||
for (uint256 it = begin + 0x20; it < end; it += 0x20) {
|
||||
if (comp(_mload(it), pivot)) {
|
||||
// If the value stored at the iterator's position comes before the pivot, we increment the
|
||||
// position of the pivot and move the value there.
|
||||
pos += 0x20;
|
||||
_swap(pos, it);
|
||||
}
|
||||
}
|
||||
|
||||
_swap(begin, pos); // Swap pivot into place
|
||||
_quickSort(begin, pos, comp); // Sort the left side of the pivot
|
||||
_quickSort(pos + 0x20, end, comp); // Sort the right side of the pivot
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Pointer to the memory location of the first element of \`array\`.
|
||||
*/
|
||||
function _begin(bytes32[] memory array) private pure returns (uint256 ptr) {
|
||||
/// @solidity memory-safe-assembly
|
||||
assembly {
|
||||
ptr := add(array, 0x20)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Pointer to the memory location of the first memory word (32bytes) after \`array\`. This is the memory word
|
||||
* that comes just after the last element of the array.
|
||||
*/
|
||||
function _end(bytes32[] memory array) private pure returns (uint256 ptr) {
|
||||
unchecked {
|
||||
return _begin(array) + array.length * 0x20;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Load memory word (as a bytes32) at location \`ptr\`.
|
||||
*/
|
||||
function _mload(uint256 ptr) private pure returns (bytes32 value) {
|
||||
assembly {
|
||||
value := mload(ptr)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Swaps the elements memory location \`ptr1\` and \`ptr2\`.
|
||||
*/
|
||||
function _swap(uint256 ptr1, uint256 ptr2) private pure {
|
||||
assembly {
|
||||
let value1 := mload(ptr1)
|
||||
let value2 := mload(ptr2)
|
||||
mstore(ptr1, value2)
|
||||
mstore(ptr2, value1)
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const defaultComparator = `
|
||||
/// @dev Comparator for sorting arrays in increasing order.
|
||||
function _defaultComp(bytes32 a, bytes32 b) private pure returns (bool) {
|
||||
return a < b;
|
||||
}
|
||||
`;
|
||||
|
||||
const castArray = type => `\
|
||||
/// @dev Helper: low level cast ${type} memory array to uint256 memory array
|
||||
function _castToBytes32Array(${type}[] memory input) private pure returns (bytes32[] memory output) {
|
||||
assembly {
|
||||
output := input
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const castComparator = type => `\
|
||||
/// @dev Helper: low level cast ${type} comp function to bytes32 comp function
|
||||
function _castToBytes32Comp(
|
||||
function(${type}, ${type}) pure returns (bool) input
|
||||
) private pure returns (function(bytes32, bytes32) pure returns (bool) output) {
|
||||
assembly {
|
||||
output := input
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const search = `
|
||||
/**
|
||||
* @dev Searches a sorted \`array\` and returns the first index that contains
|
||||
* a value greater or equal to \`element\`. If no such index exists (i.e. all
|
||||
* values in the array are strictly less than \`element\`), the array length is
|
||||
* returned. Time complexity O(log n).
|
||||
*
|
||||
* NOTE: The \`array\` is expected to be sorted in ascending order, and to
|
||||
* contain no repeated elements.
|
||||
*
|
||||
* IMPORTANT: Deprecated. This implementation behaves as {lowerBound} but lacks
|
||||
* support for repeated elements in the array. The {lowerBound} function should
|
||||
* be used instead.
|
||||
*/
|
||||
function findUpperBound(uint256[] storage array, uint256 element) internal view returns (uint256) {
|
||||
uint256 low = 0;
|
||||
uint256 high = array.length;
|
||||
|
||||
if (high == 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
while (low < high) {
|
||||
uint256 mid = Math.average(low, high);
|
||||
|
||||
// Note that mid will always be strictly less than high (i.e. it will be a valid array index)
|
||||
// because Math.average rounds towards zero (it does integer division with truncation).
|
||||
if (unsafeAccess(array, mid).value > element) {
|
||||
high = mid;
|
||||
} else {
|
||||
low = mid + 1;
|
||||
}
|
||||
}
|
||||
|
||||
// At this point \`low\` is the exclusive upper bound. We will return the inclusive upper bound.
|
||||
if (low > 0 && unsafeAccess(array, low - 1).value == element) {
|
||||
return low - 1;
|
||||
} else {
|
||||
return low;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Searches an \`array\` sorted in ascending order and returns the first
|
||||
* index that contains a value greater or equal than \`element\`. If no such index
|
||||
* exists (i.e. all values in the array are strictly less than \`element\`), the array
|
||||
* length is returned. Time complexity O(log n).
|
||||
*
|
||||
* See C++'s https://en.cppreference.com/w/cpp/algorithm/lower_bound[lower_bound].
|
||||
*/
|
||||
function lowerBound(uint256[] storage array, uint256 element) internal view returns (uint256) {
|
||||
uint256 low = 0;
|
||||
uint256 high = array.length;
|
||||
|
||||
if (high == 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
while (low < high) {
|
||||
uint256 mid = Math.average(low, high);
|
||||
|
||||
// Note that mid will always be strictly less than high (i.e. it will be a valid array index)
|
||||
// because Math.average rounds towards zero (it does integer division with truncation).
|
||||
if (unsafeAccess(array, mid).value < element) {
|
||||
// this cannot overflow because mid < high
|
||||
unchecked {
|
||||
low = mid + 1;
|
||||
}
|
||||
} else {
|
||||
high = mid;
|
||||
}
|
||||
}
|
||||
|
||||
return low;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Searches an \`array\` sorted in ascending order and returns the first
|
||||
* index that contains a value strictly greater than \`element\`. If no such index
|
||||
* exists (i.e. all values in the array are strictly less than \`element\`), the array
|
||||
* length is returned. Time complexity O(log n).
|
||||
*
|
||||
* See C++'s https://en.cppreference.com/w/cpp/algorithm/upper_bound[upper_bound].
|
||||
*/
|
||||
function upperBound(uint256[] storage array, uint256 element) internal view returns (uint256) {
|
||||
uint256 low = 0;
|
||||
uint256 high = array.length;
|
||||
|
||||
if (high == 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
while (low < high) {
|
||||
uint256 mid = Math.average(low, high);
|
||||
|
||||
// Note that mid will always be strictly less than high (i.e. it will be a valid array index)
|
||||
// because Math.average rounds towards zero (it does integer division with truncation).
|
||||
if (unsafeAccess(array, mid).value > element) {
|
||||
high = mid;
|
||||
} else {
|
||||
// this cannot overflow because mid < high
|
||||
unchecked {
|
||||
low = mid + 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return low;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Same as {lowerBound}, but with an array in memory.
|
||||
*/
|
||||
function lowerBoundMemory(uint256[] memory array, uint256 element) internal pure returns (uint256) {
|
||||
uint256 low = 0;
|
||||
uint256 high = array.length;
|
||||
|
||||
if (high == 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
while (low < high) {
|
||||
uint256 mid = Math.average(low, high);
|
||||
|
||||
// Note that mid will always be strictly less than high (i.e. it will be a valid array index)
|
||||
// because Math.average rounds towards zero (it does integer division with truncation).
|
||||
if (unsafeMemoryAccess(array, mid) < element) {
|
||||
// this cannot overflow because mid < high
|
||||
unchecked {
|
||||
low = mid + 1;
|
||||
}
|
||||
} else {
|
||||
high = mid;
|
||||
}
|
||||
}
|
||||
|
||||
return low;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Same as {upperBound}, but with an array in memory.
|
||||
*/
|
||||
function upperBoundMemory(uint256[] memory array, uint256 element) internal pure returns (uint256) {
|
||||
uint256 low = 0;
|
||||
uint256 high = array.length;
|
||||
|
||||
if (high == 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
while (low < high) {
|
||||
uint256 mid = Math.average(low, high);
|
||||
|
||||
// Note that mid will always be strictly less than high (i.e. it will be a valid array index)
|
||||
// because Math.average rounds towards zero (it does integer division with truncation).
|
||||
if (unsafeMemoryAccess(array, mid) > element) {
|
||||
high = mid;
|
||||
} else {
|
||||
// this cannot overflow because mid < high
|
||||
unchecked {
|
||||
low = mid + 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return low;
|
||||
}
|
||||
`;
|
||||
|
||||
const unsafeAccessStorage = type => `
|
||||
/**
|
||||
* @dev Access an array in an "unsafe" way. Skips solidity "index-out-of-range" check.
|
||||
*
|
||||
* WARNING: Only use if you are certain \`pos\` is lower than the array length.
|
||||
*/
|
||||
function unsafeAccess(${type}[] storage arr, uint256 pos) internal pure returns (StorageSlot.${capitalize(
|
||||
type,
|
||||
)}Slot storage) {
|
||||
bytes32 slot;
|
||||
/// @solidity memory-safe-assembly
|
||||
assembly {
|
||||
slot := arr.slot
|
||||
}
|
||||
return slot.deriveArray().offset(pos).get${capitalize(type)}Slot();
|
||||
}`;
|
||||
|
||||
const unsafeAccessMemory = type => `
|
||||
/**
|
||||
* @dev Access an array in an "unsafe" way. Skips solidity "index-out-of-range" check.
|
||||
*
|
||||
* WARNING: Only use if you are certain \`pos\` is lower than the array length.
|
||||
*/
|
||||
function unsafeMemoryAccess(${type}[] memory arr, uint256 pos) internal pure returns (${type} res) {
|
||||
assembly {
|
||||
res := mload(add(add(arr, 0x20), mul(pos, 0x20)))
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const unsafeSetLength = type => `
|
||||
/**
|
||||
* @dev Helper to set the length of an dynamic array. Directly writing to \`.length\` is forbidden.
|
||||
*
|
||||
* WARNING: this does not clear elements if length is reduced, of initialize elements if length is increased.
|
||||
*/
|
||||
function unsafeSetLength(${type}[] storage array, uint256 len) internal {
|
||||
/// @solidity memory-safe-assembly
|
||||
assembly {
|
||||
sstore(array.slot, len)
|
||||
}
|
||||
}`;
|
||||
|
||||
// GENERATE
// Assemble the full Arrays.sol library source. sort('bytes32') is the core
// implementation; the other types reuse it through the bytes32 cast helpers.
module.exports = format(
  header.trimEnd(),
  'library Arrays {',
  'using SlotDerivation for bytes32;',
  'using StorageSlot for bytes32;',
  // sorting, comparator, helpers and internal
  sort('bytes32'),
  TYPES.filter(type => type !== 'bytes32').map(sort),
  quickSort,
  defaultComparator,
  TYPES.filter(type => type !== 'bytes32').map(castArray),
  TYPES.filter(type => type !== 'bytes32').map(castComparator),
  // lookup
  search,
  // unsafe (direct) storage and memory access
  TYPES.map(unsafeAccessStorage),
  TYPES.map(unsafeAccessMemory),
  TYPES.map(unsafeSetLength),
  '}',
);
|
||||
@@ -0,0 +1,3 @@
|
||||
// Value types for which the Arrays.sol helpers are generated.
const TYPES = ['address', 'bytes32', 'uint256'];

module.exports = { TYPES };
|
||||
@@ -0,0 +1,248 @@
|
||||
const format = require('../format-lines');
|
||||
const { OPTS } = require('./Checkpoints.opts');
|
||||
|
||||
// TEMPLATE
|
||||
const header = `\
|
||||
pragma solidity ^0.8.20;
|
||||
|
||||
import {Math} from "../math/Math.sol";
|
||||
|
||||
/**
|
||||
* @dev This library defines the \`Trace*\` struct, for checkpointing values as they change at different points in
|
||||
* time, and later looking up past values by block number. See {Votes} as an example.
|
||||
*
|
||||
* To create a history of checkpoints define a variable type \`Checkpoints.Trace*\` in your contract, and store a new
|
||||
* checkpoint for the current transaction block using the {push} function.
|
||||
*/
|
||||
`;
|
||||
|
||||
const errors = `\
|
||||
/**
|
||||
* @dev A value was attempted to be inserted on a past checkpoint.
|
||||
*/
|
||||
error CheckpointUnorderedInsertion();
|
||||
`;
|
||||
|
||||
// Per-size Trace implementation. `opts` (see Checkpoints.opts.js) supplies the
// struct/field/type names; the literal below IS the generated Solidity, so its
// content must not be altered — only this surrounding JS commentary may change.
const template = opts => `\
struct ${opts.historyTypeName} {
    ${opts.checkpointTypeName}[] ${opts.checkpointFieldName};
}

struct ${opts.checkpointTypeName} {
    ${opts.keyTypeName} ${opts.keyFieldName};
    ${opts.valueTypeName} ${opts.valueFieldName};
}

/**
 * @dev Pushes a (\`key\`, \`value\`) pair into a ${opts.historyTypeName} so that it is stored as the checkpoint.
 *
 * Returns previous value and new value.
 *
 * IMPORTANT: Never accept \`key\` as a user input, since an arbitrary \`type(${opts.keyTypeName}).max\` key set will disable the
 * library.
 */
function push(
    ${opts.historyTypeName} storage self,
    ${opts.keyTypeName} key,
    ${opts.valueTypeName} value
) internal returns (${opts.valueTypeName}, ${opts.valueTypeName}) {
    return _insert(self.${opts.checkpointFieldName}, key, value);
}

/**
 * @dev Returns the value in the first (oldest) checkpoint with key greater or equal than the search key, or zero if
 * there is none.
 */
function lowerLookup(${opts.historyTypeName} storage self, ${opts.keyTypeName} key) internal view returns (${opts.valueTypeName}) {
    uint256 len = self.${opts.checkpointFieldName}.length;
    uint256 pos = _lowerBinaryLookup(self.${opts.checkpointFieldName}, key, 0, len);
    return pos == len ? 0 : _unsafeAccess(self.${opts.checkpointFieldName}, pos).${opts.valueFieldName};
}

/**
 * @dev Returns the value in the last (most recent) checkpoint with key lower or equal than the search key, or zero
 * if there is none.
 */
function upperLookup(${opts.historyTypeName} storage self, ${opts.keyTypeName} key) internal view returns (${opts.valueTypeName}) {
    uint256 len = self.${opts.checkpointFieldName}.length;
    uint256 pos = _upperBinaryLookup(self.${opts.checkpointFieldName}, key, 0, len);
    return pos == 0 ? 0 : _unsafeAccess(self.${opts.checkpointFieldName}, pos - 1).${opts.valueFieldName};
}

/**
 * @dev Returns the value in the last (most recent) checkpoint with key lower or equal than the search key, or zero
 * if there is none.
 *
 * NOTE: This is a variant of {upperLookup} that is optimised to find "recent" checkpoint (checkpoints with high
 * keys).
 */
function upperLookupRecent(${opts.historyTypeName} storage self, ${opts.keyTypeName} key) internal view returns (${opts.valueTypeName}) {
    uint256 len = self.${opts.checkpointFieldName}.length;

    uint256 low = 0;
    uint256 high = len;

    if (len > 5) {
        uint256 mid = len - Math.sqrt(len);
        if (key < _unsafeAccess(self.${opts.checkpointFieldName}, mid)._key) {
            high = mid;
        } else {
            low = mid + 1;
        }
    }

    uint256 pos = _upperBinaryLookup(self.${opts.checkpointFieldName}, key, low, high);

    return pos == 0 ? 0 : _unsafeAccess(self.${opts.checkpointFieldName}, pos - 1).${opts.valueFieldName};
}

/**
 * @dev Returns the value in the most recent checkpoint, or zero if there are no checkpoints.
 */
function latest(${opts.historyTypeName} storage self) internal view returns (${opts.valueTypeName}) {
    uint256 pos = self.${opts.checkpointFieldName}.length;
    return pos == 0 ? 0 : _unsafeAccess(self.${opts.checkpointFieldName}, pos - 1).${opts.valueFieldName};
}

/**
 * @dev Returns whether there is a checkpoint in the structure (i.e. it is not empty), and if so the key and value
 * in the most recent checkpoint.
 */
function latestCheckpoint(${opts.historyTypeName} storage self)
    internal
    view
    returns (
        bool exists,
        ${opts.keyTypeName} ${opts.keyFieldName},
        ${opts.valueTypeName} ${opts.valueFieldName}
    )
{
    uint256 pos = self.${opts.checkpointFieldName}.length;
    if (pos == 0) {
        return (false, 0, 0);
    } else {
        ${opts.checkpointTypeName} storage ckpt = _unsafeAccess(self.${opts.checkpointFieldName}, pos - 1);
        return (true, ckpt.${opts.keyFieldName}, ckpt.${opts.valueFieldName});
    }
}

/**
 * @dev Returns the number of checkpoint.
 */
function length(${opts.historyTypeName} storage self) internal view returns (uint256) {
    return self.${opts.checkpointFieldName}.length;
}

/**
 * @dev Returns checkpoint at given position.
 */
function at(${opts.historyTypeName} storage self, uint32 pos) internal view returns (${opts.checkpointTypeName} memory) {
    return self.${opts.checkpointFieldName}[pos];
}

/**
 * @dev Pushes a (\`key\`, \`value\`) pair into an ordered list of checkpoints, either by inserting a new checkpoint,
 * or by updating the last one.
 */
function _insert(
    ${opts.checkpointTypeName}[] storage self,
    ${opts.keyTypeName} key,
    ${opts.valueTypeName} value
) private returns (${opts.valueTypeName}, ${opts.valueTypeName}) {
    uint256 pos = self.length;

    if (pos > 0) {
        ${opts.checkpointTypeName} storage last = _unsafeAccess(self, pos - 1);
        ${opts.keyTypeName} lastKey = last.${opts.keyFieldName};
        ${opts.valueTypeName} lastValue = last.${opts.valueFieldName};

        // Checkpoint keys must be non-decreasing.
        if (lastKey > key) {
            revert CheckpointUnorderedInsertion();
        }

        // Update or push new checkpoint
        if (lastKey == key) {
            _unsafeAccess(self, pos - 1).${opts.valueFieldName} = value;
        } else {
            self.push(${opts.checkpointTypeName}({${opts.keyFieldName}: key, ${opts.valueFieldName}: value}));
        }
        return (lastValue, value);
    } else {
        self.push(${opts.checkpointTypeName}({${opts.keyFieldName}: key, ${opts.valueFieldName}: value}));
        return (0, value);
    }
}

/**
 * @dev Return the index of the last (most recent) checkpoint with key lower or equal than the search key, or \`high\`
 * if there is none. \`low\` and \`high\` define a section where to do the search, with inclusive \`low\` and exclusive
 * \`high\`.
 *
 * WARNING: \`high\` should not be greater than the array's length.
 */
function _upperBinaryLookup(
    ${opts.checkpointTypeName}[] storage self,
    ${opts.keyTypeName} key,
    uint256 low,
    uint256 high
) private view returns (uint256) {
    while (low < high) {
        uint256 mid = Math.average(low, high);
        if (_unsafeAccess(self, mid).${opts.keyFieldName} > key) {
            high = mid;
        } else {
            low = mid + 1;
        }
    }
    return high;
}

/**
 * @dev Return the index of the first (oldest) checkpoint with key is greater or equal than the search key, or
 * \`high\` if there is none. \`low\` and \`high\` define a section where to do the search, with inclusive \`low\` and
 * exclusive \`high\`.
 *
 * WARNING: \`high\` should not be greater than the array's length.
 */
function _lowerBinaryLookup(
    ${opts.checkpointTypeName}[] storage self,
    ${opts.keyTypeName} key,
    uint256 low,
    uint256 high
) private view returns (uint256) {
    while (low < high) {
        uint256 mid = Math.average(low, high);
        if (_unsafeAccess(self, mid).${opts.keyFieldName} < key) {
            low = mid + 1;
        } else {
            high = mid;
        }
    }
    return high;
}

/**
 * @dev Access an element of the array without performing bounds check. The position is assumed to be within bounds.
 */
function _unsafeAccess(${opts.checkpointTypeName}[] storage self, uint256 pos)
    private
    pure
    returns (${opts.checkpointTypeName} storage result)
{
    assembly {
        mstore(0, self.slot)
        result.slot := add(keccak256(0, 0x20), pos)
    }
}
`;
/* eslint-enable max-len */
|
||||
|
||||
// GENERATE
// Assemble the final Checkpoints.sol: shared header and errors, then one
// Trace* implementation per entry in OPTS (one per value size).
module.exports = format(
  header.trimEnd(),
  'library Checkpoints {',
  errors,
  OPTS.flatMap(opts => template(opts)),
  '}',
);
|
||||
@@ -0,0 +1,17 @@
|
||||
// OPTIONS
|
||||
const VALUE_SIZES = [224, 208, 160];
|
||||
|
||||
const defaultOpts = size => ({
|
||||
historyTypeName: `Trace${size}`,
|
||||
checkpointTypeName: `Checkpoint${size}`,
|
||||
checkpointFieldName: '_checkpoints',
|
||||
keyTypeName: `uint${256 - size}`,
|
||||
keyFieldName: '_key',
|
||||
valueTypeName: `uint${size}`,
|
||||
valueFieldName: '_value',
|
||||
});
|
||||
|
||||
module.exports = {
|
||||
VALUE_SIZES,
|
||||
OPTS: VALUE_SIZES.map(size => defaultOpts(size)),
|
||||
};
|
||||
@@ -0,0 +1,146 @@
|
||||
const format = require('../format-lines');
|
||||
const { capitalize } = require('../../helpers');
|
||||
const { OPTS } = require('./Checkpoints.opts.js');
|
||||
|
||||
// TEMPLATE
// Preamble of the generated foundry fuzz-test file: pragma and imports of the
// forge-std Test harness, SafeCast and the library under test.
const header = `\
pragma solidity ^0.8.20;

import {Test} from "forge-std/Test.sol";
import {SafeCast} from "@openzeppelin/contracts/utils/math/SafeCast.sol";
import {Checkpoints} from "@openzeppelin/contracts/utils/structs/Checkpoints.sol";
`;
|
||||
|
||||
/* eslint-disable max-len */
// Body of one fuzz-test contract for a given Trace flavor. `opts` supplies the
// key/value type names; the literal below is the generated Solidity test code.
// Fix over previous revision: generated comment read "function with make sure"
// instead of "function will make sure".
const template = opts => `\
using Checkpoints for Checkpoints.${opts.historyTypeName};

// Maximum gap between keys used during the fuzzing tests: the \`_prepareKeys\` function will make sure that
// key#n+1 is in the [key#n, key#n + _KEY_MAX_GAP] range.
uint8 internal constant _KEY_MAX_GAP = 64;

Checkpoints.${opts.historyTypeName} internal _ckpts;

// helpers
function _bound${capitalize(opts.keyTypeName)}(
    ${opts.keyTypeName} x,
    ${opts.keyTypeName} min,
    ${opts.keyTypeName} max
) internal pure returns (${opts.keyTypeName}) {
    return SafeCast.to${capitalize(opts.keyTypeName)}(bound(uint256(x), uint256(min), uint256(max)));
}

function _prepareKeys(
    ${opts.keyTypeName}[] memory keys,
    ${opts.keyTypeName} maxSpread
) internal pure {
    ${opts.keyTypeName} lastKey = 0;
    for (uint256 i = 0; i < keys.length; ++i) {
        ${opts.keyTypeName} key = _bound${capitalize(opts.keyTypeName)}(keys[i], lastKey, lastKey + maxSpread);
        keys[i] = key;
        lastKey = key;
    }
}

function _assertLatestCheckpoint(
    bool exist,
    ${opts.keyTypeName} key,
    ${opts.valueTypeName} value
) internal {
    (bool _exist, ${opts.keyTypeName} _key, ${opts.valueTypeName} _value) = _ckpts.latestCheckpoint();
    assertEq(_exist, exist);
    assertEq(_key, key);
    assertEq(_value, value);
}

// tests
function testPush(
    ${opts.keyTypeName}[] memory keys,
    ${opts.valueTypeName}[] memory values,
    ${opts.keyTypeName} pastKey
) public {
    vm.assume(values.length > 0 && values.length <= keys.length);
    _prepareKeys(keys, _KEY_MAX_GAP);

    // initial state
    assertEq(_ckpts.length(), 0);
    assertEq(_ckpts.latest(), 0);
    _assertLatestCheckpoint(false, 0, 0);

    uint256 duplicates = 0;
    for (uint256 i = 0; i < keys.length; ++i) {
        ${opts.keyTypeName} key = keys[i];
        ${opts.valueTypeName} value = values[i % values.length];
        if (i > 0 && key == keys[i-1]) ++duplicates;

        // push
        _ckpts.push(key, value);

        // check length & latest
        assertEq(_ckpts.length(), i + 1 - duplicates);
        assertEq(_ckpts.latest(), value);
        _assertLatestCheckpoint(true, key, value);
    }

    if (keys.length > 0) {
        ${opts.keyTypeName} lastKey = keys[keys.length - 1];
        if (lastKey > 0) {
            pastKey = _bound${capitalize(opts.keyTypeName)}(pastKey, 0, lastKey - 1);

            vm.expectRevert();
            this.push(pastKey, values[keys.length % values.length]);
        }
    }
}

// used to test reverts
function push(${opts.keyTypeName} key, ${opts.valueTypeName} value) external {
    _ckpts.push(key, value);
}

function testLookup(
    ${opts.keyTypeName}[] memory keys,
    ${opts.valueTypeName}[] memory values,
    ${opts.keyTypeName} lookup
) public {
    vm.assume(values.length > 0 && values.length <= keys.length);
    _prepareKeys(keys, _KEY_MAX_GAP);

    ${opts.keyTypeName} lastKey = keys.length == 0 ? 0 : keys[keys.length - 1];
    lookup = _bound${capitalize(opts.keyTypeName)}(lookup, 0, lastKey + _KEY_MAX_GAP);

    ${opts.valueTypeName} upper = 0;
    ${opts.valueTypeName} lower = 0;
    ${opts.keyTypeName} lowerKey = type(${opts.keyTypeName}).max;
    for (uint256 i = 0; i < keys.length; ++i) {
        ${opts.keyTypeName} key = keys[i];
        ${opts.valueTypeName} value = values[i % values.length];

        // push
        _ckpts.push(key, value);

        // track expected result of lookups
        if (key <= lookup) {
            upper = value;
        }
        // find the first key that is not smaller than the lookup key
        if (key >= lookup && (i == 0 || keys[i-1] < lookup)) {
            lowerKey = key;
        }
        if (key == lowerKey) {
            lower = value;
        }
    }

    // check lookup
    assertEq(_ckpts.lowerLookup(lookup), lower);
    assertEq(_ckpts.upperLookup(lookup), upper);
    assertEq(_ckpts.upperLookupRecent(lookup), upper);
}
`;
|
||||
|
||||
// GENERATE
// Emit one standalone test contract per Trace flavor; the nested array around
// template(opts) makes format() indent the contract body one level.
module.exports = format(
  header,
  ...OPTS.flatMap(opts => [`contract Checkpoints${opts.historyTypeName}Test is Test {`, [template(opts)], '}']),
);
|
||||
@@ -0,0 +1,281 @@
|
||||
const format = require('../format-lines');
|
||||
const { fromBytes32, toBytes32 } = require('./conversion');
|
||||
const { TYPES } = require('./EnumerableMap.opts');
|
||||
|
||||
/* eslint-disable max-len */
// Preamble of the generated EnumerableMap.sol: pragma, EnumerableSet import,
// and the library-level NatSpec listing every supported key/value combination.
const header = `\
pragma solidity ^0.8.20;

import {EnumerableSet} from "./EnumerableSet.sol";

/**
 * @dev Library for managing an enumerable variant of Solidity's
 * https://solidity.readthedocs.io/en/latest/types.html#mapping-types[\`mapping\`]
 * type.
 *
 * Maps have the following properties:
 *
 * - Entries are added, removed, and checked for existence in constant time
 * (O(1)).
 * - Entries are enumerated in O(n). No guarantees are made on the ordering.
 *
 * \`\`\`solidity
 * contract Example {
 *     // Add the library methods
 *     using EnumerableMap for EnumerableMap.UintToAddressMap;
 *
 *     // Declare a set state variable
 *     EnumerableMap.UintToAddressMap private myMap;
 * }
 * \`\`\`
 *
 * The following map types are supported:
 *
 * - \`uint256 -> address\` (\`UintToAddressMap\`) since v3.0.0
 * - \`address -> uint256\` (\`AddressToUintMap\`) since v4.6.0
 * - \`bytes32 -> bytes32\` (\`Bytes32ToBytes32Map\`) since v4.6.0
 * - \`uint256 -> uint256\` (\`UintToUintMap\`) since v4.7.0
 * - \`bytes32 -> uint256\` (\`Bytes32ToUintMap\`) since v4.7.0
 * - \`uint256 -> bytes32\` (\`UintToBytes32Map\`) since v5.1.0
 * - \`address -> address\` (\`AddressToAddressMap\`) since v5.1.0
 * - \`address -> bytes32\` (\`AddressToBytes32Map\`) since v5.1.0
 * - \`bytes32 -> address\` (\`Bytes32ToAddressMap\`) since v5.1.0
 *
 * [WARNING]
 * ====
 * Trying to delete such a structure from storage will likely result in data corruption, rendering the structure
 * unusable.
 * See https://github.com/ethereum/solidity/pull/11843[ethereum/solidity#11843] for more info.
 *
 * In order to clean an EnumerableMap, you can either remove all elements one by one or create a fresh instance using an
 * array of EnumerableMap.
 * ====
 */
`;
/* eslint-enable max-len */
|
||||
|
||||
// Core bytes32 → bytes32 map implementation; all typed maps wrap this one.
// The literal below is the generated Solidity.
// Fix over previous revision: `if(value == 0 && ...)` in get() was missing the
// space after `if`, inconsistent with every other `if (` in this template and
// with the Solidity style guide.
const defaultMap = () => `\
// To implement this library for multiple types with as little code repetition as possible, we write it in
// terms of a generic Map type with bytes32 keys and values. The Map implementation uses private functions,
// and user-facing implementations such as \`UintToAddressMap\` are just wrappers around the underlying Map.
// This means that we can only create new EnumerableMaps for types that fit in bytes32.

/**
 * @dev Query for a nonexistent map key.
 */
error EnumerableMapNonexistentKey(bytes32 key);

struct Bytes32ToBytes32Map {
    // Storage of keys
    EnumerableSet.Bytes32Set _keys;
    mapping(bytes32 key => bytes32) _values;
}

/**
 * @dev Adds a key-value pair to a map, or updates the value for an existing
 * key. O(1).
 *
 * Returns true if the key was added to the map, that is if it was not
 * already present.
 */
function set(
    Bytes32ToBytes32Map storage map,
    bytes32 key,
    bytes32 value
) internal returns (bool) {
    map._values[key] = value;
    return map._keys.add(key);
}

/**
 * @dev Removes a key-value pair from a map. O(1).
 *
 * Returns true if the key was removed from the map, that is if it was present.
 */
function remove(Bytes32ToBytes32Map storage map, bytes32 key) internal returns (bool) {
    delete map._values[key];
    return map._keys.remove(key);
}

/**
 * @dev Returns true if the key is in the map. O(1).
 */
function contains(Bytes32ToBytes32Map storage map, bytes32 key) internal view returns (bool) {
    return map._keys.contains(key);
}

/**
 * @dev Returns the number of key-value pairs in the map. O(1).
 */
function length(Bytes32ToBytes32Map storage map) internal view returns (uint256) {
    return map._keys.length();
}

/**
 * @dev Returns the key-value pair stored at position \`index\` in the map. O(1).
 *
 * Note that there are no guarantees on the ordering of entries inside the
 * array, and it may change when more entries are added or removed.
 *
 * Requirements:
 *
 * - \`index\` must be strictly less than {length}.
 */
function at(Bytes32ToBytes32Map storage map, uint256 index) internal view returns (bytes32, bytes32) {
    bytes32 key = map._keys.at(index);
    return (key, map._values[key]);
}

/**
 * @dev Tries to returns the value associated with \`key\`. O(1).
 * Does not revert if \`key\` is not in the map.
 */
function tryGet(Bytes32ToBytes32Map storage map, bytes32 key) internal view returns (bool, bytes32) {
    bytes32 value = map._values[key];
    if (value == bytes32(0)) {
        return (contains(map, key), bytes32(0));
    } else {
        return (true, value);
    }
}

/**
 * @dev Returns the value associated with \`key\`. O(1).
 *
 * Requirements:
 *
 * - \`key\` must be in the map.
 */
function get(Bytes32ToBytes32Map storage map, bytes32 key) internal view returns (bytes32) {
    bytes32 value = map._values[key];
    if (value == 0 && !contains(map, key)) {
        revert EnumerableMapNonexistentKey(key);
    }
    return value;
}

/**
 * @dev Return the an array containing all the keys
 *
 * WARNING: This operation will copy the entire storage to memory, which can be quite expensive. This is designed
 * to mostly be used by view accessors that are queried without any gas fees. Developers should keep in mind that
 * this function has an unbounded cost, and using it as part of a state-changing function may render the function
 * uncallable if the map grows to a point where copying to memory consumes too much gas to fit in a block.
 */
function keys(Bytes32ToBytes32Map storage map) internal view returns (bytes32[] memory) {
    return map._keys.values();
}
`;
|
||||
|
||||
const customMap = ({ name, keyType, valueType }) => `\
|
||||
// ${name}
|
||||
|
||||
struct ${name} {
|
||||
Bytes32ToBytes32Map _inner;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Adds a key-value pair to a map, or updates the value for an existing
|
||||
* key. O(1).
|
||||
*
|
||||
* Returns true if the key was added to the map, that is if it was not
|
||||
* already present.
|
||||
*/
|
||||
function set(
|
||||
${name} storage map,
|
||||
${keyType} key,
|
||||
${valueType} value
|
||||
) internal returns (bool) {
|
||||
return set(map._inner, ${toBytes32(keyType, 'key')}, ${toBytes32(valueType, 'value')});
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Removes a value from a map. O(1).
|
||||
*
|
||||
* Returns true if the key was removed from the map, that is if it was present.
|
||||
*/
|
||||
function remove(${name} storage map, ${keyType} key) internal returns (bool) {
|
||||
return remove(map._inner, ${toBytes32(keyType, 'key')});
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Returns true if the key is in the map. O(1).
|
||||
*/
|
||||
function contains(${name} storage map, ${keyType} key) internal view returns (bool) {
|
||||
return contains(map._inner, ${toBytes32(keyType, 'key')});
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Returns the number of elements in the map. O(1).
|
||||
*/
|
||||
function length(${name} storage map) internal view returns (uint256) {
|
||||
return length(map._inner);
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Returns the element stored at position \`index\` in the map. O(1).
|
||||
* Note that there are no guarantees on the ordering of values inside the
|
||||
* array, and it may change when more values are added or removed.
|
||||
*
|
||||
* Requirements:
|
||||
*
|
||||
* - \`index\` must be strictly less than {length}.
|
||||
*/
|
||||
function at(${name} storage map, uint256 index) internal view returns (${keyType}, ${valueType}) {
|
||||
(bytes32 key, bytes32 value) = at(map._inner, index);
|
||||
return (${fromBytes32(keyType, 'key')}, ${fromBytes32(valueType, 'value')});
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Tries to returns the value associated with \`key\`. O(1).
|
||||
* Does not revert if \`key\` is not in the map.
|
||||
*/
|
||||
function tryGet(${name} storage map, ${keyType} key) internal view returns (bool, ${valueType}) {
|
||||
(bool success, bytes32 value) = tryGet(map._inner, ${toBytes32(keyType, 'key')});
|
||||
return (success, ${fromBytes32(valueType, 'value')});
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Returns the value associated with \`key\`. O(1).
|
||||
*
|
||||
* Requirements:
|
||||
*
|
||||
* - \`key\` must be in the map.
|
||||
*/
|
||||
function get(${name} storage map, ${keyType} key) internal view returns (${valueType}) {
|
||||
return ${fromBytes32(valueType, `get(map._inner, ${toBytes32(keyType, 'key')})`)};
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Return the an array containing all the keys
|
||||
*
|
||||
* WARNING: This operation will copy the entire storage to memory, which can be quite expensive. This is designed
|
||||
* to mostly be used by view accessors that are queried without any gas fees. Developers should keep in mind that
|
||||
* this function has an unbounded cost, and using it as part of a state-changing function may render the function
|
||||
* uncallable if the map grows to a point where copying to memory consumes too much gas to fit in a block.
|
||||
*/
|
||||
function keys(${name} storage map) internal view returns (${keyType}[] memory) {
|
||||
bytes32[] memory store = keys(map._inner);
|
||||
${keyType}[] memory result;
|
||||
|
||||
/// @solidity memory-safe-assembly
|
||||
assembly {
|
||||
result := store
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
`;
|
||||
|
||||
// GENERATE
// Assemble EnumerableMap.sol: the bytes32 core map first, then one typed
// wrapper per key/value combination declared in TYPES.
module.exports = format(
  header.trimEnd(),
  'library EnumerableMap {',
  [
    'using EnumerableSet for EnumerableSet.Bytes32Set;',
    '',
    defaultMap(),
    TYPES.map(details => customMap(details).trimEnd()).join('\n\n'),
  ],
  '}',
);
|
||||
@@ -0,0 +1,19 @@
|
||||
const { capitalize } = require('../../helpers');
|
||||
|
||||
const mapType = str => (str == 'uint256' ? 'Uint' : capitalize(str));
|
||||
|
||||
const formatType = (keyType, valueType) => ({
|
||||
name: `${mapType(keyType)}To${mapType(valueType)}Map`,
|
||||
keyType,
|
||||
valueType,
|
||||
});
|
||||
|
||||
const TYPES = ['uint256', 'address', 'bytes32']
|
||||
.flatMap((key, _, array) => array.map(value => [key, value]))
|
||||
.slice(0, -1) // remove bytes32 → byte32 (last one) that is already defined
|
||||
.map(args => formatType(...args));
|
||||
|
||||
module.exports = {
|
||||
TYPES,
|
||||
formatType,
|
||||
};
|
||||
@@ -0,0 +1,245 @@
|
||||
const format = require('../format-lines');
|
||||
const { fromBytes32, toBytes32 } = require('./conversion');
|
||||
const { TYPES } = require('./EnumerableSet.opts');
|
||||
|
||||
/* eslint-disable max-len */
// Preamble of the generated EnumerableSet.sol: pragma and library-level NatSpec.
const header = `\
pragma solidity ^0.8.20;

/**
 * @dev Library for managing
 * https://en.wikipedia.org/wiki/Set_(abstract_data_type)[sets] of primitive
 * types.
 *
 * Sets have the following properties:
 *
 * - Elements are added, removed, and checked for existence in constant time
 * (O(1)).
 * - Elements are enumerated in O(n). No guarantees are made on the ordering.
 *
 * \`\`\`solidity
 * contract Example {
 *     // Add the library methods
 *     using EnumerableSet for EnumerableSet.AddressSet;
 *
 *     // Declare a set state variable
 *     EnumerableSet.AddressSet private mySet;
 * }
 * \`\`\`
 *
 * As of v3.3.0, sets of type \`bytes32\` (\`Bytes32Set\`), \`address\` (\`AddressSet\`)
 * and \`uint256\` (\`UintSet\`) are supported.
 *
 * [WARNING]
 * ====
 * Trying to delete such a structure from storage will likely result in data corruption, rendering the structure
 * unusable.
 * See https://github.com/ethereum/solidity/pull/11843[ethereum/solidity#11843] for more info.
 *
 * In order to clean an EnumerableSet, you can either remove all elements one by one or create a fresh instance using an
 * array of EnumerableSet.
 * ====
 */
`;
/* eslint-enable max-len */
|
||||
|
||||
const defaultSet = () => `\
|
||||
// To implement this library for multiple types with as little code
|
||||
// repetition as possible, we write it in terms of a generic Set type with
|
||||
// bytes32 values.
|
||||
// The Set implementation uses private functions, and user-facing
|
||||
// implementations (such as AddressSet) are just wrappers around the
|
||||
// underlying Set.
|
||||
// This means that we can only create new EnumerableSets for types that fit
|
||||
// in bytes32.
|
||||
|
||||
struct Set {
|
||||
// Storage of set values
|
||||
bytes32[] _values;
|
||||
// Position is the index of the value in the \`values\` array plus 1.
|
||||
// Position 0 is used to mean a value is not in the set.
|
||||
mapping(bytes32 value => uint256) _positions;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Add a value to a set. O(1).
|
||||
*
|
||||
* Returns true if the value was added to the set, that is if it was not
|
||||
* already present.
|
||||
*/
|
||||
function _add(Set storage set, bytes32 value) private returns (bool) {
|
||||
if (!_contains(set, value)) {
|
||||
set._values.push(value);
|
||||
// The value is stored at length-1, but we add 1 to all indexes
|
||||
// and use 0 as a sentinel value
|
||||
set._positions[value] = set._values.length;
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Removes a value from a set. O(1).
|
||||
*
|
||||
* Returns true if the value was removed from the set, that is if it was
|
||||
* present.
|
||||
*/
|
||||
function _remove(Set storage set, bytes32 value) private returns (bool) {
|
||||
// We cache the value's position to prevent multiple reads from the same storage slot
|
||||
uint256 position = set._positions[value];
|
||||
|
||||
if (position != 0) {
|
||||
// Equivalent to contains(set, value)
|
||||
// To delete an element from the _values array in O(1), we swap the element to delete with the last one in
|
||||
// the array, and then remove the last element (sometimes called as 'swap and pop').
|
||||
// This modifies the order of the array, as noted in {at}.
|
||||
|
||||
uint256 valueIndex = position - 1;
|
||||
uint256 lastIndex = set._values.length - 1;
|
||||
|
||||
if (valueIndex != lastIndex) {
|
||||
bytes32 lastValue = set._values[lastIndex];
|
||||
|
||||
// Move the lastValue to the index where the value to delete is
|
||||
set._values[valueIndex] = lastValue;
|
||||
// Update the tracked position of the lastValue (that was just moved)
|
||||
set._positions[lastValue] = position;
|
||||
}
|
||||
|
||||
// Delete the slot where the moved value was stored
|
||||
set._values.pop();
|
||||
|
||||
// Delete the tracked position for the deleted slot
|
||||
delete set._positions[value];
|
||||
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Returns true if the value is in the set. O(1).
|
||||
*/
|
||||
function _contains(Set storage set, bytes32 value) private view returns (bool) {
|
||||
return set._positions[value] != 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Returns the number of values on the set. O(1).
|
||||
*/
|
||||
function _length(Set storage set) private view returns (uint256) {
|
||||
return set._values.length;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Returns the value stored at position \`index\` in the set. O(1).
|
||||
*
|
||||
* Note that there are no guarantees on the ordering of values inside the
|
||||
* array, and it may change when more values are added or removed.
|
||||
*
|
||||
* Requirements:
|
||||
*
|
||||
* - \`index\` must be strictly less than {length}.
|
||||
*/
|
||||
function _at(Set storage set, uint256 index) private view returns (bytes32) {
|
||||
return set._values[index];
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Return the entire set in an array
|
||||
*
|
||||
* WARNING: This operation will copy the entire storage to memory, which can be quite expensive. This is designed
|
||||
* to mostly be used by view accessors that are queried without any gas fees. Developers should keep in mind that
|
||||
* this function has an unbounded cost, and using it as part of a state-changing function may render the function
|
||||
* uncallable if the set grows to a point where copying to memory consumes too much gas to fit in a block.
|
||||
*/
|
||||
function _values(Set storage set) private view returns (bytes32[] memory) {
|
||||
return set._values;
|
||||
}
|
||||
`;
|
||||
|
||||
const customSet = ({ name, type }) => `\
|
||||
// ${name}
|
||||
|
||||
struct ${name} {
|
||||
Set _inner;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Add a value to a set. O(1).
|
||||
*
|
||||
* Returns true if the value was added to the set, that is if it was not
|
||||
* already present.
|
||||
*/
|
||||
function add(${name} storage set, ${type} value) internal returns (bool) {
|
||||
return _add(set._inner, ${toBytes32(type, 'value')});
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Removes a value from a set. O(1).
|
||||
*
|
||||
* Returns true if the value was removed from the set, that is if it was
|
||||
* present.
|
||||
*/
|
||||
function remove(${name} storage set, ${type} value) internal returns (bool) {
|
||||
return _remove(set._inner, ${toBytes32(type, 'value')});
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Returns true if the value is in the set. O(1).
|
||||
*/
|
||||
function contains(${name} storage set, ${type} value) internal view returns (bool) {
|
||||
return _contains(set._inner, ${toBytes32(type, 'value')});
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Returns the number of values in the set. O(1).
|
||||
*/
|
||||
function length(${name} storage set) internal view returns (uint256) {
|
||||
return _length(set._inner);
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Returns the value stored at position \`index\` in the set. O(1).
|
||||
*
|
||||
* Note that there are no guarantees on the ordering of values inside the
|
||||
* array, and it may change when more values are added or removed.
|
||||
*
|
||||
* Requirements:
|
||||
*
|
||||
* - \`index\` must be strictly less than {length}.
|
||||
*/
|
||||
function at(${name} storage set, uint256 index) internal view returns (${type}) {
|
||||
return ${fromBytes32(type, '_at(set._inner, index)')};
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Return the entire set in an array
|
||||
*
|
||||
* WARNING: This operation will copy the entire storage to memory, which can be quite expensive. This is designed
|
||||
* to mostly be used by view accessors that are queried without any gas fees. Developers should keep in mind that
|
||||
* this function has an unbounded cost, and using it as part of a state-changing function may render the function
|
||||
* uncallable if the set grows to a point where copying to memory consumes too much gas to fit in a block.
|
||||
*/
|
||||
function values(${name} storage set) internal view returns (${type}[] memory) {
|
||||
bytes32[] memory store = _values(set._inner);
|
||||
${type}[] memory result;
|
||||
|
||||
/// @solidity memory-safe-assembly
|
||||
assembly {
|
||||
result := store
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
`;
|
||||
|
||||
// GENERATE
// Assemble the EnumerableSet library source: the generic bytes32 `Set` core
// (defaultSet) followed by one thin typed wrapper (customSet) per entry in
// TYPES, separated by blank lines.
module.exports = format(
  header.trimEnd(),
  'library EnumerableSet {',
  [defaultSet(), TYPES.map(details => customSet(details).trimEnd()).join('\n\n')],
  '}',
);
|
||||
@@ -0,0 +1,12 @@
|
||||
const { capitalize } = require('../../helpers');

// Map an elementary Solidity type to the short name used in generated
// identifiers (e.g. 'uint256' -> 'Uint', 'address' -> 'Address').
// Use strict equality: `str` is always a string, and `==` invites coercion bugs.
const mapType = str => (str === 'uint256' ? 'Uint' : capitalize(str));

// Build the { name, type } descriptor for a set holding `type` elements.
const formatType = type => ({
  name: `${mapType(type)}Set`,
  type,
});

// Elementary types for which an EnumerableSet variant is generated.
const TYPES = ['bytes32', 'address', 'uint256'].map(formatType);

module.exports = { TYPES, formatType };
|
||||
@@ -0,0 +1,138 @@
|
||||
const format = require('../format-lines');
|
||||
const { range } = require('../../helpers');
|
||||
|
||||
const LENGTHS = range(8, 256, 8).reverse(); // 248 → 8 (in steps of 8)
|
||||
|
||||
const header = `\
|
||||
pragma solidity ^0.8.20;
|
||||
|
||||
/**
|
||||
* @dev Wrappers over Solidity's uintXX/intXX/bool casting operators with added overflow
|
||||
* checks.
|
||||
*
|
||||
* Downcasting from uint256/int256 in Solidity does not revert on overflow. This can
|
||||
* easily result in undesired exploitation or bugs, since developers usually
|
||||
* assume that overflows raise errors. \`SafeCast\` restores this intuition by
|
||||
* reverting the transaction when such an operation overflows.
|
||||
*
|
||||
* Using this library instead of the unchecked operations eliminates an entire
|
||||
* class of bugs, so it's recommended to use it always.
|
||||
*/
|
||||
`;
|
||||
|
||||
const errors = `\
|
||||
/**
|
||||
* @dev Value doesn't fit in an uint of \`bits\` size.
|
||||
*/
|
||||
error SafeCastOverflowedUintDowncast(uint8 bits, uint256 value);
|
||||
|
||||
/**
|
||||
* @dev An int value doesn't fit in an uint of \`bits\` size.
|
||||
*/
|
||||
error SafeCastOverflowedIntToUint(int256 value);
|
||||
|
||||
/**
|
||||
* @dev Value doesn't fit in an int of \`bits\` size.
|
||||
*/
|
||||
error SafeCastOverflowedIntDowncast(uint8 bits, int256 value);
|
||||
|
||||
/**
|
||||
* @dev An uint value doesn't fit in an int of \`bits\` size.
|
||||
*/
|
||||
error SafeCastOverflowedUintToInt(uint256 value);
|
||||
`;
|
||||
|
||||
const toUintDownCast = length => `\
|
||||
/**
|
||||
* @dev Returns the downcasted uint${length} from uint256, reverting on
|
||||
* overflow (when the input is greater than largest uint${length}).
|
||||
*
|
||||
* Counterpart to Solidity's \`uint${length}\` operator.
|
||||
*
|
||||
* Requirements:
|
||||
*
|
||||
* - input must fit into ${length} bits
|
||||
*/
|
||||
function toUint${length}(uint256 value) internal pure returns (uint${length}) {
|
||||
if (value > type(uint${length}).max) {
|
||||
revert SafeCastOverflowedUintDowncast(${length}, value);
|
||||
}
|
||||
return uint${length}(value);
|
||||
}
|
||||
`;
|
||||
|
||||
/* eslint-disable max-len */
|
||||
const toIntDownCast = length => `\
|
||||
/**
|
||||
* @dev Returns the downcasted int${length} from int256, reverting on
|
||||
* overflow (when the input is less than smallest int${length} or
|
||||
* greater than largest int${length}).
|
||||
*
|
||||
* Counterpart to Solidity's \`int${length}\` operator.
|
||||
*
|
||||
* Requirements:
|
||||
*
|
||||
* - input must fit into ${length} bits
|
||||
*/
|
||||
function toInt${length}(int256 value) internal pure returns (int${length} downcasted) {
|
||||
downcasted = int${length}(value);
|
||||
if (downcasted != value) {
|
||||
revert SafeCastOverflowedIntDowncast(${length}, value);
|
||||
}
|
||||
}
|
||||
`;
|
||||
/* eslint-enable max-len */
|
||||
|
||||
const toInt = length => `\
|
||||
/**
|
||||
* @dev Converts an unsigned uint${length} into a signed int${length}.
|
||||
*
|
||||
* Requirements:
|
||||
*
|
||||
* - input must be less than or equal to maxInt${length}.
|
||||
*/
|
||||
function toInt${length}(uint${length} value) internal pure returns (int${length}) {
|
||||
// Note: Unsafe cast below is okay because \`type(int${length}).max\` is guaranteed to be positive
|
||||
if (value > uint${length}(type(int${length}).max)) {
|
||||
revert SafeCastOverflowedUintToInt(value);
|
||||
}
|
||||
return int${length}(value);
|
||||
}
|
||||
`;
|
||||
|
||||
const toUint = length => `\
|
||||
/**
|
||||
* @dev Converts a signed int${length} into an unsigned uint${length}.
|
||||
*
|
||||
* Requirements:
|
||||
*
|
||||
* - input must be greater than or equal to 0.
|
||||
*/
|
||||
function toUint${length}(int${length} value) internal pure returns (uint${length}) {
|
||||
if (value < 0) {
|
||||
revert SafeCastOverflowedIntToUint(value);
|
||||
}
|
||||
return uint${length}(value);
|
||||
}
|
||||
`;
|
||||
|
||||
const boolToUint = `
|
||||
/**
|
||||
* @dev Cast a boolean (false or true) to a uint256 (0 or 1) with no jump.
|
||||
*/
|
||||
function toUint(bool b) internal pure returns (uint256 u) {
|
||||
/// @solidity memory-safe-assembly
|
||||
assembly {
|
||||
u := iszero(iszero(b))
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
// GENERATE
// Assemble the SafeCast library source: the custom errors, per-size uint and
// int downcasts (248 down to 8 bits), the full-width sign conversions, and the
// branchless bool -> uint helper.
module.exports = format(
  header.trimEnd(),
  'library SafeCast {',
  errors,
  [...LENGTHS.map(toUintDownCast), toUint(256), ...LENGTHS.map(toIntDownCast), toInt(256), boolToUint],
  '}',
);
|
||||
@@ -0,0 +1,13 @@
|
||||
const { capitalize } = require('../../helpers');

// Type descriptors shared by the StorageSlot / SlotDerivation generators.
// - isValueType: value types additionally get UDVT + transient-storage helpers.
// - variants: extra narrow types exercised alongside the canonical one.
// - name: identifier fragment used in generated code; defaults to the
//   capitalized type name.
// Build fresh objects with spread instead of mutating the literals in place
// with Object.assign — same resulting shape, no in-place mutation.
const TYPES = [
  { type: 'address', isValueType: true },
  { type: 'bool', isValueType: true, name: 'Boolean' },
  { type: 'bytes32', isValueType: true, variants: ['bytes4'] },
  { type: 'uint256', isValueType: true, variants: ['uint32'] },
  { type: 'int256', isValueType: true, variants: ['int32'] },
  { type: 'string', isValueType: false },
  { type: 'bytes', isValueType: false },
].map(type => ({ ...type, name: type.name ?? capitalize(type.type) }));

module.exports = { TYPES };
|
||||
@@ -0,0 +1,116 @@
|
||||
const format = require('../format-lines');
|
||||
const { TYPES } = require('./Slot.opts');
|
||||
|
||||
const header = `\
|
||||
pragma solidity ^0.8.20;
|
||||
|
||||
/**
|
||||
* @dev Library for computing storage (and transient storage) locations from namespaces and deriving slots
|
||||
* corresponding to standard patterns. The derivation method for array and mapping matches the storage layout used by
|
||||
* the solidity language / compiler.
|
||||
*
|
||||
* See https://docs.soliditylang.org/en/v0.8.20/internals/layout_in_storage.html#mappings-and-dynamic-arrays[Solidity docs for mappings and dynamic arrays.].
|
||||
*
|
||||
* Example usage:
|
||||
* \`\`\`solidity
|
||||
* contract Example {
|
||||
* // Add the library methods
|
||||
* using StorageSlot for bytes32;
|
||||
* using SlotDerivation for bytes32;
|
||||
*
|
||||
* // Declare a namespace
|
||||
* string private constant _NAMESPACE = "<namespace>" // eg. OpenZeppelin.Slot
|
||||
*
|
||||
* function setValueInNamespace(uint256 key, address newValue) internal {
|
||||
* _NAMESPACE.erc7201Slot().deriveMapping(key).getAddressSlot().value = newValue;
|
||||
* }
|
||||
*
|
||||
* function getValueInNamespace(uint256 key) internal view returns (address) {
|
||||
* return _NAMESPACE.erc7201Slot().deriveMapping(key).getAddressSlot().value;
|
||||
* }
|
||||
* }
|
||||
* \`\`\`
|
||||
*
|
||||
* TIP: Consider using this library along with {StorageSlot}.
|
||||
*
|
||||
* NOTE: This library provides a way to manipulate storage locations in a non-standard way. Tooling for checking
|
||||
* upgrade safety will ignore the slots accessed through this library.
|
||||
*/
|
||||
`;
|
||||
|
||||
const namespace = `\
|
||||
/**
|
||||
* @dev Derive an ERC-7201 slot from a string (namespace).
|
||||
*/
|
||||
function erc7201Slot(string memory namespace) internal pure returns (bytes32 slot) {
|
||||
/// @solidity memory-safe-assembly
|
||||
assembly {
|
||||
mstore(0x00, sub(keccak256(add(namespace, 0x20), mload(namespace)), 1))
|
||||
slot := and(keccak256(0x00, 0x20), not(0xff))
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const array = `\
|
||||
/**
|
||||
* @dev Add an offset to a slot to get the n-th element of a structure or an array.
|
||||
*/
|
||||
function offset(bytes32 slot, uint256 pos) internal pure returns (bytes32 result) {
|
||||
unchecked {
|
||||
return bytes32(uint256(slot) + pos);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Derive the location of the first element in an array from the slot where the length is stored.
|
||||
*/
|
||||
function deriveArray(bytes32 slot) internal pure returns (bytes32 result) {
|
||||
/// @solidity memory-safe-assembly
|
||||
assembly {
|
||||
mstore(0x00, slot)
|
||||
result := keccak256(0x00, 0x20)
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const mapping = ({ type }) => `\
|
||||
/**
|
||||
* @dev Derive the location of a mapping element from the key.
|
||||
*/
|
||||
function deriveMapping(bytes32 slot, ${type} key) internal pure returns (bytes32 result) {
|
||||
/// @solidity memory-safe-assembly
|
||||
assembly {
|
||||
mstore(0x00, key)
|
||||
mstore(0x20, slot)
|
||||
result := keccak256(0x00, 0x40)
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const mapping2 = ({ type }) => `\
|
||||
/**
|
||||
* @dev Derive the location of a mapping element from the key.
|
||||
*/
|
||||
function deriveMapping(bytes32 slot, ${type} memory key) internal pure returns (bytes32 result) {
|
||||
/// @solidity memory-safe-assembly
|
||||
assembly {
|
||||
let length := mload(key)
|
||||
let begin := add(key, 0x20)
|
||||
let end := add(begin, length)
|
||||
let cache := mload(end)
|
||||
mstore(end, slot)
|
||||
result := keccak256(begin, add(length, 0x20))
|
||||
mstore(end, cache)
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
// GENERATE
// Assemble the SlotDerivation library source: ERC-7201 namespace derivation,
// array slot helpers, then one deriveMapping overload per key type (value-type
// keys via `mapping`, dynamic string/bytes keys via `mapping2`).
module.exports = format(
  header.trimEnd(),
  'library SlotDerivation {',
  namespace,
  array,
  TYPES.map(type => (type.isValueType ? mapping(type) : mapping2(type))),
  '}',
);
|
||||
@@ -0,0 +1,73 @@
|
||||
const format = require('../format-lines');
|
||||
const { capitalize } = require('../../helpers');
|
||||
const { TYPES } = require('./Slot.opts');
|
||||
|
||||
const header = `\
|
||||
pragma solidity ^0.8.20;
|
||||
|
||||
import {Test} from "forge-std/Test.sol";
|
||||
|
||||
import {SlotDerivation} from "@openzeppelin/contracts/utils/SlotDerivation.sol";
|
||||
`;
|
||||
|
||||
const array = `\
|
||||
bytes[] private _array;
|
||||
|
||||
function testDeriveArray(uint256 length, uint256 offset) public {
|
||||
length = bound(length, 1, type(uint256).max);
|
||||
offset = bound(offset, 0, length - 1);
|
||||
|
||||
bytes32 baseSlot;
|
||||
assembly {
|
||||
baseSlot := _array.slot
|
||||
sstore(baseSlot, length) // store length so solidity access does not revert
|
||||
}
|
||||
|
||||
bytes storage derived = _array[offset];
|
||||
bytes32 derivedSlot;
|
||||
assembly {
|
||||
derivedSlot := derived.slot
|
||||
}
|
||||
|
||||
assertEq(baseSlot.deriveArray().offset(offset), derivedSlot);
|
||||
}
|
||||
`;
|
||||
|
||||
const mapping = ({ type, name, isValueType }) => `\
|
||||
mapping(${type} => bytes) private _${type}Mapping;
|
||||
|
||||
function testDeriveMapping${name}(${type} ${isValueType ? '' : 'memory'} key) public {
|
||||
bytes32 baseSlot;
|
||||
assembly {
|
||||
baseSlot := _${type}Mapping.slot
|
||||
}
|
||||
|
||||
bytes storage derived = _${type}Mapping[key];
|
||||
bytes32 derivedSlot;
|
||||
assembly {
|
||||
derivedSlot := derived.slot
|
||||
}
|
||||
|
||||
assertEq(baseSlot.deriveMapping(key), derivedSlot);
|
||||
}
|
||||
`;
|
||||
|
||||
// GENERATE
|
||||
module.exports = format(
|
||||
header.trimEnd(),
|
||||
'contract SlotDerivationTest is Test {',
|
||||
'using SlotDerivation for bytes32;',
|
||||
'',
|
||||
array,
|
||||
TYPES.flatMap(type =>
|
||||
[].concat(
|
||||
type,
|
||||
(type.variants ?? []).map(variant => ({
|
||||
type: variant,
|
||||
name: capitalize(variant),
|
||||
isValueType: type.isValueType,
|
||||
})),
|
||||
),
|
||||
).map(type => mapping(type)),
|
||||
'}',
|
||||
);
|
||||
@@ -0,0 +1,127 @@
|
||||
const format = require('../format-lines');
|
||||
const { TYPES } = require('./Slot.opts');
|
||||
|
||||
const header = `\
|
||||
pragma solidity ^0.8.24;
|
||||
|
||||
/**
|
||||
* @dev Library for reading and writing primitive types to specific storage slots.
|
||||
*
|
||||
* Storage slots are often used to avoid storage conflict when dealing with upgradeable contracts.
|
||||
* This library helps with reading and writing to such slots without the need for inline assembly.
|
||||
*
|
||||
* The functions in this library return Slot structs that contain a \`value\` member that can be used to read or write.
|
||||
*
|
||||
* Example usage to set ERC-1967 implementation slot:
|
||||
* \`\`\`solidity
|
||||
* contract ERC1967 {
|
||||
* // Define the slot. Alternatively, use the SlotDerivation library to derive the slot.
|
||||
* bytes32 internal constant _IMPLEMENTATION_SLOT = 0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc;
|
||||
*
|
||||
* function _getImplementation() internal view returns (address) {
|
||||
* return StorageSlot.getAddressSlot(_IMPLEMENTATION_SLOT).value;
|
||||
* }
|
||||
*
|
||||
* function _setImplementation(address newImplementation) internal {
|
||||
* require(newImplementation.code.length > 0);
|
||||
* StorageSlot.getAddressSlot(_IMPLEMENTATION_SLOT).value = newImplementation;
|
||||
* }
|
||||
* }
|
||||
* \`\`\`
|
||||
*
|
||||
* Since version 5.1, this library also support writing and reading value types to and from transient storage.
|
||||
*
|
||||
* * Example using transient storage:
|
||||
* \`\`\`solidity
|
||||
* contract Lock {
|
||||
* // Define the slot. Alternatively, use the SlotDerivation library to derive the slot.
|
||||
* bytes32 internal constant _LOCK_SLOT = 0xf4678858b2b588224636b8522b729e7722d32fc491da849ed75b3fdf3c84f542;
|
||||
*
|
||||
* modifier locked() {
|
||||
* require(!_LOCK_SLOT.asBoolean().tload());
|
||||
*
|
||||
* _LOCK_SLOT.asBoolean().tstore(true);
|
||||
* _;
|
||||
* _LOCK_SLOT.asBoolean().tstore(false);
|
||||
* }
|
||||
* }
|
||||
* \`\`\`
|
||||
*
|
||||
* TIP: Consider using this library along with {SlotDerivation}.
|
||||
*/
|
||||
`;
|
||||
|
||||
const struct = ({ type, name }) => `\
|
||||
struct ${name}Slot {
|
||||
${type} value;
|
||||
}
|
||||
`;
|
||||
|
||||
const get = ({ name }) => `\
|
||||
/**
|
||||
* @dev Returns an \`${name}Slot\` with member \`value\` located at \`slot\`.
|
||||
*/
|
||||
function get${name}Slot(bytes32 slot) internal pure returns (${name}Slot storage r) {
|
||||
/// @solidity memory-safe-assembly
|
||||
assembly {
|
||||
r.slot := slot
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const getStorage = ({ type, name }) => `\
|
||||
/**
|
||||
* @dev Returns an \`${name}Slot\` representation of the ${type} storage pointer \`store\`.
|
||||
*/
|
||||
function get${name}Slot(${type} storage store) internal pure returns (${name}Slot storage r) {
|
||||
/// @solidity memory-safe-assembly
|
||||
assembly {
|
||||
r.slot := store.slot
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const udvt = ({ type, name }) => `\
|
||||
/**
|
||||
* @dev UDVT that represent a slot holding a ${type}.
|
||||
*/
|
||||
type ${name}SlotType is bytes32;
|
||||
/**
|
||||
* @dev Cast an arbitrary slot to a ${name}SlotType.
|
||||
*/
|
||||
function as${name}(bytes32 slot) internal pure returns (${name}SlotType) {
|
||||
return ${name}SlotType.wrap(slot);
|
||||
}
|
||||
`;
|
||||
|
||||
const transient = ({ type, name }) => `\
|
||||
/**
|
||||
* @dev Load the value held at location \`slot\` in transient storage.
|
||||
*/
|
||||
function tload(${name}SlotType slot) internal view returns (${type} value) {
|
||||
/// @solidity memory-safe-assembly
|
||||
assembly {
|
||||
value := tload(slot)
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @dev Store \`value\` at location \`slot\` in transient storage.
|
||||
*/
|
||||
function tstore(${name}SlotType slot, ${type} value) internal {
|
||||
/// @solidity memory-safe-assembly
|
||||
assembly {
|
||||
tstore(slot, value)
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
// GENERATE
|
||||
module.exports = format(
|
||||
header.trimEnd(),
|
||||
'library StorageSlot {',
|
||||
TYPES.map(type => struct(type)),
|
||||
TYPES.flatMap(type => [get(type), type.isValueType ? '' : getStorage(type)]),
|
||||
TYPES.filter(type => type.isValueType).map(type => udvt(type)),
|
||||
TYPES.filter(type => type.isValueType).map(type => transient(type)),
|
||||
'}',
|
||||
);
|
||||
@@ -0,0 +1,65 @@
|
||||
const format = require('../format-lines');
|
||||
const { TYPES } = require('./Slot.opts');
|
||||
|
||||
const header = `\
|
||||
pragma solidity ^0.8.24;
|
||||
|
||||
import {Multicall} from "../utils/Multicall.sol";
|
||||
import {StorageSlot} from "../utils/StorageSlot.sol";
|
||||
`;
|
||||
|
||||
const storageSetValueType = ({ type, name }) => `\
|
||||
function set${name}Slot(bytes32 slot, ${type} value) public {
|
||||
slot.get${name}Slot().value = value;
|
||||
}
|
||||
`;
|
||||
|
||||
const storageGetValueType = ({ type, name }) => `\
|
||||
function get${name}Slot(bytes32 slot) public view returns (${type}) {
|
||||
return slot.get${name}Slot().value;
|
||||
}
|
||||
`;
|
||||
|
||||
const storageSetNonValueType = ({ type, name }) => `\
|
||||
mapping(uint256 key => ${type}) public ${type}Map;
|
||||
|
||||
function set${name}Slot(bytes32 slot, ${type} calldata value) public {
|
||||
slot.get${name}Slot().value = value;
|
||||
}
|
||||
|
||||
function set${name}Storage(uint256 key, ${type} calldata value) public {
|
||||
${type}Map[key].get${name}Slot().value = value;
|
||||
}
|
||||
|
||||
function get${name}Slot(bytes32 slot) public view returns (${type} memory) {
|
||||
return slot.get${name}Slot().value;
|
||||
}
|
||||
|
||||
function get${name}Storage(uint256 key) public view returns (${type} memory) {
|
||||
return ${type}Map[key].get${name}Slot().value;
|
||||
}
|
||||
`;
|
||||
|
||||
const transient = ({ type, name }) => `\
|
||||
event ${name}Value(bytes32 slot, ${type} value);
|
||||
|
||||
function tload${name}(bytes32 slot) public {
|
||||
emit ${name}Value(slot, slot.as${name}().tload());
|
||||
}
|
||||
|
||||
function tstore(bytes32 slot, ${type} value) public {
|
||||
slot.as${name}().tstore(value);
|
||||
}
|
||||
`;
|
||||
|
||||
// GENERATE
|
||||
module.exports = format(
|
||||
header.trimEnd(),
|
||||
'contract StorageSlotMock is Multicall {',
|
||||
'using StorageSlot for *;',
|
||||
TYPES.filter(type => type.isValueType).map(type => storageSetValueType(type)),
|
||||
TYPES.filter(type => type.isValueType).map(type => storageGetValueType(type)),
|
||||
TYPES.filter(type => !type.isValueType).map(type => storageSetNonValueType(type)),
|
||||
TYPES.filter(type => type.isValueType).map(type => transient(type)),
|
||||
'}',
|
||||
);
|
||||
@@ -0,0 +1,30 @@
|
||||
/**
 * Produce the Solidity expression that casts `value` (an expression of the
 * given elementary `type`) to bytes32, for use in generated wrapper code.
 *
 * @param {string} type - 'bytes32', 'uint256' or 'address'
 * @param {string} value - Solidity expression to cast
 * @returns {string} Solidity cast expression
 * @throws {Error} when `type` has no supported conversion
 */
function toBytes32(type, value) {
  if (type === 'bytes32') return value;
  if (type === 'uint256') return `bytes32(${value})`;
  // address is 160 bits: widen to uint256 before reinterpreting as bytes32
  if (type === 'address') return `bytes32(uint256(uint160(${value})))`;
  throw new Error(`Conversion from ${type} to bytes32 not supported`);
}
|
||||
|
||||
/**
 * Produce the Solidity expression that casts `value` (an expression of type
 * bytes32) back to the requested elementary `type` — the inverse of toBytes32.
 *
 * @param {string} type - 'bytes32', 'uint256' or 'address'
 * @param {string} value - Solidity bytes32 expression to cast
 * @returns {string} Solidity cast expression
 * @throws {Error} when `type` has no supported conversion
 */
function fromBytes32(type, value) {
  if (type === 'bytes32') return value;
  if (type === 'uint256') return `uint256(${value})`;
  // narrow through uint256 -> uint160 before reinterpreting as address
  if (type === 'address') return `address(uint160(uint256(${value})))`;
  throw new Error(`Conversion from bytes32 to ${type} not supported`);
}
|
||||
|
||||
// Cast helpers shared by the typed-wrapper templates (e.g. EnumerableSet).
module.exports = {
  toBytes32,
  fromBytes32,
};
|
||||
6
lib_openzeppelin_contracts/scripts/git-user-config.sh
Normal file
6
lib_openzeppelin_contracts/scripts/git-user-config.sh
Normal file
@@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env bash

# Configure the git identity used by CI jobs for automated commits.
# -x additionally traces each command, making the CI log self-explanatory.
set -euo pipefail -x

git config user.name 'github-actions'
git config user.email '41898282+github-actions[bot]@users.noreply.github.com'
|
||||
7
lib_openzeppelin_contracts/scripts/helpers.js
Normal file
7
lib_openzeppelin_contracts/scripts/helpers.js
Normal file
@@ -0,0 +1,7 @@
|
||||
// Re-export the shared iteration and string helpers from the test suite so
// that generation and release scripts can require them from a single module.
const iterate = require('../test/helpers/iterate');
const strings = require('../test/helpers/strings');

module.exports = {
  ...iterate,
  ...strings,
};
|
||||
23
lib_openzeppelin_contracts/scripts/prepack.sh
Executable file
23
lib_openzeppelin_contracts/scripts/prepack.sh
Executable file
@@ -0,0 +1,23 @@
|
||||
#!/usr/bin/env bash

# Prepare the contracts/ directory for `npm pack`: clean, compile in
# production mode, and collect the compiled artifacts.

set -euo pipefail
shopt -s globstar

# cross platform `mkdir -p`
mkdirp() {
  node -e "fs.mkdirSync('$1', { recursive: true })"
}

# cd to the root of the repo
cd "$(git rev-parse --show-toplevel)"

npm run clean

env COMPILE_MODE=production npm run compile

# Collect compiled artifacts into the package, dropping debug files and
# artifacts of ignored contracts.
mkdirp contracts/build/contracts
cp artifacts/contracts/**/*.json contracts/build/contracts
rm contracts/build/contracts/*.dbg.json
node scripts/remove-ignored-artifacts.js

cp README.md contracts/
|
||||
26
lib_openzeppelin_contracts/scripts/prepare-docs.sh
Executable file
26
lib_openzeppelin_contracts/scripts/prepare-docs.sh
Executable file
@@ -0,0 +1,26 @@
|
||||
#!/usr/bin/env bash

# Regenerate the API documentation and the examples shipped with it.

set -euo pipefail
shopt -s globstar

# Output directory of docgen, taken from the docs site config.
OUTDIR="$(node -p 'require("./docs/config.js").outputDir')"

if [ ! -d node_modules ]; then
  npm ci
fi

rm -rf "$OUTDIR"

hardhat docgen

# copy examples and adjust imports
examples_source_dir="contracts/mocks/docs"
examples_target_dir="docs/modules/api/examples"

for f in "$examples_source_dir"/**/*.sol; do
  # Path of the example relative to the source directory.
  name="${f/#"$examples_source_dir/"/}"
  mkdir -p "$examples_target_dir/$(dirname "$name")"
  # Rewrite relative `../../` imports to the published package path.
  sed -Ee '/^import/s|"(\.\./)+|"@openzeppelin/contracts/|' "$f" > "$examples_target_dir/$name"
done

node scripts/gen-nav.js "$OUTDIR" > "$OUTDIR/../nav.adoc"
|
||||
33
lib_openzeppelin_contracts/scripts/release/format-changelog.js
Executable file
33
lib_openzeppelin_contracts/scripts/release/format-changelog.js
Executable file
@@ -0,0 +1,33 @@
|
||||
#!/usr/bin/env node

// Adjusts the format of the changelog that changesets generates.
// This is run automatically when npm version is run.

const fs = require('fs');
const changelog = fs.readFileSync('CHANGELOG.md', 'utf8');

// Groups:
// - 1: Pull Request Number and URL
// - 2: Changeset entry
const RELEASE_LINE_REGEX = /^- (\[#.*?\]\(.*?\))?.*?! - (.*)$/gm;

// Captures vX.Y.Z or vX.Y.Z-rc.W
const VERSION_TITLE_REGEX = /^## (\d+\.\d+\.\d+(-rc\.\d+)?)$/gm;

const isPrerelease = process.env.PRERELEASE === 'true';

// Apply all rewrites in one chained pass over the changelog text.
const formatted = changelog
  // Remove titles
  .replace(/^### Major Changes\n\n/gm, '')
  .replace(/^### Minor Changes\n\n/gm, '')
  .replace(/^### Patch Changes\n\n/gm, '')
  // Remove extra whitespace between items
  .replace(/^(- \[.*\n)\n(?=-)/gm, '$1')
  // Format each release line, moving the PR link (if any) to the end
  .replace(RELEASE_LINE_REGEX, (_, pr, entry) => (pr ? `- ${entry} (${pr})` : `- ${entry}`))
  // Add date to new version
  .replace(VERSION_TITLE_REGEX, `\n## $1 (${new Date().toISOString().split('T')[0]})`)
  // Conditionally allow vX.Y.Z-rc.W sections only in prerelease
  .replace(/^## \d\.\d\.\d-rc\S+[^]+?(?=^#)/gm, section => (isPrerelease ? section : ''));

fs.writeFileSync('CHANGELOG.md', formatted);
|
||||
15
lib_openzeppelin_contracts/scripts/release/synchronize-versions.js
Executable file
15
lib_openzeppelin_contracts/scripts/release/synchronize-versions.js
Executable file
@@ -0,0 +1,15 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
// Synchronizes the version in contracts/package.json with the one in package.json.
|
||||
// This is run automatically when npm version is run.
|
||||
|
||||
const fs = require('fs');
|
||||
|
||||
setVersion('package.json', 'contracts/package.json');
|
||||
|
||||
// Copy the `version` field of package.json `from` into `to`, leaving the rest
// of `to` untouched and writing it back with 2-space indentation plus a
// trailing newline (matching npm's own formatting).
function setVersion(from, to) {
  const fromJson = JSON.parse(fs.readFileSync(from));
  const toJson = JSON.parse(fs.readFileSync(to));
  toJson.version = fromJson.version;
  fs.writeFileSync(to, JSON.stringify(toJson, null, 2) + '\n');
}
|
||||
34
lib_openzeppelin_contracts/scripts/release/update-comment.js
Executable file
34
lib_openzeppelin_contracts/scripts/release/update-comment.js
Executable file
@@ -0,0 +1,34 @@
|
||||
#!/usr/bin/env node
// Rewrite the "last updated vX.Y.Z" marker comment in every contract changed
// since the previous release tag. Run automatically by the version script.
const fs = require('fs');
const proc = require('child_process');
const semver = require('semver');
// Run a command and return its trimmed stdout.
const run = (cmd, ...args) => proc.execFileSync(cmd, args, { encoding: 'utf8' }).trim();

// Refuse to run over uncommitted contract changes: this script edits files in
// place and a dirty tree would mix its output with unrelated edits.
const gitStatus = run('git', 'status', '--porcelain', '-uno', 'contracts/**/*.sol');
if (gitStatus.length > 0) {
  console.error('Contracts directory is not clean');
  process.exit(1);
}

const { version } = require('../../package.json');

// Get latest tag according to semver.
const [tag] = run('git', 'tag')
  .split(/\r?\n/)
  .filter(semver.coerce) // check version can be processed
  .filter(v => semver.satisfies(v, `< ${version}`)) // ignores prereleases unless currently a prerelease
  .sort(semver.rcompare);

// Ordering tag → HEAD is important here.
const files = run('git', 'diff', tag, 'HEAD', '--name-only', 'contracts/**/*.sol')
  .split(/\r?\n/)
  .filter(file => file && !file.match(/mock/i) && fs.existsSync(file));

// Insert (or refresh, via the optional second group) the marker comment right
// below the SPDX line of each changed contract.
for (const file of files) {
  const current = fs.readFileSync(file, 'utf8');
  const updated = current.replace(
    /(\/\/ SPDX-License-Identifier:.*)$(\n\/\/ OpenZeppelin Contracts .*$)?/m,
    `$1\n// OpenZeppelin Contracts (last updated v${version}) (${file.replace('contracts/', '')})`,
  );
  fs.writeFileSync(file, updated);
}
|
||||
11
lib_openzeppelin_contracts/scripts/release/version.sh
Executable file
11
lib_openzeppelin_contracts/scripts/release/version.sh
Executable file
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env bash

set -euo pipefail

# Bump versions from pending changesets, then run the follow-up scripts that
# keep the changelog, the nested package.json, and source comments in sync.
changeset version

scripts/release/format-changelog.js
scripts/release/synchronize-versions.js
scripts/release/update-comment.js

# Update the version referenced by the documentation site.
oz-docs update-version
|
||||
@@ -0,0 +1,8 @@
|
||||
#!/usr/bin/env bash

set -euo pipefail

# Leave changesets "pre" (release-candidate) mode and push the resulting
# bookkeeping changes back to the repository.
npx changeset pre exit rc
git add .
git commit -m "Exit release candidate"
git push origin
|
||||
@@ -0,0 +1,48 @@
|
||||
const { readFileSync } = require('fs');
|
||||
const { join } = require('path');
|
||||
const { version } = require(join(__dirname, '../../../package.json'));
|
||||
|
||||
// Entrypoint for actions/github-script: create a GitHub release for the
// current package version, using the matching CHANGELOG section as the body.
module.exports = async ({ github, context }) => {
  const changelog = readFileSync('CHANGELOG.md', 'utf8');

  await github.rest.repos.createRelease({
    owner: context.repo.owner,
    repo: context.repo.repo,
    tag_name: `v${version}`,
    // NOTE(review): `ref_name` normally lives on the workflow `github`
    // context, while the `github` argument here is the API client — confirm
    // this field actually resolves (an undefined value falls back to the
    // repository's default branch).
    target_commitish: github.ref_name,
    body: extractSection(changelog, version),
    prerelease: process.env.PRERELEASE === 'true',
  });
};
|
||||
|
||||
// From https://github.com/frangio/extract-changelog/blob/master/src/utils/word-regexp.ts
|
||||
function makeWordRegExp(word) {
|
||||
const start = word.length > 0 && /\b/.test(word[0]) ? '\\b' : '';
|
||||
const end = word.length > 0 && /\b/.test(word[word.length - 1]) ? '\\b' : '';
|
||||
return new RegExp(start + [...word].map(c => (/[a-z0-9]/i.test(c) ? c : '\\' + c)).join('') + end);
|
||||
}
|
||||
|
||||
// From https://github.com/frangio/extract-changelog/blob/master/src/core.ts
/**
 * Extract the body of the markdown section whose heading text starts with
 * `wantedHeading`. The section runs until the next heading of the same or
 * higher level. Returns undefined when no matching heading exists.
 */
function extractSection(document, wantedHeading) {
  // ATX Headings as defined in GitHub Flavored Markdown (https://github.github.com/gfm/#atx-headings)
  const heading = /^ {0,3}(?<lead>#{1,6})(?: [ \t\v\f]*(?<text>.*?)[ \t\v\f]*)?(?:[\n\r]+|$)/gm;

  const wantedHeadingRe = makeWordRegExp(wantedHeading);

  let start, end;

  for (const m of document.matchAll(heading)) {
    if (!start) {
      // The `text` group is optional: a bare "##" heading leaves it undefined,
      // so guard with ?. to avoid a TypeError on such headings.
      if (m.groups.text?.search(wantedHeadingRe) === 0) {
        start = m;
      }
    } else if (m.groups.lead.length <= start.groups.lead.length) {
      end = m;
      break;
    }
  }

  if (start) {
    return document.slice(start.index + start[0].length, end?.index);
  }
}
|
||||
@@ -0,0 +1,20 @@
|
||||
#!/usr/bin/env bash

# Verify that the Solidity sources inside the packed tarball are byte-for-byte
# identical to the ones in the git checkout before publishing.

set -euo pipefail

CHECKSUMS="$RUNNER_TEMP/checksums.txt"

# Extract tarball content into a tmp directory
tar xf "$TARBALL" -C "$RUNNER_TEMP"

# Move to extracted directory
cd "$RUNNER_TEMP/package"

# Checksum all Solidity files
find . -type f -name "*.sol" | xargs shasum > "$CHECKSUMS"

# Back to directory with git contents
cd "$GITHUB_WORKSPACE/contracts"

# Check against tarball contents
shasum -c "$CHECKSUMS"
|
||||
26
lib_openzeppelin_contracts/scripts/release/workflow/pack.sh
Normal file
26
lib_openzeppelin_contracts/scripts/release/workflow/pack.sh
Normal file
@@ -0,0 +1,26 @@
|
||||
#!/usr/bin/env bash

# Packs the contracts package with `npm pack` and emits the tarball name/path
# and the npm dist-tag to publish under via $GITHUB_OUTPUT.
# Expects PRERELEASE and GITHUB_OUTPUT in the environment.

set -euo pipefail

# Decide which dist-tag this version should be published under:
# - "next" for prereleases
# - "latest" when the version is newer than the one currently on npm
# - "tmp" for back-patches of older versions (npm requires some tag)
dist_tag() {
  PACKAGE_JSON_NAME="$(jq -r .name ./package.json)"
  LATEST_NPM_VERSION="$(npm info "$PACKAGE_JSON_NAME" version)"
  PACKAGE_JSON_VERSION="$(jq -r .version ./package.json)"

  if [ "$PRERELEASE" = "true" ]; then
    echo "next"
  elif npx semver -r ">$LATEST_NPM_VERSION" "$PACKAGE_JSON_VERSION" > /dev/null; then
    echo "latest"
  else
    # This is a patch for an older version
    # npm can't publish without a tag
    echo "tmp"
  fi
}

cd contracts
# tee to stderr so the pack log stays visible; the last stdout line is the tarball name
TARBALL="$(npm pack | tee /dev/stderr | tail -1)"
# Quote $GITHUB_OUTPUT: an unquoted redirect target breaks if the path contains spaces
echo "tarball_name=$TARBALL" >> "$GITHUB_OUTPUT"
echo "tarball=$(pwd)/$TARBALL" >> "$GITHUB_OUTPUT"
echo "tag=$(dist_tag)" >> "$GITHUB_OUTPUT"
cd ..
|
||||
@@ -0,0 +1,26 @@
|
||||
#!/usr/bin/env bash

# Publishes the packed tarball to npm under dist-tag $TAG, then cleans up
# temporary/prerelease dist-tags. Expects TARBALL, TAG and NPM_TOKEN in the
# environment (NPM_TOKEN is consumed by npm itself, see below).

set -euo pipefail

# Read the package identity from the tarball itself, not from the worktree
PACKAGE_JSON_NAME="$(tar xfO "$TARBALL" package/package.json | jq -r .name)"
PACKAGE_JSON_VERSION="$(tar xfO "$TARBALL" package/package.json | jq -r .version)"

# Intentionally escape $ to avoid interpolation and writing the token to disk
echo "//registry.npmjs.org/:_authToken=\${NPM_TOKEN}" > .npmrc

# Actual publish
npm publish "$TARBALL" --tag "$TAG"

# Clean up tags
delete_tag() {
  npm dist-tag rm "$PACKAGE_JSON_NAME" "$1"
}

if [ "$TAG" = tmp ]; then
  # "tmp" only exists because npm can't publish untagged; drop it right away
  delete_tag "$TAG"
elif [ "$TAG" = latest ]; then
  # Delete the next tag if it exists and is a prerelease for what is currently being published
  if npm dist-tag ls "$PACKAGE_JSON_NAME" | grep -q "next: $PACKAGE_JSON_VERSION"; then
    delete_tag next
  fi
fi
|
||||
@@ -0,0 +1,7 @@
|
||||
module.exports = ({ github, context }) =>
|
||||
github.rest.actions.createWorkflowDispatch({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
workflow_id: 'release-cycle.yml',
|
||||
ref: process.env.REF || process.env.GITHUB_REF_NAME,
|
||||
});
|
||||
@@ -0,0 +1,17 @@
|
||||
const { coerce, inc, rsort } = require('semver');
|
||||
const { join } = require('path');
|
||||
const { version } = require(join(__dirname, '../../../package.json'));
|
||||
|
||||
module.exports = async ({ core }) => {
|
||||
// Variables not in the context
|
||||
const refName = process.env.GITHUB_REF_NAME;
|
||||
|
||||
// Compare package.json version's next patch vs. first version patch
|
||||
// A recently opened branch will give the next patch for the previous minor
|
||||
// So, we get the max against the patch 0 of the release branch's version
|
||||
const branchPatch0 = coerce(refName.replace('release-v', '')).version;
|
||||
const packageJsonNextPatch = inc(version, 'patch');
|
||||
const [nextVersion] = rsort([branchPatch0, packageJsonNextPatch], false);
|
||||
|
||||
core.exportVariable('TITLE', `Release v${nextVersion}`);
|
||||
};
|
||||
35
lib_openzeppelin_contracts/scripts/release/workflow/start.sh
Normal file
35
lib_openzeppelin_contracts/scripts/release/workflow/start.sh
Normal file
@@ -0,0 +1,35 @@
|
||||
#!/usr/bin/env bash

# Starts a release: creates the release-vX.Y branch from the pending
# changesets, enters changesets prerelease (rc) mode, and pushes the branch.
# Expects RUNNER_TEMP and GITHUB_OUTPUT in the environment.

set -euo pipefail

# Set changeset status location
# This is needed because `changeset status --output` only works with relative routes
CHANGESETS_STATUS_JSON="$(realpath --relative-to=. "$RUNNER_TEMP/status.json")"

# Save changeset status to temp file
npx changeset status --output="$CHANGESETS_STATUS_JSON"

# Defensive assertion. SHOULD NOT BE REACHED
if [ "$(jq '.releases | length' "$CHANGESETS_STATUS_JSON")" != 1 ]; then
  echo "::error file=$CHANGESETS_STATUS_JSON::The status doesn't contain only 1 release"
  exit 1;
fi;

# Create branch named after the new major.minor (patch component stripped)
# Quote the path argument so paths with spaces survive word splitting
BRANCH_SUFFIX="$(jq -r '.releases[0].newVersion | gsub("\\.\\d+$"; "")' "$CHANGESETS_STATUS_JSON")"
RELEASE_BRANCH="release-v$BRANCH_SUFFIX"
git checkout -b "$RELEASE_BRANCH"

# Output branch
echo "branch=$RELEASE_BRANCH" >> "$GITHUB_OUTPUT"

# Enter in prerelease state
npx changeset pre enter rc
git add .
git commit -m "Start release candidate"

# Push branch
if ! git push origin "$RELEASE_BRANCH"; then
  echo "::error file=scripts/release/start.sh::Can't push $RELEASE_BRANCH. Did you forget to run this workflow from $RELEASE_BRANCH?"
  exit 1
fi
|
||||
112
lib_openzeppelin_contracts/scripts/release/workflow/state.js
Normal file
112
lib_openzeppelin_contracts/scripts/release/workflow/state.js
Normal file
@@ -0,0 +1,112 @@
|
||||
const { readPreState } = require('@changesets/pre');
|
||||
const { default: readChangesets } = require('@changesets/read');
|
||||
const { join } = require('path');
|
||||
const { fetch } = require('undici');
|
||||
const { version, name: packageName } = require(join(__dirname, '../../../contracts/package.json'));
|
||||
|
||||
module.exports = async ({ github, context, core }) => {
|
||||
const state = await getState({ github, context, core });
|
||||
|
||||
function setOutput(key, value) {
|
||||
core.info(`State ${key} = ${value}`);
|
||||
core.setOutput(key, value);
|
||||
}
|
||||
|
||||
// Jobs to trigger
|
||||
setOutput('start', shouldRunStart(state));
|
||||
setOutput('promote', shouldRunPromote(state));
|
||||
setOutput('changesets', shouldRunChangesets(state));
|
||||
setOutput('publish', shouldRunPublish(state));
|
||||
setOutput('merge', shouldRunMerge(state));
|
||||
|
||||
// Global Variables
|
||||
setOutput('is_prerelease', state.prerelease);
|
||||
};
|
||||
|
||||
// A "start" run is a human-triggered workflow_dispatch from master.
function shouldRunStart({ isMaster, isWorkflowDispatch, botRun }) {
  if (botRun) return false;
  return isMaster && isWorkflowDispatch;
}
|
||||
|
||||
// A "promote" run is a human-triggered workflow_dispatch from a release branch.
function shouldRunPromote({ isReleaseBranch, isWorkflowDispatch, botRun }) {
  if (botRun) return false;
  return isReleaseBranch && isWorkflowDispatch;
}
|
||||
|
||||
// Changesets run on every push to a release branch, or when the bot
// re-dispatches the workflow on a release branch.
function shouldRunChangesets({ isReleaseBranch, isPush, isWorkflowDispatch, botRun }) {
  return isReleaseBranch && (isPush || (isWorkflowDispatch && botRun));
}
|
||||
|
||||
// Publish on a release-branch push, unless there are still pending changesets
// or the version is already on npm.
function shouldRunPublish({ isReleaseBranch, isPush, hasPendingChangesets, isPublishedOnNpm }) {
  const blocked = hasPendingChangesets || isPublishedOnNpm;
  return isReleaseBranch && isPush && !blocked;
}
|
||||
|
||||
// Merge back into master only after a final (non-rc) version has been pushed
// to the release branch with nothing pending and no merge-back PR open yet.
function shouldRunMerge({
  isReleaseBranch,
  isPush,
  prerelease,
  isCurrentFinalVersion,
  hasPendingChangesets,
  prBackExists,
}) {
  const blockers = [
    !isReleaseBranch,
    !isPush,
    prerelease,
    !isCurrentFinalVersion,
    hasPendingChangesets,
    prBackExists,
  ];
  return !blockers.some(Boolean);
}
|
||||
|
||||
/**
 * Collects the release-cycle state used by the shouldRun* predicates.
 *
 * @param {object} opts - actions/github-script context ({ github, context, core }).
 * @returns {Promise<object>} flags describing the current ref, triggering
 *   event, pending changesets, merge-back PR, and npm publication status.
 */
async function getState({ github, context, core }) {
  // Variables not in the context
  const refName = process.env.GITHUB_REF_NAME;
  // Distinguish bot-triggered re-runs from human-triggered ones
  const botRun = process.env.TRIGGERING_ACTOR === 'github-actions[bot]';

  const { changesets, preState } = await readChangesetState();

  // Static vars
  const state = {
    refName,
    hasPendingChangesets: changesets.length > 0,
    prerelease: preState?.mode === 'pre',
    isMaster: refName === 'master',
    isReleaseBranch: refName.startsWith('release-v'),
    isWorkflowDispatch: context.eventName === 'workflow_dispatch',
    isPush: context.eventName === 'push',
    // `version` comes from contracts/package.json (required at module top)
    isCurrentFinalVersion: !version.includes('-rc.'),
    botRun,
  };

  // Async vars
  // Look for an open PR that merges this release branch back into master
  const { data: prs } = await github.rest.pulls.list({
    owner: context.repo.owner,
    repo: context.repo.repo,
    head: `${context.repo.owner}:merge/${state.refName}`,
    base: 'master',
    state: 'open',
  });

  state.prBackExists = prs.length !== 0;

  state.isPublishedOnNpm = await isPublishedOnNpm(packageName, version);

  // Log every state value in debug mode
  if (core.isDebug()) for (const [key, value] of Object.entries(state)) core.debug(`${key}: ${value}`);

  return state;
}
|
||||
|
||||
// From https://github.com/changesets/action/blob/v1.4.1/src/readChangesetState.ts
// Reads the changesets pre-release state and the list of pending changesets.
async function readChangesetState(cwd = process.cwd()) {
  const preState = await readPreState(cwd);
  const inPreMode = preState !== undefined && preState.mode === 'pre';

  const allChangesets = await readChangesets(cwd);
  // In pre mode, changesets already consumed by the prerelease are not pending
  const pending = inPreMode
    ? allChangesets.filter(cs => !preState.changesets.includes(cs.id))
    : allChangesets;

  return {
    preState: inPreMode ? preState : undefined,
    changesets: pending,
  };
}
|
||||
|
||||
// Checks whether an exact name@version is already on the npm registry
// (the registry returns a non-2xx status for unknown versions).
// NOTE: the parameter was renamed from `package`, which is a reserved word
// and a SyntaxError in strict mode / ES modules.
async function isPublishedOnNpm(name, version) {
  const res = await fetch(`https://registry.npmjs.com/${name}/${version}`);
  return res.ok;
}
|
||||
@@ -0,0 +1,45 @@
|
||||
#!/usr/bin/env node

// This script removes the build artifacts of ignored contracts.
// "Ignored" means matched by a negated pattern (`!...`) in the `files`
// field of package.json.

const fs = require('fs');
const path = require('path');
const match = require('micromatch');

// Read and parse a JSON file from disk.
function readJSON(path) {
  return JSON.parse(fs.readFileSync(path));
}

const pkgFiles = readJSON('package.json').files;

// Get only negated patterns.
const ignorePatterns = pkgFiles
  .filter(pat => pat.startsWith('!'))
  // Remove the negation part. Makes micromatch usage more intuitive.
  .map(pat => pat.slice(1));

const ignorePatternsSubtrees = ignorePatterns
  // Add **/* to ignore all files contained in the directories.
  .concat(ignorePatterns.map(pat => path.join(pat, '**/*')))
  // Strip any leading slash so patterns match the relative source paths
  .map(p => p.replace(/^\//, ''));

const artifactsDir = 'contracts/build/contracts';
const buildinfo = 'artifacts/build-info';
const filenames = fs.readdirSync(buildinfo);

// Count of deleted artifact files, reported at the end
let n = 0;

for (const filename of filenames) {
  // Each build-info file carries the full solc output for one compilation
  const solcOutput = readJSON(path.join(buildinfo, filename)).output;
  for (const sourcePath in solcOutput.contracts) {
    const ignore = match.any(sourcePath, ignorePatternsSubtrees);
    if (ignore) {
      // Delete the per-contract artifact for every contract in this source
      for (const contract in solcOutput.contracts[sourcePath]) {
        fs.unlinkSync(path.join(artifactsDir, contract + '.json'));
        n += 1;
      }
    }
  }
}

console.error(`Removed ${n} mock artifacts`);
|
||||
84
lib_openzeppelin_contracts/scripts/solhint-custom/index.js
Normal file
84
lib_openzeppelin_contracts/scripts/solhint-custom/index.js
Normal file
@@ -0,0 +1,84 @@
|
||||
const path = require('path');
|
||||
const minimatch = require('minimatch');
|
||||
|
||||
// Files matching these patterns will be ignored unless a rule has `static global = true`
|
||||
const ignore = ['contracts/mocks/**/*', 'test/**/*'];
|
||||
|
||||
// Shared base for the custom solhint rules below. A rule instance stays
// silent for files in the ignore list unless the rule sets `static global = true`.
class Base {
  constructor(reporter, config, source, fileName) {
    this.reporter = reporter;
    const normalizedPath = path.normalize(fileName);
    this.ignored =
      this.constructor.global || ignore.some(pattern => minimatch(normalizedPath, pattern));
    this.ruleId = this.constructor.ruleId;
    if (this.ruleId === undefined) {
      throw Error('missing ruleId static property');
    }
  }

  // Report `message` at `node`, unless this file is ignored.
  error(node, message) {
    if (this.ignored) {
      return;
    }
    this.reporter.error(node, this.ruleId, message);
  }
}
|
||||
|
||||
// Custom solhint rules enforcing repository naming/visibility conventions.
module.exports = [
  // Interfaces must be named I<Something>.
  class extends Base {
    static ruleId = 'interface-names';

    ContractDefinition(node) {
      if (node.kind === 'interface' && !/^I[A-Z]/.test(node.name)) {
        this.error(node, 'Interface names should have a capital I prefix');
      }
    }
  },

  // Mutable state variables must be declared private.
  class extends Base {
    static ruleId = 'private-variables';

    VariableDeclaration(node) {
      const constantOrImmutable = node.isDeclaredConst || node.isImmutable;
      if (node.isStateVar && !constantOrImmutable && node.visibility !== 'private') {
        this.error(node, 'State variables must be private');
      }
    }
  },

  // Leading-underscore conventions for variables and functions.
  class extends Base {
    static ruleId = 'leading-underscore';

    VariableDeclaration(node) {
      if (node.isDeclaredConst) {
        // TODO: expand visibility and fix
        if (node.visibility === 'private' && /^_/.test(node.name)) {
          this.error(node, 'Constant variables should not have leading underscore');
        }
      } else if (node.visibility === 'private' && !/^_/.test(node.name)) {
        this.error(node, 'Non-constant private variables must have leading underscore');
      }
    }

    FunctionDefinition(node) {
      // Private/internal functions get an underscore, except in libraries
      if (node.visibility === 'private' || (node.visibility === 'internal' && node.parent.kind !== 'library')) {
        if (!/^_/.test(node.name)) {
          this.error(node, 'Private and internal functions must have leading underscore');
        }
      }
      if (node.visibility === 'internal' && node.parent.kind === 'library') {
        if (/^_/.test(node.name)) {
          this.error(node, 'Library internal functions should not have leading underscore');
        }
      }
    }
  },

  // TODO: re-enable and fix
  // class extends Base {
  //   static ruleId = 'no-external-virtual';
  //
  //   FunctionDefinition(node) {
  //     if (node.visibility == 'external' && node.isVirtual) {
  //       this.error(node, 'Functions should not be external and virtual');
  //     }
  //   }
  // },
];
|
||||
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"name": "solhint-plugin-openzeppelin",
|
||||
"version": "0.0.0",
|
||||
"private": true
|
||||
}
|
||||
65
lib_openzeppelin_contracts/scripts/update-docs-branch.js
Normal file
65
lib_openzeppelin_contracts/scripts/update-docs-branch.js
Normal file
@@ -0,0 +1,65 @@
|
||||
// Updates (or creates) the docs-vX.x branch corresponding to the release
// branch currently checked out: regenerates the docs, commits them on the
// docs branch, and returns to the original branch.

const proc = require('child_process');
// Run a command and return its trimmed stdout.
const read = cmd => proc.execSync(cmd, { encoding: 'utf8' }).trim();
// Run a command, inheriting stdio (output goes straight to the console).
const run = cmd => {
  proc.execSync(cmd, { stdio: 'inherit' });
};
// Like `read`, but returns undefined instead of throwing on failure.
const tryRead = cmd => {
  try {
    return read(cmd);
  } catch (e) {
    return undefined;
  }
};

const releaseBranchRegex = /^release-v(?<version>(?<major>\d+)\.(?<minor>\d+)(?:\.(?<patch>\d+))?)$/;

const currentBranch = read('git rev-parse --abbrev-ref HEAD');
const match = currentBranch.match(releaseBranchRegex);

if (!match) {
  console.error('Not currently on a release branch');
  process.exit(1);
}

const pkgVersion = require('../package.json').version;

// Only final versions and major (x.0.0) prereleases update docs
if (pkgVersion.includes('-') && !pkgVersion.includes('.0.0-')) {
  console.error('Refusing to update docs: non-major prerelease detected');
  process.exit(0);
}

const current = match.groups;
const docsBranch = `docs-v${current.major}.x`;

// Fetch remotes and find the docs branch if it exists
run('git fetch --all --no-tags');
const matchingDocsBranches = tryRead(`git rev-parse --glob='*/${docsBranch}'`);

if (!matchingDocsBranches) {
  // Create the branch
  run(`git checkout --orphan ${docsBranch}`);
} else {
  // De-duplicate refs (local and remote branch may point to the same commit)
  const [publishedRef, ...others] = new Set(matchingDocsBranches.split('\n'));
  if (others.length > 0) {
    console.error(
      `Found conflicting ${docsBranch} branches.\n` +
        'Either local branch is outdated or there are multiple matching remote branches.',
    );
    process.exit(1);
  }
  const publishedVersion = JSON.parse(read(`git show ${publishedRef}:package.json`)).version;
  const publishedMinor = publishedVersion.match(/\d+\.(?<minor>\d+)\.\d+/).groups.minor;
  // Compare minors numerically: both values are regex-group strings, and a
  // string comparison would wrongly treat e.g. "10" < "9" as true.
  if (Number(current.minor) < Number(publishedMinor)) {
    console.error('Refusing to update docs: newer version is published');
    process.exit(0);
  }

  // Point HEAD at the published ref, then re-attach to the docs branch
  run('git checkout --quiet --detach');
  run(`git reset --soft ${publishedRef}`);
  run(`git checkout ${docsBranch}`);
}

run('npm run prepare-docs');
run('git add -f docs'); // --force needed because generated docs files are gitignored
run('git commit -m "Update docs"');
run(`git checkout ${currentBranch}`);
|
||||
21
lib_openzeppelin_contracts/scripts/upgradeable/README.md
Normal file
21
lib_openzeppelin_contracts/scripts/upgradeable/README.md
Normal file
@@ -0,0 +1,21 @@
|
||||
The upgradeable variant of OpenZeppelin Contracts is automatically generated from the original Solidity code. We call this process "transpilation" and it is implemented by our [Upgradeability Transpiler](https://github.com/OpenZeppelin/openzeppelin-transpiler/).
|
||||
|
||||
When the `master` branch or `release-v*` branches are updated, the code is transpiled and pushed to [OpenZeppelin/openzeppelin-contracts-upgradeable](https://github.com/OpenZeppelin/openzeppelin-contracts-upgradeable) by the `upgradeable.yml` workflow.
|
||||
|
||||
## `transpile.sh`
|
||||
|
||||
Applies patches and invokes the transpiler with the command line flags we need for our requirements (for example, excluding certain files).
|
||||
|
||||
## `transpile-onto.sh`
|
||||
|
||||
```
|
||||
bash scripts/upgradeable/transpile-onto.sh <target> [<base>]
|
||||
```
|
||||
|
||||
Transpiles the contents of the current git branch and commits the result as a new commit on branch `<target>`. If branch `<target>` doesn't exist, it is created; when the optional `<base>` argument is given, its commit history is copied into the new branch (this is used in GitHub Actions, but is usually not necessary locally).
|
||||
|
||||
## `patch-apply.sh` & `patch-save.sh`
|
||||
|
||||
Some of the upgradeable contract variants require ad-hoc changes that are not implemented by the transpiler. These changes are implemented by patches stored in `upgradeable.patch` in this directory. `patch-apply.sh` applies these patches.
|
||||
|
||||
If the patches fail to apply due to changes in the repo, the conflicts have to be resolved manually. Once fixed, `patch-save.sh` will take the changes staged in Git and update `upgradeable.patch` to match.
|
||||
19
lib_openzeppelin_contracts/scripts/upgradeable/patch-apply.sh
Executable file
19
lib_openzeppelin_contracts/scripts/upgradeable/patch-apply.sh
Executable file
@@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env bash

# Applies upgradeable.patch (the ad-hoc changes not handled by the
# transpiler) to a clean worktree. Counterpart of patch-save.sh.

set -euo pipefail

DIRNAME="$(dirname -- "${BASH_SOURCE[0]}")"
PATCH="$DIRNAME/upgradeable.patch"

# Print a message to stderr and abort
error() {
  echo Error: "$*" >&2
  exit 1
}

# Refuse to run with staged or unstaged changes (the patch file itself excluded)
if ! git diff-files --quiet ":!$PATCH" || ! git diff-index --quiet HEAD ":!$PATCH"; then
  error "Repository must have no staged or unstaged changes"
fi

# -3 falls back to a three-way merge, leaving conflict markers to resolve
if ! git apply -3 "$PATCH"; then
  error "Fix conflicts and run $DIRNAME/patch-save.sh"
fi
|
||||
18
lib_openzeppelin_contracts/scripts/upgradeable/patch-save.sh
Executable file
18
lib_openzeppelin_contracts/scripts/upgradeable/patch-save.sh
Executable file
@@ -0,0 +1,18 @@
|
||||
#!/usr/bin/env bash

# Regenerates upgradeable.patch from the changes currently staged in git.
# Counterpart of patch-apply.sh.

set -euo pipefail

DIRNAME="$(dirname -- "${BASH_SOURCE[0]}")"
PATCH="$DIRNAME/upgradeable.patch"

# Print a message to stderr and abort
error() {
  echo Error: "$*" >&2
  exit 1
}

# Refuse to run with unstaged changes (the patch file itself excluded)
if ! git diff-files --quiet ":!$PATCH"; then
  error "Unstaged changes. Stage to include in patch or temporarily stash."
fi

# Write the staged diff to the patch file, then unstage and revert everything
# else so only the updated patch remains
git diff-index --cached --patch --output="$PATCH" HEAD
git restore --staged --worktree ":!$PATCH"
|
||||
@@ -0,0 +1,54 @@
|
||||
#!/usr/bin/env bash

# Transpiles the contents of the current branch and commits the result onto
# branch <target>, creating it (optionally with <base>'s history) if needed.
# Usage: bash scripts/upgradeable/transpile-onto.sh <target> [<base>]

set -euo pipefail

if [ $# -lt 1 ]; then
  echo "usage: bash $0 <target> [<base>]" >&2
  exit 1
fi

set -x

target="$1"
base="${2-}"

bash scripts/upgradeable/transpile.sh

commit="$(git rev-parse --short HEAD)"
start_branch="$(git rev-parse --abbrev-ref HEAD)"

git add contracts

# detach from the current branch to avoid making changes to it
git checkout --quiet --detach

# switch to the target branch, creating it if necessary
if git rev-parse -q --verify "$target"; then
  # if the branch exists, make it the current HEAD without checking out its contents
  git reset --soft "$target"
  git checkout "$target"
else
  # if the branch doesn't exist, create it as an orphan and check it out
  git checkout --orphan "$target"
  if [ -n "$base" ] && git rev-parse -q --verify "$base"; then
    # if base was specified and it exists, set it as the branch history
    git reset --soft "$base"
  fi
fi

# abort if there are no changes to commit at this point
if git diff --quiet --cached; then
  exit
fi

# optionally pin the vanilla contracts as a submodule at the transpiled commit
if [[ -v SUBMODULE_REMOTE ]]; then
  lib=lib/openzeppelin-contracts
  git submodule add -b "${base#origin/}" "$SUBMODULE_REMOTE" "$lib"
  git -C "$lib" checkout "$commit"
  git add "$lib"
fi

git commit -m "Transpile $commit"

# return to original branch
git checkout "$start_branch"
|
||||
50
lib_openzeppelin_contracts/scripts/upgradeable/transpile.sh
Normal file
50
lib_openzeppelin_contracts/scripts/upgradeable/transpile.sh
Normal file
@@ -0,0 +1,50 @@
|
||||
#!/usr/bin/env bash

# Transpiles the vanilla contracts into their upgradeable variants with
# @openzeppelin/upgrade-safe-transpiler, after applying the local patches.

set -euo pipefail -x

VERSION="$(jq -r .version contracts/package.json)"
DIRNAME="$(dirname -- "${BASH_SOURCE[0]}")"

# Apply the ad-hoc changes stored in upgradeable.patch
bash "$DIRNAME/patch-apply.sh"
# Replace the <package-version> placeholder (introduced by the patch in
# contracts/package.json) with the actual version
sed -i'' -e "s/<package-version>/$VERSION/g" "contracts/package.json"
git add contracts/package.json

npm run clean
npm run compile

# Select the build-info file whose sources do NOT include the "unreachable" mocks
build_info=($(jq -r '.input.sources | keys | if any(test("^contracts/mocks/.*\\bunreachable\\b")) then empty else input_filename end' artifacts/build-info/*))
build_info_num=${#build_info[@]}

# There must be exactly one relevant build-info file for -b below
if [ $build_info_num -ne 1 ]; then
  echo "found $build_info_num relevant build info files but expected just 1"
  exit 1
fi

# -D: delete original and excluded files
# -b: use this build info file
# -i: use included Initializable
# -x: exclude proxy-related contracts with a few exceptions
# -p: emit public initializer
# -n: use namespaces
# -N: exclude from namespaces transformation
# -q: partial transpilation using @openzeppelin/contracts as peer project
npx @openzeppelin/upgrade-safe-transpiler -D \
  -b "$build_info" \
  -i contracts/proxy/utils/Initializable.sol \
  -x 'contracts-exposed/**/*' \
  -x 'contracts/proxy/**/*' \
  -x '!contracts/proxy/Clones.sol' \
  -x '!contracts/proxy/ERC1967/ERC1967Storage.sol' \
  -x '!contracts/proxy/ERC1967/ERC1967Utils.sol' \
  -x '!contracts/proxy/utils/UUPSUpgradeable.sol' \
  -x '!contracts/proxy/beacon/IBeacon.sol' \
  -p 'contracts/access/manager/AccessManager.sol' \
  -p 'contracts/finance/VestingWallet.sol' \
  -p 'contracts/governance/TimelockController.sol' \
  -p 'contracts/metatx/ERC2771Forwarder.sol' \
  -n \
  -N 'contracts/mocks/**/*' \
  -q '@openzeppelin/'

# delete compilation artifacts of vanilla code
npm run clean
|
||||
361
lib_openzeppelin_contracts/scripts/upgradeable/upgradeable.patch
Normal file
361
lib_openzeppelin_contracts/scripts/upgradeable/upgradeable.patch
Normal file
@@ -0,0 +1,361 @@
|
||||
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
|
||||
deleted file mode 100644
|
||||
index 35ad097ff..000000000
|
||||
--- a/.github/ISSUE_TEMPLATE/bug_report.md
|
||||
+++ /dev/null
|
||||
@@ -1,21 +0,0 @@
|
||||
----
|
||||
-name: Bug report
|
||||
-about: Report a bug in OpenZeppelin Contracts
|
||||
-
|
||||
----
|
||||
-
|
||||
-<!-- Briefly describe the issue you're experiencing. Tell us what you were trying to do and what happened instead. -->
|
||||
-
|
||||
-<!-- Remember, this is not a place to ask for help debugging code. For that, we welcome you in the OpenZeppelin Community Forum: https://forum.openzeppelin.com/. -->
|
||||
-
|
||||
-**💻 Environment**
|
||||
-
|
||||
-<!-- Tell us what version of OpenZeppelin Contracts you're using, and how you're using it: Hardhat, Remix, etc. -->
|
||||
-
|
||||
-**📝 Details**
|
||||
-
|
||||
-<!-- Describe the problem you have been experiencing in more detail. Include as much information as you think is relevant. Keep in mind that transactions can fail for many reasons; context is key here. -->
|
||||
-
|
||||
-**🔢 Code to reproduce bug**
|
||||
-
|
||||
-<!-- We will be able to better help if you provide a minimal example that triggers the bug. -->
|
||||
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
|
||||
index 4018cef29..d343a53d8 100644
|
||||
--- a/.github/ISSUE_TEMPLATE/config.yml
|
||||
+++ b/.github/ISSUE_TEMPLATE/config.yml
|
||||
@@ -1,4 +1,8 @@
|
||||
+blank_issues_enabled: false
|
||||
contact_links:
|
||||
+ - name: Bug Reports & Feature Requests
|
||||
+ url: https://github.com/OpenZeppelin/openzeppelin-contracts/issues/new/choose
|
||||
+ about: Visit the OpenZeppelin Contracts repository
|
||||
- name: Questions & Support Requests
|
||||
url: https://forum.openzeppelin.com/c/support/contracts/18
|
||||
about: Ask in the OpenZeppelin Forum
|
||||
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
|
||||
deleted file mode 100644
|
||||
index ff596b0c3..000000000
|
||||
--- a/.github/ISSUE_TEMPLATE/feature_request.md
|
||||
+++ /dev/null
|
||||
@@ -1,14 +0,0 @@
|
||||
----
|
||||
-name: Feature request
|
||||
-about: Suggest an idea for OpenZeppelin Contracts
|
||||
-
|
||||
----
|
||||
-
|
||||
-**🧐 Motivation**
|
||||
-<!-- Is your feature request related to a specific problem? Is it just a crazy idea? Tell us about it! -->
|
||||
-
|
||||
-**📝 Details**
|
||||
-<!-- Please describe your feature request in detail. -->
|
||||
-
|
||||
-<!-- Make sure that you have reviewed the OpenZeppelin Contracts Contributor Guidelines. -->
|
||||
-<!-- https://github.com/OpenZeppelin/openzeppelin-contracts/blob/master/CONTRIBUTING.md -->
|
||||
diff --git a/README.md b/README.md
|
||||
index fa7b4e31e..4799b6376 100644
|
||||
--- a/README.md
|
||||
+++ b/README.md
|
||||
@@ -19,6 +19,9 @@
|
||||
> [!IMPORTANT]
|
||||
> OpenZeppelin Contracts uses semantic versioning to communicate backwards compatibility of its API and storage layout. For upgradeable contracts, the storage layout of different major versions should be assumed incompatible, for example, it is unsafe to upgrade from 4.9.3 to 5.0.0. Learn more at [Backwards Compatibility](https://docs.openzeppelin.com/contracts/backwards-compatibility).
|
||||
|
||||
++> [!NOTE]
|
||||
++> You are looking at the upgradeable variant of OpenZeppelin Contracts. Be sure to review the documentation on [Using OpenZeppelin Contracts with Upgrades](https://docs.openzeppelin.com/contracts/upgradeable).
|
||||
++
|
||||
## Overview
|
||||
|
||||
### Installation
|
||||
@@ -26,7 +29,7 @@
|
||||
#### Hardhat (npm)
|
||||
|
||||
```
|
||||
-$ npm install @openzeppelin/contracts
|
||||
+$ npm install @openzeppelin/contracts-upgradeable
|
||||
```
|
||||
|
||||
#### Foundry (git)
|
||||
@@ -38,10 +41,10 @@ $ npm install @openzeppelin/contracts
|
||||
> Foundry installs the latest version initially, but subsequent `forge update` commands will use the `master` branch.
|
||||
|
||||
```
|
||||
-$ forge install OpenZeppelin/openzeppelin-contracts
|
||||
+$ forge install OpenZeppelin/openzeppelin-contracts-upgradeable
|
||||
```
|
||||
|
||||
-Add `@openzeppelin/contracts/=lib/openzeppelin-contracts/contracts/` in `remappings.txt.`
|
||||
+Add `@openzeppelin/contracts-upgradeable/=lib/openzeppelin-contracts-upgradeable/contracts/` in `remappings.txt.`
|
||||
|
||||
### Usage
|
||||
|
||||
@@ -50,10 +53,11 @@ Once installed, you can use the contracts in the library by importing them:
|
||||
```solidity
|
||||
pragma solidity ^0.8.20;
|
||||
|
||||
-import {ERC721} from "@openzeppelin/contracts/token/ERC721/ERC721.sol";
|
||||
+import {ERC721Upgradeable} from "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol";
|
||||
|
||||
-contract MyCollectible is ERC721 {
|
||||
- constructor() ERC721("MyCollectible", "MCO") {
|
||||
+contract MyCollectible is ERC721Upgradeable {
|
||||
+ function initialize() initializer public {
|
||||
+ __ERC721_init("MyCollectible", "MCO");
|
||||
}
|
||||
}
|
||||
```
|
||||
diff --git a/contracts/package.json b/contracts/package.json
|
||||
index 845e8c403..8dc181b91 100644
|
||||
--- a/contracts/package.json
|
||||
+++ b/contracts/package.json
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
- "name": "@openzeppelin/contracts",
|
||||
+ "name": "@openzeppelin/contracts-upgradeable",
|
||||
"description": "Secure Smart Contract library for Solidity",
|
||||
"version": "5.0.2",
|
||||
"files": [
|
||||
@@ -13,7 +13,7 @@
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
- "url": "https://github.com/OpenZeppelin/openzeppelin-contracts.git"
|
||||
+ "url": "https://github.com/OpenZeppelin/openzeppelin-contracts-upgradeable.git"
|
||||
},
|
||||
"keywords": [
|
||||
"solidity",
|
||||
@@ -28,5 +28,8 @@
|
||||
"bugs": {
|
||||
"url": "https://github.com/OpenZeppelin/openzeppelin-contracts/issues"
|
||||
},
|
||||
- "homepage": "https://openzeppelin.com/contracts/"
|
||||
+ "homepage": "https://openzeppelin.com/contracts/",
|
||||
+ "peerDependencies": {
|
||||
+ "@openzeppelin/contracts": "<package-version>"
|
||||
+ }
|
||||
}
|
||||
diff --git a/contracts/utils/cryptography/EIP712.sol b/contracts/utils/cryptography/EIP712.sol
|
||||
index 77c4c8990..602467f40 100644
|
||||
--- a/contracts/utils/cryptography/EIP712.sol
|
||||
+++ b/contracts/utils/cryptography/EIP712.sol
|
||||
@@ -4,7 +4,6 @@
|
||||
pragma solidity ^0.8.20;
|
||||
|
||||
import {MessageHashUtils} from "./MessageHashUtils.sol";
|
||||
-import {ShortStrings, ShortString} from "../ShortStrings.sol";
|
||||
import {IERC5267} from "../../interfaces/IERC5267.sol";
|
||||
|
||||
/**
|
||||
@@ -28,28 +27,18 @@ import {IERC5267} from "../../interfaces/IERC5267.sol";
|
||||
* NOTE: In the upgradeable version of this contract, the cached values will correspond to the address, and the domain
|
||||
* separator of the implementation contract. This will cause the {_domainSeparatorV4} function to always rebuild the
|
||||
* separator from the immutable values, which is cheaper than accessing a cached version in cold storage.
|
||||
- *
|
||||
- * @custom:oz-upgrades-unsafe-allow state-variable-immutable
|
||||
*/
|
||||
abstract contract EIP712 is IERC5267 {
|
||||
- using ShortStrings for *;
|
||||
-
|
||||
bytes32 private constant TYPE_HASH =
|
||||
keccak256("EIP712Domain(string name,string version,uint256 chainId,address verifyingContract)");
|
||||
|
||||
- // Cache the domain separator as an immutable value, but also store the chain id that it corresponds to, in order to
|
||||
- // invalidate the cached domain separator if the chain id changes.
|
||||
- bytes32 private immutable _cachedDomainSeparator;
|
||||
- uint256 private immutable _cachedChainId;
|
||||
- address private immutable _cachedThis;
|
||||
-
|
||||
+ /// @custom:oz-renamed-from _HASHED_NAME
|
||||
bytes32 private immutable _hashedName;
|
||||
+ /// @custom:oz-renamed-from _HASHED_VERSION
|
||||
bytes32 private immutable _hashedVersion;
|
||||
|
||||
- ShortString private immutable _name;
|
||||
- ShortString private immutable _version;
|
||||
- string private _nameFallback;
|
||||
- string private _versionFallback;
|
||||
+ string private _name;
|
||||
+ string private _version;
|
||||
|
||||
/**
|
||||
* @dev Initializes the domain separator and parameter caches.
|
||||
@@ -64,29 +53,23 @@ abstract contract EIP712 is IERC5267 {
|
||||
* contract upgrade].
|
||||
*/
|
||||
constructor(string memory name, string memory version) {
|
||||
- _name = name.toShortStringWithFallback(_nameFallback);
|
||||
- _version = version.toShortStringWithFallback(_versionFallback);
|
||||
- _hashedName = keccak256(bytes(name));
|
||||
- _hashedVersion = keccak256(bytes(version));
|
||||
-
|
||||
- _cachedChainId = block.chainid;
|
||||
- _cachedDomainSeparator = _buildDomainSeparator();
|
||||
- _cachedThis = address(this);
|
||||
+ _name = name;
|
||||
+ _version = version;
|
||||
+
|
||||
+ // Reset prior values in storage if upgrading
|
||||
+ _hashedName = 0;
|
||||
+ _hashedVersion = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Returns the domain separator for the current chain.
|
||||
*/
|
||||
function _domainSeparatorV4() internal view returns (bytes32) {
|
||||
- if (address(this) == _cachedThis && block.chainid == _cachedChainId) {
|
||||
- return _cachedDomainSeparator;
|
||||
- } else {
|
||||
- return _buildDomainSeparator();
|
||||
- }
|
||||
+ return _buildDomainSeparator();
|
||||
}
|
||||
|
||||
function _buildDomainSeparator() private view returns (bytes32) {
|
||||
- return keccak256(abi.encode(TYPE_HASH, _hashedName, _hashedVersion, block.chainid, address(this)));
|
||||
+ return keccak256(abi.encode(TYPE_HASH, _EIP712NameHash(), _EIP712VersionHash(), block.chainid, address(this)));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -125,6 +108,10 @@ abstract contract EIP712 is IERC5267 {
|
||||
uint256[] memory extensions
|
||||
)
|
||||
{
|
||||
+ // If the hashed name and version in storage are non-zero, the contract hasn't been properly initialized
|
||||
+ // and the EIP712 domain is not reliable, as it will be missing name and version.
|
||||
+ require(_hashedName == 0 && _hashedVersion == 0, "EIP712: Uninitialized");
|
||||
+
|
||||
return (
|
||||
hex"0f", // 01111
|
||||
_EIP712Name(),
|
||||
@@ -139,22 +126,62 @@ abstract contract EIP712 is IERC5267 {
|
||||
/**
|
||||
* @dev The name parameter for the EIP712 domain.
|
||||
*
|
||||
- * NOTE: By default this function reads _name which is an immutable value.
|
||||
- * It only reads from storage if necessary (in case the value is too large to fit in a ShortString).
|
||||
+ * NOTE: This function reads from storage by default, but can be redefined to return a constant value if gas costs
|
||||
+ * are a concern.
|
||||
*/
|
||||
- // solhint-disable-next-line func-name-mixedcase
|
||||
- function _EIP712Name() internal view returns (string memory) {
|
||||
- return _name.toStringWithFallback(_nameFallback);
|
||||
+ function _EIP712Name() internal view virtual returns (string memory) {
|
||||
+ return _name;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev The version parameter for the EIP712 domain.
|
||||
*
|
||||
- * NOTE: By default this function reads _version which is an immutable value.
|
||||
- * It only reads from storage if necessary (in case the value is too large to fit in a ShortString).
|
||||
+ * NOTE: This function reads from storage by default, but can be redefined to return a constant value if gas costs
|
||||
+ * are a concern.
|
||||
*/
|
||||
- // solhint-disable-next-line func-name-mixedcase
|
||||
- function _EIP712Version() internal view returns (string memory) {
|
||||
- return _version.toStringWithFallback(_versionFallback);
|
||||
+ function _EIP712Version() internal view virtual returns (string memory) {
|
||||
+ return _version;
|
||||
+ }
|
||||
+
|
||||
+ /**
|
||||
+ * @dev The hash of the name parameter for the EIP712 domain.
|
||||
+ *
|
||||
+ * NOTE: In previous versions this function was virtual. In this version you should override `_EIP712Name` instead.
|
||||
+ */
|
||||
+ function _EIP712NameHash() internal view returns (bytes32) {
|
||||
+ string memory name = _EIP712Name();
|
||||
+ if (bytes(name).length > 0) {
|
||||
+ return keccak256(bytes(name));
|
||||
+ } else {
|
||||
+ // If the name is empty, the contract may have been upgraded without initializing the new storage.
|
||||
+ // We return the name hash in storage if non-zero, otherwise we assume the name is empty by design.
|
||||
+ bytes32 hashedName = _hashedName;
|
||||
+ if (hashedName != 0) {
|
||||
+ return hashedName;
|
||||
+ } else {
|
||||
+ return keccak256("");
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ /**
|
||||
+ * @dev The hash of the version parameter for the EIP712 domain.
|
||||
+ *
|
||||
+ * NOTE: In previous versions this function was virtual. In this version you should override `_EIP712Version` instead.
|
||||
+ */
|
||||
+ function _EIP712VersionHash() internal view returns (bytes32) {
|
||||
+ string memory version = _EIP712Version();
|
||||
+ if (bytes(version).length > 0) {
|
||||
+ return keccak256(bytes(version));
|
||||
+ } else {
|
||||
+ // If the version is empty, the contract may have been upgraded without initializing the new storage.
|
||||
+ // We return the version hash in storage if non-zero, otherwise we assume the version is empty by design.
|
||||
+ bytes32 hashedVersion = _hashedVersion;
|
||||
+ if (hashedVersion != 0) {
|
||||
+ return hashedVersion;
|
||||
+ } else {
|
||||
+ return keccak256("");
|
||||
+ }
|
||||
+ }
|
||||
}
|
||||
}
|
||||
diff --git a/package.json b/package.json
|
||||
index c4b358e10..96ab2559c 100644
|
||||
--- a/package.json
|
||||
+++ b/package.json
|
||||
@@ -32,7 +32,7 @@
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
- "url": "https://github.com/OpenZeppelin/openzeppelin-contracts.git"
|
||||
+ "url": "https://github.com/OpenZeppelin/openzeppelin-contracts-upgradeable.git"
|
||||
},
|
||||
"keywords": [
|
||||
"solidity",
|
||||
diff --git a/remappings.txt b/remappings.txt
|
||||
index 304d1386a..a1cd63bee 100644
|
||||
--- a/remappings.txt
|
||||
+++ b/remappings.txt
|
||||
@@ -1 +1,2 @@
|
||||
-@openzeppelin/contracts/=contracts/
|
||||
+@openzeppelin/contracts-upgradeable/=contracts/
|
||||
+@openzeppelin/contracts/=lib/openzeppelin-contracts/contracts/
|
||||
diff --git a/test/utils/cryptography/EIP712.test.js b/test/utils/cryptography/EIP712.test.js
|
||||
index 2b6e7fa97..268e0d29d 100644
|
||||
--- a/test/utils/cryptography/EIP712.test.js
|
||||
+++ b/test/utils/cryptography/EIP712.test.js
|
||||
@@ -47,27 +47,6 @@ describe('EIP712', function () {
|
||||
const rebuildDomain = await getDomain(this.eip712);
|
||||
expect(rebuildDomain).to.be.deep.equal(this.domain);
|
||||
});
|
||||
-
|
||||
- if (shortOrLong === 'short') {
|
||||
- // Long strings are in storage, and the proxy will not be properly initialized unless
|
||||
- // the upgradeable contract variant is used and the initializer is invoked.
|
||||
-
|
||||
- it('adjusts when behind proxy', async function () {
|
||||
- const factory = await ethers.deployContract('$Clones');
|
||||
-
|
||||
- const clone = await factory
|
||||
- .$clone(this.eip712)
|
||||
- .then(tx => tx.wait())
|
||||
- .then(receipt => receipt.logs.find(ev => ev.fragment.name == 'return$clone_address').args.instance)
|
||||
- .then(address => ethers.getContractAt('$EIP712Verifier', address));
|
||||
-
|
||||
- const expectedDomain = { ...this.domain, verifyingContract: clone.target };
|
||||
- expect(await getDomain(clone)).to.be.deep.equal(expectedDomain);
|
||||
-
|
||||
- const expectedSeparator = await domainSeparator(expectedDomain);
|
||||
- expect(await clone.$_domainSeparatorV4()).to.equal(expectedSeparator);
|
||||
- });
|
||||
- }
|
||||
});
|
||||
|
||||
it('hash digest', async function () {
|
||||
Reference in New Issue
Block a user