Merge branch 'master' into fv/Governor

fv/Governor
Hadrien Croubois, committed via GitHub
commit aaa5a5abab
Changed files (lines changed):

  1. .changeset/beige-buses-drop.md (4)
  2. .changeset/curvy-shrimps-enjoy.md (4)
  3. .changeset/curvy-suns-sort.md (4)
  4. .changeset/famous-rules-burn.md (4)
  5. .changeset/funny-rockets-compete.md (4)
  6. .changeset/gold-chicken-clean.md (4)
  7. .changeset/healthy-squids-stare.md (4)
  8. .changeset/lemon-dogs-kiss.md (4)
  9. .changeset/little-kiwis-ring.md (4)
  10. .changeset/loud-wolves-promise.md (5)
  11. .changeset/pretty-hornets-play.md (4)
  12. .changeset/tame-ladybugs-sit.md (4)
  13. .changeset/tender-needles-dance.md (4)
  14. .github/workflows/checks.yml (30)
  15. .github/workflows/upgradeable.yml (31)
  16. audits/2023-05-v4.9.pdf (BIN)
  17. audits/README.md (1)
  18. certora/specs/AccessControlDefaultAdminRules.spec (240)
  19. certora/specs/helpers/helpers.spec (9)
  20. contracts/access/AccessControlDefaultAdminRules.sol (5)
  21. contracts/utils/cryptography/EIP712.sol (6)
  22. docs/modules/ROOT/pages/access-control.adoc (2)
  23. hardhat.config.js (8)
  24. hardhat/env-artifacts.js (24)
  25. hardhat/task-test-get-files.js (25)
  26. package-lock.json (27)
  27. package.json (3)
  28. scripts/release/update-comment.js (2)
  29. scripts/release/workflow/state.js (14)
  30. scripts/upgradeable/README.md (21)
  31. scripts/upgradeable/patch-apply.sh (19)
  32. scripts/upgradeable/patch-save.sh (18)
  33. scripts/upgradeable/transpile-onto.sh (44)
  34. scripts/upgradeable/transpile.sh (35)
  35. scripts/upgradeable/upgradeable.patch (481)
  36. test/access/AccessControl.behavior.js (31)
  37. test/migrate-imports.test.js (1)
  38. test/utils/Checkpoints.test.js (161)
  39. test/utils/cryptography/EIP712.test.js (6)
  40. test/utils/structs/EnumerableMap.test.js (93)
  41. test/utils/structs/EnumerableSet.test.js (33)

@ -2,4 +2,6 @@
'openzeppelin-solidity': patch
---
`Initializable`: optimize `_disableInitializers` by using `!=` instead of `<`. ([#3787](https://github.com/OpenZeppelin/openzeppelin-contracts/pull/3787))
`Initializable`: optimize `_disableInitializers` by using `!=` instead of `<`.
pr: #3787

@ -2,4 +2,6 @@
'openzeppelin-solidity': minor
---
`ReentrancyGuard`: Add a `_reentrancyGuardEntered` function to expose the guard status. ([#3714](https://github.com/OpenZeppelin/openzeppelin-contracts/pull/3714))
`ReentrancyGuard`: Add a `_reentrancyGuardEntered` function to expose the guard status.
pr: #3714

@ -2,4 +2,6 @@
'openzeppelin-solidity': patch
---
`Ownable2Step`: make `acceptOwnership` public virtual to enable usecases that require overriding it. ([#3960](https://github.com/OpenZeppelin/openzeppelin-contracts/pull/3960))
`Ownable2Step`: make `acceptOwnership` public virtual to enable usecases that require overriding it.
pr: #3960

@ -2,4 +2,6 @@
'openzeppelin-solidity': minor
---
`EnumerableMap`: add a `keys()` function that returns an array containing all the keys. ([#3920](https://github.com/OpenZeppelin/openzeppelin-contracts/pull/3920))
`EnumerableMap`: add a `keys()` function that returns an array containing all the keys.
pr: #3920

@ -2,4 +2,6 @@
'openzeppelin-solidity': patch
---
Reformatted codebase with latest version of Prettier Solidity. ([#3898](https://github.com/OpenZeppelin/openzeppelin-contracts/pull/3898))
Reformatted codebase with latest version of Prettier Solidity.
pr: #3898

@ -2,4 +2,6 @@
'openzeppelin-solidity': minor
---
`Strings`: add `equal` method. ([#3774](https://github.com/OpenZeppelin/openzeppelin-contracts/pull/3774))
`Strings`: add `equal` method.
pr: #3774

@ -2,4 +2,6 @@
'openzeppelin-solidity': patch
---
`Math`: optimize `log256` rounding check. ([#3745](https://github.com/OpenZeppelin/openzeppelin-contracts/pull/3745))
`Math`: optimize `log256` rounding check.
pr: #3745

@ -2,4 +2,6 @@
'openzeppelin-solidity': patch
---
`ERC20Votes`: optimize by using unchecked arithmetic. ([#3748](https://github.com/OpenZeppelin/openzeppelin-contracts/pull/3748))
`ERC20Votes`: optimize by using unchecked arithmetic.
pr: #3748

@ -2,4 +2,6 @@
'openzeppelin-solidity': patch
---
`Multicall`: annotate `multicall` function as upgrade safe to not raise a flag for its delegatecall. ([#3961](https://github.com/OpenZeppelin/openzeppelin-contracts/pull/3961))
`Multicall`: annotate `multicall` function as upgrade safe to not raise a flag for its delegatecall.
pr: #3961

@ -0,0 +1,5 @@
---
'openzeppelin-solidity': patch
---
`AccessControlDefaultAdminRules`: Clean up pending admin schedule on renounce.

@ -2,4 +2,6 @@
'openzeppelin-solidity': minor
---
`Strings`: add `toString` method for signed integers. ([#3773](https://github.com/OpenZeppelin/openzeppelin-contracts/pull/3773))
`Strings`: add `toString` method for signed integers.
pr: #3773

@ -2,4 +2,6 @@
'openzeppelin-solidity': patch
---
`MerkleProof`: optimize by using unchecked arithmetic. ([#3745](https://github.com/OpenZeppelin/openzeppelin-contracts/pull/3745))
`MerkleProof`: optimize by using unchecked arithmetic.
pr: #3745

@ -2,4 +2,6 @@
'openzeppelin-solidity': minor
---
`ERC20Wrapper`: self wrapping and deposit by the wrapper itself are now explicitelly forbiden.
`ERC20Wrapper`: self wrapping and deposit by the wrapper itself are now explicitly forbidden.
commit: 3214f6c25

@ -14,7 +14,6 @@ concurrency:
jobs:
lint:
if: github.repository != 'OpenZeppelin/openzeppelin-contracts-upgradeable'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@ -37,19 +36,36 @@ jobs:
- name: Check linearisation of the inheritance graph
run: npm run test:inheritance
- name: Check proceduraly generated contracts are up-to-date
if: github.repository != 'OpenZeppelin/openzeppelin-contracts-upgradeable'
run: npm run test:generation
- name: Compare gas costs
uses: ./.github/actions/gas-compare
with:
token: ${{ github.token }}
tests-upgradeable:
runs-on: ubuntu-latest
env:
FORCE_COLOR: 1
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0 # Include history so patch conflicts are resolved automatically
- name: Set up environment
uses: ./.github/actions/setup
- name: Transpile to upgradeable
run: bash scripts/upgradeable/transpile.sh
- name: Run tests
run: npm run test
env:
NODE_OPTIONS: --max_old_space_size=4096
- name: Check linearisation of the inheritance graph
run: npm run test:inheritance
- name: Check storage layout
uses: ./.github/actions/storage-layout
with:
token: ${{ github.token }}
foundry-tests:
if: github.repository != 'OpenZeppelin/openzeppelin-contracts-upgradeable'
tests-foundry:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@ -63,7 +79,6 @@ jobs:
run: forge test -vv
coverage:
if: github.repository != 'OpenZeppelin/openzeppelin-contracts-upgradeable'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@ -77,7 +92,6 @@ jobs:
token: ${{ secrets.CODECOV_TOKEN }}
slither:
if: github.repository != 'OpenZeppelin/openzeppelin-contracts-upgradeable'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@ -89,12 +103,12 @@ jobs:
node-version: 18.15
codespell:
if: github.repository != 'OpenZeppelin/openzeppelin-contracts-upgradeable'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Run CodeSpell
uses: codespell-project/actions-codespell@v1.0
uses: codespell-project/actions-codespell@v2.0
with:
check_hidden: true
check_filenames: true
skip: package-lock.json,*.pdf

@ -1,4 +1,4 @@
name: Upgradeable Trigger
name: transpile upgradeable
on:
push:
@ -7,17 +7,24 @@ on:
- release-v*
jobs:
trigger:
transpile:
environment: push-upgradeable
runs-on: ubuntu-latest
steps:
- id: app
uses: getsentry/action-github-app-token@v2
- uses: actions/checkout@v3
with:
app_id: ${{ secrets.UPGRADEABLE_APP_ID }}
private_key: ${{ secrets.UPGRADEABLE_APP_PK }}
- run: |
curl -X POST \
https://api.github.com/repos/OpenZeppelin/openzeppelin-contracts-upgradeable/dispatches \
-H 'Accept: application/vnd.github.v3+json' \
-H 'Authorization: token ${{ steps.app.outputs.token }}' \
-d '{ "event_type": "Update", "client_payload": { "ref": "${{ github.ref }}" } }'
repository: OpenZeppelin/openzeppelin-contracts-upgradeable
fetch-depth: 0
token: ${{ secrets.GH_TOKEN_UPGRADEABLE }}
- name: Fetch current non-upgradeable branch
run: |
git fetch "https://github.com/${{ github.repository }}.git" "$REF"
git checkout FETCH_HEAD
env:
REF: ${{ github.ref }}
- name: Set up environment
uses: ./.github/actions/setup
- run: bash scripts/git-user-config.sh
- name: Transpile to upgradeable
run: bash scripts/upgradeable/transpile-onto.sh ${{ github.ref_name }} origin/${{ github.ref_name }}
- run: git push origin ${{ github.ref_name }}

Binary file not shown.

@ -2,6 +2,7 @@
| Date | Version | Commit | Auditor | Scope | Links |
| ------------ | ------- | --------- | ------------ | -------------------- | ----------------------------------------------------------- |
| May 2023 | v4.9.0 | `91df66c` | OpenZeppelin | v4.9 Changes | [🔗](./2023-05-v4.9.pdf) |
| October 2022 | v4.8.0 | `14f98db` | OpenZeppelin | ERC4626, Checkpoints | [🔗](./2022-10-ERC4626.pdf) [🔗](./2022-10-Checkpoints.pdf) |
| October 2018 | v2.0.0 | `dac5bcc` | LevelK | Everything | [🔗](./2018-10.pdf) |
| March 2017 | v1.0.4 | `9c5975a` | New Alchemy | Everything | [🔗](./2017-03.md) |

@ -12,44 +12,23 @@ use rule onlyGrantCanGrant filtered {
│ Helpers │
└─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘
*/
definition timeSanity(env e) returns bool =
e.block.timestamp > 0 && e.block.timestamp + defaultAdminDelay(e) < max_uint48();
function max_uint48() returns mathint {
return (1 << 48) - 1;
}
function nonZeroAccount(address account) returns bool {
return account != 0;
}
definition delayChangeWaitSanity(env e, uint48 newDelay) returns bool =
e.block.timestamp + delayChangeWait_(e, newDelay) < max_uint48();
function timeSanity(env e) returns bool {
return
e.block.timestamp > 0 && // Avoids 0 schedules
e.block.timestamp + defaultAdminDelay(e) < max_uint48();
}
definition isSet(uint48 schedule) returns bool =
schedule != 0;
function delayChangeWaitSanity(env e, uint48 newDelay) returns bool {
return e.block.timestamp + delayChangeWait_(e, newDelay) < max_uint48();
}
definition hasPassed(env e, uint48 schedule) returns bool =
schedule < e.block.timestamp;
function isSet(uint48 schedule) returns bool {
return schedule != 0;
}
definition increasingDelaySchedule(env e, uint48 newDelay) returns mathint =
e.block.timestamp + min(newDelay, defaultAdminDelayIncreaseWait());
function hasPassed(env e, uint48 schedule) returns bool {
return schedule < e.block.timestamp;
}
function min(uint48 a, uint48 b) returns mathint {
return a < b ? a : b;
}
function increasingDelaySchedule(env e, uint48 newDelay) returns mathint {
return e.block.timestamp + min(newDelay, defaultAdminDelayIncreaseWait());
}
function decreasingDelaySchedule(env e, uint48 newDelay) returns mathint {
return e.block.timestamp + defaultAdminDelay(e) - newDelay;
}
definition decreasingDelaySchedule(env e, uint48 newDelay) returns mathint =
e.block.timestamp + defaultAdminDelay(e) - newDelay;
/*
┌─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┐
@ -57,11 +36,10 @@ function decreasingDelaySchedule(env e, uint48 newDelay) returns mathint {
└─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘
*/
invariant defaultAdminConsistency(address account)
defaultAdmin() == account <=> hasRole(DEFAULT_ADMIN_ROLE(), account)
(account == defaultAdmin() && account != 0) <=> hasRole(DEFAULT_ADMIN_ROLE(), account)
{
preserved {
// defaultAdmin() returns the zero address when there's no default admin
require nonZeroAccount(account);
preserved with (env e) {
require nonzerosender(e);
}
}
@ -72,10 +50,12 @@ invariant defaultAdminConsistency(address account)
*/
invariant singleDefaultAdmin(address account, address another)
hasRole(DEFAULT_ADMIN_ROLE(), account) && hasRole(DEFAULT_ADMIN_ROLE(), another) => another == account
// We filter here because we couldn't find a way to force Certora to have an initial state with
// only one DEFAULT_ADMIN_ROLE enforced, so a counter example is a different default admin since inception
// triggering the transfer, which is known to be impossible by definition.
filtered { f -> f.selector != acceptDefaultAdminTransfer().selector }
{
preserved {
requireInvariant defaultAdminConsistency(account);
requireInvariant defaultAdminConsistency(another);
}
}
/*
┌─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┐
@ -118,7 +98,8 @@ rule revokeRoleEffect(env e, bytes32 role) {
"roles can only be revoked by their owner except for the default admin role";
// effect
assert success => !hasRole(role, account), "role is revoked";
assert success => !hasRole(role, account),
"role is revoked";
// no side effect
assert hasOtherRoleBefore != hasOtherRoleAfter => (role == otherRole && account == otherAccount),
@ -138,34 +119,58 @@ rule renounceRoleEffect(env e, bytes32 role) {
address otherAccount;
bool hasOtherRoleBefore = hasRole(otherRole, otherAccount);
uint48 scheduleBefore = pendingDefaultAdminSchedule_();
address adminBefore = defaultAdmin();
address pendingAdminBefore = pendingDefaultAdmin_();
uint48 scheduleBefore = pendingDefaultAdminSchedule_();
renounceRole@withrevert(e, role, account);
bool success = !lastReverted;
bool hasOtherRoleAfter = hasRole(otherRole, otherAccount);
address adminAfter = defaultAdmin();
address pendingAdminAfter = pendingDefaultAdmin_();
uint48 scheduleAfter = pendingDefaultAdminSchedule_();
// liveness
assert success <=> (
account == e.msg.sender &&
(
role != DEFAULT_ADMIN_ROLE() ||
account != adminBefore ||
(
role != DEFAULT_ADMIN_ROLE()
) || (
role == DEFAULT_ADMIN_ROLE() &&
pendingAdminBefore == 0 &&
isSet(scheduleBefore) &&
hasPassed(e, scheduleBefore)
)
)
), "an account only can renounce by itself with a delay for the default admin role";
),
"an account only can renounce by itself with a delay for the default admin role";
// effect
assert success => !hasRole(role, account), "role is renounced";
assert success => !hasRole(role, account),
"role is renounced";
assert success => (
(
role == DEFAULT_ADMIN_ROLE() &&
account == adminBefore
) ? (
adminAfter == 0 &&
pendingAdminAfter == 0 &&
scheduleAfter == 0
) : (
adminAfter == adminBefore &&
pendingAdminAfter == pendingAdminBefore &&
scheduleAfter == scheduleBefore
)
),
"renouncing default admin role cleans state iff called by previous admin";
// no side effect
assert hasOtherRoleBefore != hasOtherRoleAfter => (role == otherRole && account == otherAccount),
assert hasOtherRoleBefore != hasOtherRoleAfter => (
role == otherRole &&
account == otherAccount
),
"no other role is affected";
}
@ -175,10 +180,6 @@ rule renounceRoleEffect(env e, bytes32 role) {
└─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘
*/
rule noDefaultAdminChange(env e, method f, calldataarg args) {
require nonZeroAccount(e.msg.sender);
requireInvariant defaultAdminConsistency(defaultAdmin());
requireInvariant singleDefaultAdmin(e.msg.sender, defaultAdmin());
address adminBefore = defaultAdmin();
f(e, args);
address adminAfter = defaultAdmin();
@ -186,18 +187,17 @@ rule noDefaultAdminChange(env e, method f, calldataarg args) {
assert adminBefore != adminAfter => (
f.selector == acceptDefaultAdminTransfer().selector ||
f.selector == renounceRole(bytes32,address).selector
), "default admin is only affected by accepting an admin transfer or renoucing";
),
"default admin is only affected by accepting an admin transfer or renoucing";
}
/*
┌─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┐
│ Rule: pendingDefaultAdmin is only affected by beginning, accepting or canceling an admin transfer │
│ Rule: pendingDefaultAdmin is only affected by beginning, completing (accept or renounce), or canceling an admin │
│ transfer │
└─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘
*/
rule noPendingDefaultAdminChange(env e, method f, calldataarg args) {
requireInvariant defaultAdminConsistency(defaultAdmin());
requireInvariant singleDefaultAdmin(e.msg.sender, defaultAdmin());
address pendingAdminBefore = pendingDefaultAdmin_();
address scheduleBefore = pendingDefaultAdminSchedule_();
f(e, args);
@ -210,8 +210,10 @@ rule noPendingDefaultAdminChange(env e, method f, calldataarg args) {
) => (
f.selector == beginDefaultAdminTransfer(address).selector ||
f.selector == acceptDefaultAdminTransfer().selector ||
f.selector == cancelDefaultAdminTransfer().selector
), "pending admin and its schedule is only affected by beginning, accepting or cancelling an admin transfer";
f.selector == cancelDefaultAdminTransfer().selector ||
f.selector == renounceRole(bytes32,address).selector
),
"pending admin and its schedule is only affected by beginning, completing, or cancelling an admin transfer";
}
/*
@ -224,7 +226,8 @@ rule noDefaultAdminDelayChange(env e, method f, calldataarg args) {
f(e, args);
uint48 delayAfter = defaultAdminDelay(e);
assert delayBefore == delayAfter, "delay can't be changed atomically by any function";
assert delayBefore == delayAfter,
"delay can't be changed atomically by any function";
}
/*
@ -240,7 +243,8 @@ rule noPendingDefaultAdminDelayChange(env e, method f, calldataarg args) {
assert pendingDelayBefore != pendingDelayAfter => (
f.selector == changeDefaultAdminDelay(uint48).selector ||
f.selector == rollbackDefaultAdminDelay().selector
), "pending delay is only affected by changeDefaultAdminDelay or rollbackDefaultAdminDelay";
),
"pending delay is only affected by changeDefaultAdminDelay or rollbackDefaultAdminDelay";
}
/*
@ -263,10 +267,10 @@ rule noDefaultAdminDelayIncreaseWaitChange(env e, method f, calldataarg args) {
└─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘
*/
rule beginDefaultAdminTransfer(env e, address newAdmin) {
require nonpayable(e);
require timeSanity(e);
requireInvariant defaultAdminConsistency(defaultAdmin());
requireInvariant singleDefaultAdmin(e.msg.sender, defaultAdmin());
require nonpayable(e);
require nonzerosender(e);
requireInvariant defaultAdminConsistency(e.msg.sender);
beginDefaultAdminTransfer@withrevert(e, newAdmin);
bool success = !lastReverted;
@ -288,18 +292,24 @@ rule beginDefaultAdminTransfer(env e, address newAdmin) {
└─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘
*/
rule pendingDefaultAdminDelayEnforced(env e1, env e2, method f, calldataarg args, address newAdmin) {
require e1.block.timestamp < e2.block.timestamp;
require e1.block.timestamp <= e2.block.timestamp;
uint48 delayBefore = defaultAdminDelay(e1);
address adminBefore = defaultAdmin();
// There might be a better way to generalize this without requiring `beginDefaultAdminTransfer`, but currently
// it's the only way in which we can attest that only `delayBefore` has passed before a change.
beginDefaultAdminTransfer(e1, newAdmin);
f(e2, args);
address adminAfter = defaultAdmin();
assert adminAfter == newAdmin => ((e2.block.timestamp >= e1.block.timestamp + delayBefore) || adminBefore == newAdmin),
"A delay can't change in less than applied schedule";
// change can only happen towards the newAdmin, with the delay
assert adminAfter != adminBefore => (
adminAfter == newAdmin &&
e2.block.timestamp >= e1.block.timestamp + delayBefore
),
"The admin can only change after the enforced delay and to the previously scheduled new admin";
}
/*
@ -309,17 +319,19 @@ rule pendingDefaultAdminDelayEnforced(env e1, env e2, method f, calldataarg args
*/
rule acceptDefaultAdminTransfer(env e) {
require nonpayable(e);
requireInvariant defaultAdminConsistency(defaultAdmin());
requireInvariant singleDefaultAdmin(e.msg.sender, defaultAdmin());
address pendingAdminBefore = pendingDefaultAdmin_();
uint48 scheduleAfter = pendingDefaultAdminSchedule_();
uint48 scheduleBefore = pendingDefaultAdminSchedule_();
acceptDefaultAdminTransfer@withrevert(e);
bool success = !lastReverted;
// liveness
assert success <=> e.msg.sender == pendingAdminBefore && isSet(scheduleAfter) && hasPassed(e, scheduleAfter),
assert success <=> (
e.msg.sender == pendingAdminBefore &&
isSet(scheduleBefore) &&
hasPassed(e, scheduleBefore)
),
"only the pending default admin can accept the role after the schedule has been set and passed";
// effect
@ -338,8 +350,8 @@ rule acceptDefaultAdminTransfer(env e) {
*/
rule cancelDefaultAdminTransfer(env e) {
require nonpayable(e);
requireInvariant defaultAdminConsistency(defaultAdmin());
requireInvariant singleDefaultAdmin(e.msg.sender, defaultAdmin());
require nonzerosender(e);
requireInvariant defaultAdminConsistency(e.msg.sender);
cancelDefaultAdminTransfer@withrevert(e);
bool success = !lastReverted;
@ -361,11 +373,11 @@ rule cancelDefaultAdminTransfer(env e) {
└─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘
*/
rule changeDefaultAdminDelay(env e, uint48 newDelay) {
require nonpayable(e);
require timeSanity(e);
require nonpayable(e);
require nonzerosender(e);
require delayChangeWaitSanity(e, newDelay);
requireInvariant defaultAdminConsistency(defaultAdmin());
requireInvariant singleDefaultAdmin(e.msg.sender, defaultAdmin());
requireInvariant defaultAdminConsistency(e.msg.sender);
uint48 delayBefore = defaultAdminDelay(e);
@ -377,7 +389,9 @@ rule changeDefaultAdminDelay(env e, uint48 newDelay) {
"only the current default admin can begin a delay change";
// effect
assert success => pendingDelay_(e) == newDelay, "pending delay is set";
assert success => pendingDelay_(e) == newDelay,
"pending delay is set";
assert success => (
pendingDelaySchedule_(e) > e.block.timestamp ||
delayBefore == newDelay || // Interpreted as decreasing, x - x = 0
@ -392,17 +406,22 @@ rule changeDefaultAdminDelay(env e, uint48 newDelay) {
└─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘
*/
rule pendingDelayWaitEnforced(env e1, env e2, method f, calldataarg args, uint48 newDelay) {
require e1.block.timestamp < e2.block.timestamp;
require e1.block.timestamp <= e2.block.timestamp;
uint48 delayBefore = defaultAdminDelay(e1);
changeDefaultAdminDelay(e1, newDelay);
f(e2, args);
uint48 delayAfter = defaultAdminDelay(e2);
mathint delayWait = newDelay > delayBefore ? increasingDelaySchedule(e1, newDelay) : decreasingDelaySchedule(e1, newDelay);
assert delayAfter == newDelay => (e2.block.timestamp >= delayWait || delayBefore == newDelay),
"A delay can't change in less than applied schedule";
assert delayAfter != delayBefore => (
delayAfter == newDelay &&
e2.block.timestamp >= delayWait
),
"A delay can only change after the applied schedule";
}
/*
@ -427,8 +446,8 @@ rule pendingDelayWait(env e, uint48 newDelay) {
*/
rule rollbackDefaultAdminDelay(env e) {
require nonpayable(e);
requireInvariant defaultAdminConsistency(defaultAdmin());
requireInvariant singleDefaultAdmin(e.msg.sender, defaultAdmin());
require nonzerosender(e);
requireInvariant defaultAdminConsistency(e.msg.sender);
rollbackDefaultAdminDelay@withrevert(e);
bool success = !lastReverted;
@ -443,58 +462,3 @@ rule rollbackDefaultAdminDelay(env e) {
assert success => pendingDelaySchedule_(e) == 0,
"Pending default admin delay is reset";
}
/*
┌─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┐
│ Rule: pending default admin and the delay can only change along with their corresponding schedules │
└─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘
*/
rule pendingValueAndScheduleCoupling(env e, address newAdmin, uint48 newDelay) {
requireInvariant defaultAdminConsistency(defaultAdmin());
requireInvariant singleDefaultAdmin(e.msg.sender, defaultAdmin());
// Pending admin
address pendingAdminBefore = pendingDefaultAdmin_();
uint48 pendingAdminScheduleBefore = pendingDefaultAdminSchedule_();
beginDefaultAdminTransfer(e, newAdmin);
address pendingAdminAfter = pendingDefaultAdmin_();
uint48 pendingAdminScheduleAfter = pendingDefaultAdminSchedule_();
assert (
pendingAdminScheduleBefore != pendingDefaultAdminSchedule_() &&
pendingAdminBefore == pendingAdminAfter
) => newAdmin == pendingAdminBefore, "pending admin stays the same if the new admin set is the same";
assert (
pendingAdminBefore != pendingAdminAfter &&
pendingAdminScheduleBefore == pendingDefaultAdminSchedule_()
) => (
// Schedule doesn't change if:
// - The defaultAdminDelay was reduced to a value such that added to the block.timestamp is equal to previous schedule
e.block.timestamp + defaultAdminDelay(e) == pendingAdminScheduleBefore
), "pending admin stays the same if a default admin transfer is begun on accepted edge cases";
// Pending delay
address pendingDelayBefore = pendingDelay_(e);
uint48 pendingDelayScheduleBefore = pendingDelaySchedule_(e);
changeDefaultAdminDelay(e, newDelay);
address pendingDelayAfter = pendingDelay_(e);
uint48 pendingDelayScheduleAfter = pendingDelaySchedule_(e);
assert (
pendingDelayScheduleBefore != pendingDelayScheduleAfter &&
pendingDelayBefore == pendingDelayAfter
) => newDelay == pendingDelayBefore || pendingDelayBefore == 0, "pending delay stays the same if the new delay set is the same";
assert (
pendingDelayBefore != pendingDelayAfter &&
pendingDelayScheduleBefore == pendingDelayScheduleAfter
) => (
increasingDelaySchedule(e, newDelay) == pendingDelayScheduleBefore ||
decreasingDelaySchedule(e, newDelay) == pendingDelayScheduleBefore
), "pending delay stays the same if a default admin transfer is begun on accepted edge cases";
}

@ -1,3 +1,10 @@
// environment
definition nonpayable(env e) returns bool = e.msg.value == 0;
definition nonzerosender(env e) returns bool = e.msg.sender != 0;
definition max_uint48() returns uint48 = 0xffffffffffff;
// constants
definition max_uint48() returns mathint = (1 << 48) - 1;
// math
definition min(mathint a, mathint b) returns mathint = a < b ? a : b;
definition max(mathint a, mathint b) returns mathint = a > b ? a : b;

@ -106,12 +106,13 @@ abstract contract AccessControlDefaultAdminRules is IAccessControlDefaultAdminRu
* non-administrated role.
*/
function renounceRole(bytes32 role, address account) public virtual override(AccessControl, IAccessControl) {
if (role == DEFAULT_ADMIN_ROLE) {
if (role == DEFAULT_ADMIN_ROLE && account == defaultAdmin()) {
(address newDefaultAdmin, uint48 schedule) = pendingDefaultAdmin();
require(
newDefaultAdmin == address(0) && _isScheduleSet(schedule) && _hasSchedulePassed(schedule),
"AccessControl: only can renounce in two delayed steps"
);
delete _pendingDefaultAdminSchedule;
}
super.renounceRole(role, account);
}
@ -137,7 +138,7 @@ abstract contract AccessControlDefaultAdminRules is IAccessControlDefaultAdminRu
* @dev See {AccessControl-_revokeRole}.
*/
function _revokeRole(bytes32 role, address account) internal virtual override {
if (role == DEFAULT_ADMIN_ROLE && account == _currentDefaultAdmin) {
if (role == DEFAULT_ADMIN_ROLE && account == defaultAdmin()) {
delete _currentDefaultAdmin;
}
super._revokeRole(role, account);

@ -44,14 +44,14 @@ abstract contract EIP712 is IERC5267 {
uint256 private immutable _cachedChainId;
address private immutable _cachedThis;
bytes32 private immutable _hashedName;
bytes32 private immutable _hashedVersion;
ShortString private immutable _name;
ShortString private immutable _version;
string private _nameFallback;
string private _versionFallback;
bytes32 private immutable _hashedName;
bytes32 private immutable _hashedVersion;
/**
* @dev Initializes the domain separator and parameter caches.
*

@ -171,7 +171,7 @@ Dynamic role allocation is often a desirable property, for example in systems wh
[[querying-privileged-accounts]]
=== Querying Privileged Accounts
Because accounts might <<granting-and-revoking, grant and revoke roles>> dynamically, it is not always possible to determine which accounts hold a particular role. This is important as it allows to prove certain properties about a system, such as that an administrative account is a multisig or a DAO, or that a certain role has been removed from all users, effectively disabling any associated functionality.
Because accounts might <<granting-and-revoking, grant and revoke roles>> dynamically, it is not always possible to determine which accounts hold a particular role. This is important as it allows proving certain properties about a system, such as that an administrative account is a multisig or a DAO, or that a certain role has been removed from all users, effectively disabling any associated functionality.
Under the hood, `AccessControl` uses `EnumerableSet`, a more powerful variant of Solidity's `mapping` type, which allows for key enumeration. `getRoleMemberCount` can be used to retrieve the number of accounts that have a particular role, and `getRoleMember` can then be called to get the address of each of these accounts.
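
For example, the holders of a role can be listed off-chain by iterating over these two getters. A minimal sketch, assuming a web3/truffle-style contract instance that exposes `getRoleMemberCount` and `getRoleMember` (the `getRoleMembers` helper below is hypothetical, not part of the library):

```javascript
// Hypothetical off-chain helper: list every account currently holding a role.
// Assumes `accessControl` is a deployed contract instance (web3/truffle style,
// as in this repository's tests) exposing the enumeration getters.
const MINTER_ROLE = web3.utils.soliditySha3('MINTER_ROLE');

async function getRoleMembers(accessControl, role) {
  const count = (await accessControl.getRoleMemberCount(role)).toNumber();
  const members = [];
  for (let i = 0; i < count; i++) {
    members.push(await accessControl.getRoleMember(role, i));
  }
  return members;
}

// usage: const minters = await getRoleMembers(accessControl, MINTER_ROLE);
```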

@ -76,6 +76,9 @@ module.exports = {
},
},
warnings: {
'contracts-exposed/**/*': {
'code-size': 'off',
},
'*': {
'code-size': withOptimizations,
'unused-param': !argv.coverage, // coverage causes unused-param warnings
@ -89,10 +92,11 @@ module.exports = {
},
},
exposed: {
initializers: true,
exclude: [
'vendor/**/*',
// overflow clash
'utils/Timers.sol',
// Exclude Timers from hardhat-exposed because its overloaded functions are not transformed correctly
'utils/Timers{,Upgradeable}.sol',
],
},
docgen: require('./docs/config'),

@ -0,0 +1,24 @@
const { HardhatError } = require('hardhat/internal/core/errors');
// Modifies `artifacts.require(X)` so that instead of X it loads the XUpgradeable contract.
// This allows us to run the same test suite on both the original and the transpiled and renamed Upgradeable contracts.
extendEnvironment(env => {
const artifactsRequire = env.artifacts.require;
env.artifacts.require = name => {
for (const suffix of ['UpgradeableWithInit', 'Upgradeable', '']) {
try {
return artifactsRequire(name + suffix);
} catch (e) {
// HH700: Artifact not found - from https://hardhat.org/hardhat-runner/docs/errors#HH700
if (HardhatError.isHardhatError(e) && e.number === 700 && suffix !== '') {
continue;
} else {
throw e;
}
}
}
throw new Error('Unreachable');
};
});

@ -0,0 +1,25 @@
const { internalTask } = require('hardhat/config');
const { TASK_TEST_GET_TEST_FILES } = require('hardhat/builtin-tasks/task-names');
// Modifies `hardhat test` to skip the proxy tests after proxies are removed by the transpiler for upgradeability.
internalTask(TASK_TEST_GET_TEST_FILES).setAction(async (args, hre, runSuper) => {
const path = require('path');
const { promises: fs } = require('fs');
const hasProxies = await fs
.access(path.join(hre.config.paths.sources, 'proxy/Proxy.sol'))
.then(() => true)
.catch(() => false);
const ignoredIfProxy = [
'proxy/beacon/BeaconProxy.test.js',
'proxy/beacon/UpgradeableBeacon.test.js',
'proxy/ERC1967/ERC1967Proxy.test.js',
'proxy/transparent/ProxyAdmin.test.js',
'proxy/transparent/TransparentUpgradeableProxy.test.js',
'proxy/utils/UUPSUpgradeable.test.js',
].map(p => path.join(hre.config.paths.tests, p));
return (await runSuper(args)).filter(file => hasProxies || !ignoredIfProxy.includes(file));
});

package-lock.json (generated, 27 changed lines)

@ -31,7 +31,7 @@
"glob": "^8.0.3",
"graphlib": "^2.1.8",
"hardhat": "^2.9.1",
"hardhat-exposed": "^0.3.2",
"hardhat-exposed": "^0.3.3",
"hardhat-gas-reporter": "^1.0.4",
"hardhat-ignore-warnings": "^0.2.0",
"keccak256": "^1.0.2",
@ -48,6 +48,7 @@
"solidity-ast": "^0.4.25",
"solidity-coverage": "^0.8.0",
"solidity-docgen": "^0.6.0-beta.29",
"undici": "^5.22.1",
"web3": "^1.3.0",
"yargs": "^17.0.0"
}
@ -8269,9 +8270,9 @@
}
},
"node_modules/hardhat-exposed": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/hardhat-exposed/-/hardhat-exposed-0.3.2.tgz",
"integrity": "sha512-qhi60I2bfjoPZwKgrY7BIpuUiBE7aC/bHN2MzHxPcZdxaeFnjKJ50n59LE7yK3GK2qYzE8DMjzqfnH6SlKPUjw==",
"version": "0.3.3",
"resolved": "https://registry.npmjs.org/hardhat-exposed/-/hardhat-exposed-0.3.3.tgz",
"integrity": "sha512-AFBM3IUlSU9wkt3886kYbCSzZteB4OQt0ciehPUrCgY/Tazn6g2cxsmoIZT8O8va1R2PtXd9PRC4KZ6WiSVXOw==",
"dev": true,
"dependencies": {
"micromatch": "^4.0.4",
@ -15175,9 +15176,9 @@
"dev": true
},
"node_modules/undici": {
"version": "5.22.0",
"resolved": "https://registry.npmjs.org/undici/-/undici-5.22.0.tgz",
"integrity": "sha512-fR9RXCc+6Dxav4P9VV/sp5w3eFiSdOjJYsbtWfd4s5L5C4ogyuVpdKIVHeW0vV1MloM65/f7W45nR9ZxwVdyiA==",
"version": "5.22.1",
"resolved": "https://registry.npmjs.org/undici/-/undici-5.22.1.tgz",
"integrity": "sha512-Ji2IJhFXZY0x/0tVBXeQwgPlLWw13GVzpsWPQ3rV50IFMMof2I55PZZxtm4P6iNq+L5znYN9nSTAq0ZyE6lSJw==",
"dev": true,
"dependencies": {
"busboy": "^1.6.0"
@ -22857,9 +22858,9 @@
}
},
"hardhat-exposed": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/hardhat-exposed/-/hardhat-exposed-0.3.2.tgz",
"integrity": "sha512-qhi60I2bfjoPZwKgrY7BIpuUiBE7aC/bHN2MzHxPcZdxaeFnjKJ50n59LE7yK3GK2qYzE8DMjzqfnH6SlKPUjw==",
"version": "0.3.3",
"resolved": "https://registry.npmjs.org/hardhat-exposed/-/hardhat-exposed-0.3.3.tgz",
"integrity": "sha512-AFBM3IUlSU9wkt3886kYbCSzZteB4OQt0ciehPUrCgY/Tazn6g2cxsmoIZT8O8va1R2PtXd9PRC4KZ6WiSVXOw==",
"dev": true,
"requires": {
"micromatch": "^4.0.4",
@ -27985,9 +27986,9 @@
"dev": true
},
"undici": {
"version": "5.22.0",
"resolved": "https://registry.npmjs.org/undici/-/undici-5.22.0.tgz",
"integrity": "sha512-fR9RXCc+6Dxav4P9VV/sp5w3eFiSdOjJYsbtWfd4s5L5C4ogyuVpdKIVHeW0vV1MloM65/f7W45nR9ZxwVdyiA==",
"version": "5.22.1",
"resolved": "https://registry.npmjs.org/undici/-/undici-5.22.1.tgz",
"integrity": "sha512-Ji2IJhFXZY0x/0tVBXeQwgPlLWw13GVzpsWPQ3rV50IFMMof2I55PZZxtm4P6iNq+L5znYN9nSTAq0ZyE6lSJw==",
"dev": true,
"requires": {
"busboy": "^1.6.0"

@ -72,7 +72,7 @@
"glob": "^8.0.3",
"graphlib": "^2.1.8",
"hardhat": "^2.9.1",
"hardhat-exposed": "^0.3.2",
"hardhat-exposed": "^0.3.3",
"hardhat-gas-reporter": "^1.0.4",
"hardhat-ignore-warnings": "^0.2.0",
"keccak256": "^1.0.2",
@ -89,6 +89,7 @@
"solidity-ast": "^0.4.25",
"solidity-coverage": "^0.8.0",
"solidity-docgen": "^0.6.0-beta.29",
"undici": "^5.22.1",
"web3": "^1.3.0",
"yargs": "^17.0.0"
}

@ -16,7 +16,7 @@ const { version } = require('../../package.json');
const [tag] = run('git', 'tag')
.split(/\r?\n/)
.filter(semver.coerce) // check version can be processed
.filter(v => semver.lt(semver.coerce(v), version)) // only consider older tags, ignore current prereleases
.filter(v => semver.satisfies(v, `< ${version}`)) // ignores prereleases unless currently a prerelease
.sort(semver.rcompare);
// Ordering tag → HEAD is important here.
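
The switch from `semver.lt` to `semver.satisfies` matters for prereleases: a plain `< version` range ignores prerelease tags unless the version being compared against is itself a prerelease, whereas `lt` would have counted a prerelease of the current version as an older tag. A minimal sketch of the assumed behaviour of the `semver` package:

```javascript
const semver = require('semver');

// Suppose the version being released is 4.9.0.
semver.lt('4.9.0-rc.0', '4.9.0');               // true  -> the old filter picks up the prerelease tag
semver.satisfies('4.9.0-rc.0', '< 4.9.0');      // false -> plain ranges skip prereleases
semver.satisfies('4.8.3', '< 4.9.0');           // true  -> regular older tags still match

// When the current version is itself a prerelease, the range contains a
// prerelease component and earlier prereleases of that version match again:
semver.satisfies('4.9.0-rc.0', '< 4.9.0-rc.1'); // true
```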

@ -1,7 +1,8 @@
const { readPreState } = require('@changesets/pre');
const { default: readChangesets } = require('@changesets/read');
const { join } = require('path');
const { version } = require(join(__dirname, '../../../package.json'));
const { fetch } = require('undici');
const { version, name: packageName } = require(join(__dirname, '../../../contracts/package.json'));
module.exports = async ({ github, context, core }) => {
const state = await getState({ github, context, core });
@ -34,8 +35,8 @@ function shouldRunChangesets({ isReleaseBranch, isPush, isWorkflowDispatch, botR
return (isReleaseBranch && isPush) || (isReleaseBranch && isWorkflowDispatch && botRun);
}
function shouldRunPublish({ isReleaseBranch, isPush, hasPendingChangesets }) {
return isReleaseBranch && isPush && !hasPendingChangesets;
function shouldRunPublish({ isReleaseBranch, isPush, hasPendingChangesets, isPublishedOnNpm }) {
return isReleaseBranch && isPush && !hasPendingChangesets && !isPublishedOnNpm;
}
function shouldRunMerge({
@ -80,6 +81,8 @@ async function getState({ github, context, core }) {
state.prBackExists = prs.length === 0;
state.isPublishedOnNpm = await isPublishedOnNpm(packageName, version);
// Log every state value in debug mode
if (core.isDebug()) for (const [key, value] of Object.entries(state)) core.debug(`${key}: ${value}`);
@ -102,3 +105,8 @@ async function readChangesetState(cwd = process.cwd()) {
changesets,
};
}
async function isPublishedOnNpm(package, version) {
const res = await fetch(`https://registry.npmjs.com/${package}/${version}`);
return res.ok;
}

@ -0,0 +1,21 @@
The upgradeable variant of OpenZeppelin Contracts is automatically generated from the original Solidity code. We call this process "transpilation" and it is implemented by our [Upgradeability Transpiler](https://github.com/OpenZeppelin/openzeppelin-transpiler/).
When the `master` branch or `release-v*` branches are updated, the code is transpiled and pushed to [OpenZeppelin/openzeppelin-contracts-upgradeable](https://github.com/OpenZeppelin/openzeppelin-contracts-upgradeable) by the `upgradeable.yml` workflow.
## `transpile.sh`
Applies patches and invokes the transpiler with the command line flags we need for our requirements (for example, excluding certain files).
## `transpile-onto.sh`
```
bash scripts/upgradeable/transpile-onto.sh <target> [<base>]
```
Transpiles the contents of the current git branch and commits the result as a new commit on branch `<target>`. If branch `<target>` doesn't exist, it will copy the commit history of `[<base>]` (this is used in GitHub Actions, but is usually not necessary locally).
## `patch-apply.sh` & `patch-save.sh`
Some of the upgradeable contract variants require ad-hoc changes that are not implemented by the transpiler. These changes are implemented by patches stored in `upgradeable.patch` in this directory. `patch-apply.sh` applies these patches.
If the patches fail to apply due to changes in the repo, the conflicts have to be resolved manually. Once fixed, `patch-save.sh` will take the changes staged in Git and update `upgradeable.patch` to match.

@ -0,0 +1,19 @@
#!/usr/bin/env bash
set -euo pipefail
DIRNAME="$(dirname -- "${BASH_SOURCE[0]}")"
PATCH="$DIRNAME/upgradeable.patch"
error() {
echo Error: "$*" >&2
exit 1
}
if ! git diff-files --quiet ":!$PATCH" || ! git diff-index --quiet HEAD ":!$PATCH"; then
error "Repository must have no staged or unstaged changes"
fi
if ! git apply -3 "$PATCH"; then
error "Fix conflicts and run $DIRNAME/patch-save.sh"
fi

@ -0,0 +1,18 @@
#!/usr/bin/env bash
set -euo pipefail
DIRNAME="$(dirname -- "${BASH_SOURCE[0]}")"
PATCH="$DIRNAME/upgradeable.patch"
error() {
echo Error: "$*" >&2
exit 1
}
if ! git diff-files --quiet ":!$PATCH"; then
error "Unstaged changes. Stage to include in patch or temporarily stash."
fi
git diff-index --cached --patch --output="$PATCH" HEAD
git restore --staged --worktree ":!$PATCH"

@ -0,0 +1,44 @@
#!/usr/bin/env bash
set -euo pipefail
if [ $# -lt 1 ]; then
echo "usage: bash $0 <target> [<base>]" >&2
exit 1
fi
set -x
target="$1"
base="${2-}"
bash scripts/upgradeable/transpile.sh
commit="$(git rev-parse --short HEAD)"
branch="$(git rev-parse --abbrev-ref HEAD)"
git add contracts
# detach from the current branch to avoid making changes to it
git checkout --quiet --detach
# switch to the target branch, creating it if necessary
if git rev-parse -q --verify "$target"; then
# if the branch exists, make it the current HEAD without checking out its contents
git reset --soft "$target"
git checkout "$target"
else
# if the branch doesn't exist, create it as an orphan and check it out
git checkout --orphan "$target"
if [ -n "$base" ] && git rev-parse -q --verify "$base"; then
# if base was specified and it exists, set it as the branch history
git reset --soft "$base"
fi
fi
# commit if there are changes to commit
if ! git diff --quiet --cached; then
git commit -m "Transpile $commit"
fi
git checkout "$branch"

@ -0,0 +1,35 @@
#!/usr/bin/env bash
set -euo pipefail -x
DIRNAME="$(dirname -- "${BASH_SOURCE[0]}")"
bash "$DIRNAME/patch-apply.sh"
npm run clean
npm run compile
build_info=($(jq -r '.input.sources | keys | if any(test("^contracts/mocks/.*\\bunreachable\\b")) then empty else input_filename end' artifacts/build-info/*))
build_info_num=${#build_info[@]}
if [ $build_info_num -ne 1 ]; then
echo "found $build_info_num relevant build info files but expected just 1"
exit 1
fi
# -D: delete original and excluded files
# -b: use this build info file
# -i: use included Initializable
# -x: exclude proxy-related contracts with a few exceptions
# -p: emit public initializer
npx @openzeppelin/upgrade-safe-transpiler@latest -D \
-b "$build_info" \
-i contracts/proxy/utils/Initializable.sol \
-x 'contracts-exposed/**/*' \
-x 'contracts/proxy/**/*' \
-x '!contracts/proxy/Clones.sol' \
-x '!contracts/proxy/ERC1967/ERC1967Storage.sol' \
-x '!contracts/proxy/ERC1967/ERC1967Upgrade.sol' \
-x '!contracts/proxy/utils/UUPSUpgradeable.sol' \
-x '!contracts/proxy/beacon/IBeacon.sol' \
-p 'contracts/**/presets/**/*'

@ -0,0 +1,481 @@
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
deleted file mode 100644
index 2797a088..00000000
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ /dev/null
@@ -1,21 +0,0 @@
----
-name: Bug report
-about: Report a bug in OpenZeppelin Contracts
-
----
-
-<!-- Briefly describe the issue you're experiencing. Tell us what you were trying to do and what happened instead. -->
-
-<!-- Remember, this is not a place to ask for help debugging code. For that, we welcome you in the OpenZeppelin Community Forum: https://forum.openzeppelin.com/. -->
-
-**💻 Environment**
-
-<!-- Tell us what version of OpenZeppelin Contracts you're using, and how you're using it: Truffle, Remix, etc. -->
-
-**📝 Details**
-
-<!-- Describe the problem you have been experiencing in more detail. Include as much information as you think is relevant. Keep in mind that transactions can fail for many reasons; context is key here. -->
-
-**🔢 Code to reproduce bug**
-
-<!-- We will be able to better help if you provide a minimal example that triggers the bug. -->
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index 4018cef2..d343a53d 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -1,4 +1,8 @@
+blank_issues_enabled: false
contact_links:
+ - name: Bug Reports & Feature Requests
+ url: https://github.com/OpenZeppelin/openzeppelin-contracts/issues/new/choose
+ about: Visit the OpenZeppelin Contracts repository
- name: Questions & Support Requests
url: https://forum.openzeppelin.com/c/support/contracts/18
about: Ask in the OpenZeppelin Forum
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
deleted file mode 100644
index ff596b0c..00000000
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ /dev/null
@@ -1,14 +0,0 @@
----
-name: Feature request
-about: Suggest an idea for OpenZeppelin Contracts
-
----
-
-**🧐 Motivation**
-<!-- Is your feature request related to a specific problem? Is it just a crazy idea? Tell us about it! -->
-
-**📝 Details**
-<!-- Please describe your feature request in detail. -->
-
-<!-- Make sure that you have reviewed the OpenZeppelin Contracts Contributor Guidelines. -->
-<!-- https://github.com/OpenZeppelin/openzeppelin-contracts/blob/master/CONTRIBUTING.md -->
diff --git a/README.md b/README.md
index 9fc95518..53130e3c 100644
--- a/README.md
+++ b/README.md
@@ -16,17 +16,20 @@
:building_construction: **Want to scale your decentralized application?** Check out [OpenZeppelin Defender](https://openzeppelin.com/defender) — a secure platform for automating and monitoring your operations.
+> **Note**
+> You are looking at the upgradeable variant of OpenZeppelin Contracts. Be sure to review the documentation on [Using OpenZeppelin Contracts with Upgrades](https://docs.openzeppelin.com/contracts/4.x/upgradeable).
+
## Overview
### Installation
```
-$ npm install @openzeppelin/contracts
+$ npm install @openzeppelin/contracts-upgradeable
```
OpenZeppelin Contracts features a [stable API](https://docs.openzeppelin.com/contracts/releases-stability#api-stability), which means that your contracts won't break unexpectedly when upgrading to a newer minor version.
-An alternative to npm is to use the GitHub repository (`openzeppelin/openzeppelin-contracts`) to retrieve the contracts. When doing this, make sure to specify the tag for a release such as `v4.5.0`, instead of using the `master` branch.
+An alternative to npm is to use the GitHub repository (`openzeppelin/openzeppelin-contracts-upgradeable`) to retrieve the contracts. When doing this, make sure to specify the tag for a release such as `v4.5.0`, instead of using the `master` branch.
### Usage
@@ -35,10 +38,11 @@ Once installed, you can use the contracts in the library by importing them:
```solidity
pragma solidity ^0.8.0;
-import "@openzeppelin/contracts/token/ERC721/ERC721.sol";
+import "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol";
-contract MyCollectible is ERC721 {
- constructor() ERC721("MyCollectible", "MCO") {
+contract MyCollectible is ERC721Upgradeable {
+ function initialize() initializer public {
+ __ERC721_init("MyCollectible", "MCO");
}
}
```
diff --git a/contracts/finance/VestingWallet.sol b/contracts/finance/VestingWallet.sol
index fe67eb54..d26ea4e1 100644
--- a/contracts/finance/VestingWallet.sol
+++ b/contracts/finance/VestingWallet.sol
@@ -15,6 +15,8 @@ import "../utils/Context.sol";
* Any token transferred to this contract will follow the vesting schedule as if they were locked from the beginning.
* Consequently, if the vesting has already started, any amount of tokens sent to this contract will (at least partly)
* be immediately releasable.
+ *
+ * @custom:storage-size 52
*/
contract VestingWallet is Context {
event EtherReleased(uint256 amount);
diff --git a/contracts/governance/TimelockControllerWith46Migration.sol b/contracts/governance/TimelockControllerWith46Migration.sol
new file mode 100644
index 00000000..3315e7bd
--- /dev/null
+++ b/contracts/governance/TimelockControllerWith46Migration.sol
@@ -0,0 +1,39 @@
+// SPDX-License-Identifier: MIT
+// OpenZeppelin Contracts v4.6.0 (governance/TimelockControllerWith46Migration.sol)
+
+pragma solidity ^0.8.0;
+
+import "./TimelockController.sol";
+
+/**
+ * @dev Extension of the TimelockController that includes an additional
+ * function to migrate from OpenZeppelin Upgradeable Contracts <4.6 to >=4.6.
+ *
+ * This migration is necessary to setup administration rights over the new
+ * `CANCELLER_ROLE`.
+ *
+ * The migration is trustless and can be performed by anyone.
+ *
+ * _Available since v4.6._
+ */
+contract TimelockControllerWith46Migration is TimelockController {
+ constructor(
+ uint256 minDelay,
+ address[] memory proposers,
+ address[] memory executors,
+ address admin
+ ) TimelockController(minDelay, proposers, executors, admin) {}
+
+ /**
+ * @dev Migration function. Running it is necessary for upgradeable
+ * instances that were initially setup with code <4.6 and that upgraded
+ * to >=4.6.
+ */
+ function migrateTo46() public virtual {
+ require(
+ getRoleAdmin(PROPOSER_ROLE) == TIMELOCK_ADMIN_ROLE && getRoleAdmin(CANCELLER_ROLE) == DEFAULT_ADMIN_ROLE,
+ "TimelockController: already migrated"
+ );
+ _setRoleAdmin(CANCELLER_ROLE, TIMELOCK_ADMIN_ROLE);
+ }
+}
diff --git a/contracts/governance/extensions/GovernorVotes.sol b/contracts/governance/extensions/GovernorVotes.sol
index 64431711..885f0e42 100644
--- a/contracts/governance/extensions/GovernorVotes.sol
+++ b/contracts/governance/extensions/GovernorVotes.sol
@@ -10,6 +10,8 @@ import "../../interfaces/IERC5805.sol";
* @dev Extension of {Governor} for voting weight extraction from an {ERC20Votes} token, or since v4.5 an {ERC721Votes} token.
*
* _Available since v4.3._
+ *
+ * @custom:storage-size 51
*/
abstract contract GovernorVotes is Governor {
IERC5805 public immutable token;
diff --git a/contracts/governance/extensions/GovernorVotesComp.sol b/contracts/governance/extensions/GovernorVotesComp.sol
index 17250ad7..1d26b72e 100644
--- a/contracts/governance/extensions/GovernorVotesComp.sol
+++ b/contracts/governance/extensions/GovernorVotesComp.sol
@@ -10,6 +10,8 @@ import "../../token/ERC20/extensions/ERC20VotesComp.sol";
* @dev Extension of {Governor} for voting weight extraction from a Comp token.
*
* _Available since v4.3._
+ *
+ * @custom:storage-size 51
*/
abstract contract GovernorVotesComp is Governor {
ERC20VotesComp public immutable token;
diff --git a/contracts/package.json b/contracts/package.json
index 55e70b17..ceefb984 100644
--- a/contracts/package.json
+++ b/contracts/package.json
@@ -1,5 +1,5 @@
{
- "name": "@openzeppelin/contracts",
+ "name": "@openzeppelin/contracts-upgradeable",
"description": "Secure Smart Contract library for Solidity",
"version": "4.8.2",
"files": [
@@ -13,7 +13,7 @@
},
"repository": {
"type": "git",
- "url": "https://github.com/OpenZeppelin/openzeppelin-contracts.git"
+ "url": "https://github.com/OpenZeppelin/openzeppelin-contracts-upgradeable.git"
},
"keywords": [
"solidity",
diff --git a/contracts/security/PullPayment.sol b/contracts/security/PullPayment.sol
index 65b4980f..f336592e 100644
--- a/contracts/security/PullPayment.sol
+++ b/contracts/security/PullPayment.sol
@@ -22,6 +22,8 @@ import "../utils/escrow/Escrow.sol";
* To use, derive from the `PullPayment` contract, and use {_asyncTransfer}
* instead of Solidity's `transfer` function. Payees can query their due
* payments with {payments}, and retrieve them with {withdrawPayments}.
+ *
+ * @custom:storage-size 51
*/
abstract contract PullPayment {
Escrow private immutable _escrow;
diff --git a/contracts/token/ERC20/extensions/ERC20Capped.sol b/contracts/token/ERC20/extensions/ERC20Capped.sol
index 16f830d1..9ef98148 100644
--- a/contracts/token/ERC20/extensions/ERC20Capped.sol
+++ b/contracts/token/ERC20/extensions/ERC20Capped.sol
@@ -7,6 +7,8 @@ import "../ERC20.sol";
/**
* @dev Extension of {ERC20} that adds a cap to the supply of tokens.
+ *
+ * @custom:storage-size 51
*/
abstract contract ERC20Capped is ERC20 {
uint256 private immutable _cap;
diff --git a/contracts/token/ERC20/extensions/ERC20Permit.sol b/contracts/token/ERC20/extensions/ERC20Permit.sol
index a357199b..9dc8e894 100644
--- a/contracts/token/ERC20/extensions/ERC20Permit.sol
+++ b/contracts/token/ERC20/extensions/ERC20Permit.sol
@@ -18,6 +18,8 @@ import "../../../utils/Counters.sol";
* need to send a transaction, and thus is not required to hold Ether at all.
*
* _Available since v3.4._
+ *
+ * @custom:storage-size 51
*/
abstract contract ERC20Permit is ERC20, IERC20Permit, EIP712 {
using Counters for Counters.Counter;
diff --git a/contracts/token/ERC20/extensions/ERC20Wrapper.sol b/contracts/token/ERC20/extensions/ERC20Wrapper.sol
index bfe782e4..7264fe32 100644
--- a/contracts/token/ERC20/extensions/ERC20Wrapper.sol
+++ b/contracts/token/ERC20/extensions/ERC20Wrapper.sol
@@ -14,6 +14,8 @@ import "../utils/SafeERC20.sol";
* wrapping of an existing "basic" ERC20 into a governance token.
*
* _Available since v4.2._
+ *
+ * @custom:storage-size 51
*/
abstract contract ERC20Wrapper is ERC20 {
IERC20 private immutable _underlying;
diff --git a/contracts/token/ERC20/utils/TokenTimelock.sol b/contracts/token/ERC20/utils/TokenTimelock.sol
index ed855b7b..3d30f59d 100644
--- a/contracts/token/ERC20/utils/TokenTimelock.sol
+++ b/contracts/token/ERC20/utils/TokenTimelock.sol
@@ -11,6 +11,8 @@ import "./SafeERC20.sol";
*
* Useful for simple vesting schedules like "advisors get all of their tokens
* after 1 year".
+ *
+ * @custom:storage-size 53
*/
contract TokenTimelock {
using SafeERC20 for IERC20;
diff --git a/contracts/utils/cryptography/EIP712.sol b/contracts/utils/cryptography/EIP712.sol
index 6a4e1cad..55d8eced 100644
--- a/contracts/utils/cryptography/EIP712.sol
+++ b/contracts/utils/cryptography/EIP712.sol
@@ -4,7 +4,6 @@
pragma solidity ^0.8.8;
import "./ECDSA.sol";
-import "../ShortStrings.sol";
import "../../interfaces/IERC5267.sol";
/**
@@ -30,27 +29,19 @@ import "../../interfaces/IERC5267.sol";
*
* _Available since v3.4._
*
- * @custom:oz-upgrades-unsafe-allow state-variable-immutable state-variable-assignment
+ * @custom:storage-size 52
*/
abstract contract EIP712 is IERC5267 {
- using ShortStrings for *;
-
bytes32 private constant _TYPE_HASH =
keccak256("EIP712Domain(string name,string version,uint256 chainId,address verifyingContract)");
- // Cache the domain separator as an immutable value, but also store the chain id that it corresponds to, in order to
- // invalidate the cached domain separator if the chain id changes.
- bytes32 private immutable _cachedDomainSeparator;
- uint256 private immutable _cachedChainId;
- address private immutable _cachedThis;
-
+ /// @custom:oz-renamed-from _HASHED_NAME
bytes32 private immutable _hashedName;
+ /// @custom:oz-renamed-from _HASHED_VERSION
bytes32 private immutable _hashedVersion;
- ShortString private immutable _name;
- ShortString private immutable _version;
- string private _nameFallback;
- string private _versionFallback;
+ string private _name;
+ string private _version;
/**
* @dev Initializes the domain separator and parameter caches.
@@ -65,29 +56,23 @@ abstract contract EIP712 is IERC5267 {
* contract upgrade].
*/
constructor(string memory name, string memory version) {
- _name = name.toShortStringWithFallback(_nameFallback);
- _version = version.toShortStringWithFallback(_versionFallback);
- _hashedName = keccak256(bytes(name));
- _hashedVersion = keccak256(bytes(version));
-
- _cachedChainId = block.chainid;
- _cachedDomainSeparator = _buildDomainSeparator();
- _cachedThis = address(this);
+ _name = name;
+ _version = version;
+
+ // Reset prior values in storage if upgrading
+ _hashedName = 0;
+ _hashedVersion = 0;
}
/**
* @dev Returns the domain separator for the current chain.
*/
function _domainSeparatorV4() internal view returns (bytes32) {
- if (address(this) == _cachedThis && block.chainid == _cachedChainId) {
- return _cachedDomainSeparator;
- } else {
- return _buildDomainSeparator();
- }
+ return _buildDomainSeparator();
}
function _buildDomainSeparator() private view returns (bytes32) {
- return keccak256(abi.encode(_TYPE_HASH, _hashedName, _hashedVersion, block.chainid, address(this)));
+ return keccak256(abi.encode(_TYPE_HASH, _EIP712NameHash(), _EIP712VersionHash(), block.chainid, address(this)));
}
/**
@@ -129,14 +114,80 @@ abstract contract EIP712 is IERC5267 {
uint256[] memory extensions
)
{
+ // If the hashed name and version in storage are non-zero, the contract hasn't been properly initialized
+ // and the EIP712 domain is not reliable, as it will be missing name and version.
+ require(_hashedName == 0 && _hashedVersion == 0, "EIP712: Uninitialized");
+
return (
hex"0f", // 01111
- _name.toStringWithFallback(_nameFallback),
- _version.toStringWithFallback(_versionFallback),
+ _EIP712Name(),
+ _EIP712Version(),
block.chainid,
address(this),
bytes32(0),
new uint256[](0)
);
}
+
+ /**
+ * @dev The name parameter for the EIP712 domain.
+ *
+ * NOTE: This function reads from storage by default, but can be redefined to return a constant value if gas costs
+ * are a concern.
+ */
+ function _EIP712Name() internal virtual view returns (string memory) {
+ return _name;
+ }
+
+ /**
+ * @dev The version parameter for the EIP712 domain.
+ *
+ * NOTE: This function reads from storage by default, but can be redefined to return a constant value if gas costs
+ * are a concern.
+ */
+ function _EIP712Version() internal virtual view returns (string memory) {
+ return _version;
+ }
+
+ /**
+ * @dev The hash of the name parameter for the EIP712 domain.
+ *
+ * NOTE: In previous versions this function was virtual. In this version you should override `_EIP712Name` instead.
+ */
+ function _EIP712NameHash() internal view returns (bytes32) {
+ string memory name = _EIP712Name();
+ if (bytes(name).length > 0) {
+ return keccak256(bytes(name));
+ } else {
+ // If the name is empty, the contract may have been upgraded without initializing the new storage.
+ // We return the name hash in storage if non-zero, otherwise we assume the name is empty by design.
+ bytes32 hashedName = _hashedName;
+ if (hashedName != 0) {
+ return hashedName;
+ } else {
+ return keccak256("");
+ }
+ }
+ }
+
+ /**
+ * @dev The hash of the version parameter for the EIP712 domain.
+ *
+ * NOTE: In previous versions this function was virtual. In this version you should override `_EIP712Version` instead.
+ */
+ function _EIP712VersionHash() internal view returns (bytes32) {
+ string memory version = _EIP712Version();
+ if (bytes(version).length > 0) {
+ return keccak256(bytes(version));
+ } else {
+ // If the version is empty, the contract may have been upgraded without initializing the new storage.
+ // We return the version hash in storage if non-zero, otherwise we assume the version is empty by design.
+ bytes32 hashedVersion = _hashedVersion;
+ if (hashedVersion != 0) {
+ return hashedVersion;
+ } else {
+ return keccak256("");
+ }
+ }
+ }
}
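The NOTE comments above describe the extension point in this transpiled variant: the name and version now live in regular storage, the domain separator is rebuilt on every call, and the hash functions are no longer virtual, so a consumer that cares about gas overrides `_EIP712Name` and `_EIP712Version` instead. The following is a minimal sketch of that pattern, not part of the patch; it assumes the transpiled contract is consumed as `EIP712Upgradeable` from the upgradeable package with the usual `__EIP712_init` initializer, and the contract name and string values are illustrative only.

// Illustrative consumer (hypothetical name and values); not part of upgradeable.patch.
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;

import {EIP712Upgradeable} from "@openzeppelin/contracts-upgradeable/utils/cryptography/EIP712Upgradeable.sol";

contract ConstantDomainVerifier is EIP712Upgradeable {
    function initialize() public initializer {
        // Sets up the EIP712 domain in the storage-based (transpiled) variant.
        __EIP712_init("ConstantDomainVerifier", "1");
    }

    // Returning compile-time constants skips the storage reads that the default
    // implementations of _EIP712Name/_EIP712Version perform on every signature check.
    function _EIP712Name() internal view virtual override returns (string memory) {
        return "ConstantDomainVerifier";
    }

    function _EIP712Version() internal view virtual override returns (string memory) {
        return "1";
    }
}

Even with constant overrides, `_buildDomainSeparator` still recomputes the separator on each call (this variant has no cache), but the name and version no longer cost storage reads.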
diff --git a/package.json b/package.json
index 8458dd61..b4672240 100644
--- a/package.json
+++ b/package.json
@@ -36,7 +36,7 @@
},
"repository": {
"type": "git",
- "url": "https://github.com/OpenZeppelin/openzeppelin-contracts.git"
+ "url": "https://github.com/OpenZeppelin/openzeppelin-contracts-upgradeable.git"
},
"keywords": [
"solidity",
diff --git a/test/utils/cryptography/EIP712.test.js b/test/utils/cryptography/EIP712.test.js
index 54a4e772..ba4602ed 100644
--- a/test/utils/cryptography/EIP712.test.js
+++ b/test/utils/cryptography/EIP712.test.js
@@ -47,26 +47,6 @@ contract('EIP712', function (accounts) {
const rebuildDomain = await getDomain(this.eip712);
expect(mapValues(rebuildDomain, String)).to.be.deep.equal(mapValues(this.domain, String));
});
-
- if (shortOrLong === 'short') {
- // Long strings are in storage, and the proxy will not be properly initialized unless
- // the upgradeable contract variant is used and the initializer is invoked.
-
- it('adjusts when behind proxy', async function () {
- const factory = await Clones.new();
- const cloneReceipt = await factory.$clone(this.eip712.address);
- const cloneAddress = cloneReceipt.logs.find(({ event }) => event === 'return$clone').args.instance;
- const clone = new EIP712Verifier(cloneAddress);
-
- const cloneDomain = { ...this.domain, verifyingContract: clone.address };
-
- const reportedDomain = await getDomain(clone);
- expect(mapValues(reportedDomain, String)).to.be.deep.equal(mapValues(cloneDomain, String));
-
- const expectedSeparator = await domainSeparator(cloneDomain);
- expect(await clone.$_domainSeparatorV4()).to.equal(expectedSeparator);
- });
- }
});
it('hash digest', async function () {
test/access/AccessControl.behavior.js
@@ -267,7 +267,7 @@ function shouldBehaveLikeAccessControlDefaultAdminRules(errorPrefix, delay, defa
[0, 'exactly when'],
[1, 'after'],
]) {
it(`returns pending admin and delay ${tag} delay schedule passes if not accepted`, async function () {
it(`returns pending admin and schedule ${tag} it passes if not accepted`, async function () {
// Wait until schedule + fromSchedule
const { schedule: firstSchedule } = await this.accessControl.pendingDefaultAdmin();
await time.setNextBlockTimestamp(firstSchedule.toNumber() + fromSchedule);
@@ -279,7 +279,7 @@ function shouldBehaveLikeAccessControlDefaultAdminRules(errorPrefix, delay, defa
});
}
it('returns 0 after delay schedule passes and the transfer was accepted', async function () {
it('returns 0 after schedule passes and the transfer was accepted', async function () {
// Wait after schedule
const { schedule: firstSchedule } = await this.accessControl.pendingDefaultAdmin();
await time.setNextBlockTimestamp(firstSchedule.addn(1));
@@ -621,14 +621,15 @@ function shouldBehaveLikeAccessControlDefaultAdminRules(errorPrefix, delay, defa
});
describe('renounces admin', function () {
let expectedSchedule;
let delayPassed;
let delayNotPassed;
beforeEach(async function () {
await this.accessControl.beginDefaultAdminTransfer(constants.ZERO_ADDRESS, { from: defaultAdmin });
delayPassed = web3.utils
.toBN(await time.latest())
.add(delay)
.addn(1);
expectedSchedule = web3.utils.toBN(await time.latest()).add(delay);
delayNotPassed = expectedSchedule;
delayPassed = expectedSchedule.addn(1);
});
it('reverts if caller is not default admin', async function () {
@@ -639,6 +640,15 @@ function shouldBehaveLikeAccessControlDefaultAdminRules(errorPrefix, delay, defa
);
});
it("renouncing the admin role when not an admin doesn't affect the schedule", async function () {
await time.setNextBlockTimestamp(delayPassed);
await this.accessControl.renounceRole(DEFAULT_ADMIN_ROLE, other, { from: other });
const { newAdmin, schedule } = await this.accessControl.pendingDefaultAdmin();
expect(newAdmin).to.equal(constants.ZERO_ADDRESS);
expect(schedule).to.be.bignumber.equal(expectedSchedule);
});
it('keeps defaultAdmin consistent with hasRole if another non-defaultAdmin user renounces the DEFAULT_ADMIN_ROLE', async function () {
await time.setNextBlockTimestamp(delayPassed);
@@ -660,6 +670,9 @@ function shouldBehaveLikeAccessControlDefaultAdminRules(errorPrefix, delay, defa
account: defaultAdmin,
});
expect(await this.accessControl.owner()).to.equal(constants.ZERO_ADDRESS);
const { newAdmin, schedule } = await this.accessControl.pendingDefaultAdmin();
expect(newAdmin).to.eq(ZERO_ADDRESS);
expect(schedule).to.be.bignumber.eq(ZERO);
});
it('allows to recover access using the internal _grantRole', async function () {
@@ -674,12 +687,6 @@ function shouldBehaveLikeAccessControlDefaultAdminRules(errorPrefix, delay, defa
});
describe('schedule not passed', function () {
let delayNotPassed;
beforeEach(function () {
delayNotPassed = delayPassed.subn(1);
});
for (const [fromSchedule, tag] of [
[-1, 'less'],
[0, 'equal'],
test/migrate-imports.test.js
@@ -13,6 +13,7 @@ describe('migrate-imports.js', function () {
try {
await fs.access(path.join('contracts', p), F_OK);
} catch (e) {
if (p.startsWith('proxy/')) continue; // excluded from transpilation of upgradeable contracts
await fs.access(path.join('contracts', getUpgradeablePath(p)), F_OK);
}
}
test/utils/Checkpoints.test.js
@@ -6,28 +6,48 @@ const { batchInBlock } = require('../helpers/txpool');
const $Checkpoints = artifacts.require('$Checkpoints');
// The library name may be 'Checkpoints' or 'CheckpointsUpgradeable'
const libraryName = $Checkpoints._json.contractName.replace(/^\$/, '');
const traceLengths = [160, 224];
const first = array => (array.length ? array[0] : undefined);
const last = array => (array.length ? array[array.length - 1] : undefined);
contract('Checkpoints', function () {
beforeEach(async function () {
this.mock = await $Checkpoints.new();
this.methods = { trace: {} };
this.methods.history = {
latest: (...args) => this.mock.methods[`$latest_${libraryName}_History(uint256)`](0, ...args),
latestCheckpoint: (...args) => this.mock.methods[`$latestCheckpoint_${libraryName}_History(uint256)`](0, ...args),
length: (...args) => this.mock.methods[`$length_${libraryName}_History(uint256)`](0, ...args),
push: (...args) => this.mock.methods['$push(uint256,uint256)'](0, ...args),
getAtBlock: (...args) => this.mock.$getAtBlock(0, ...args),
getAtRecentBlock: (...args) => this.mock.$getAtProbablyRecentBlock(0, ...args),
};
for (const length of traceLengths) {
this.methods.trace[length] = {
latest: (...args) => this.mock.methods[`$latest_${libraryName}_Trace${length}(uint256)`](0, ...args),
latestCheckpoint: (...args) =>
this.mock.methods[`$latestCheckpoint_${libraryName}_Trace${length}(uint256)`](0, ...args),
length: (...args) => this.mock.methods[`$length_${libraryName}_Trace${length}(uint256)`](0, ...args),
push: (...args) => this.mock.methods[`$push(uint256,uint${256 - length},uint${length})`](0, ...args),
lowerLookup: (...args) => this.mock.methods[`$lowerLookup(uint256,uint${256 - length})`](0, ...args),
upperLookup: (...args) => this.mock.methods[`$upperLookup(uint256,uint${256 - length})`](0, ...args),
upperLookupRecent: (...args) =>
this.mock.methods[`$upperLookupRecent(uint256,uint${256 - length})`](0, ...args),
};
}
});
describe('History checkpoints', function () {
const latest = (self, ...args) => self.methods['$latest_Checkpoints_History(uint256)'](0, ...args);
const latestCheckpoint = (self, ...args) =>
self.methods['$latestCheckpoint_Checkpoints_History(uint256)'](0, ...args);
const push = (self, ...args) => self.methods['$push(uint256,uint256)'](0, ...args);
const getAtBlock = (self, ...args) => self.methods['$getAtBlock(uint256,uint256)'](0, ...args);
const getAtRecentBlock = (self, ...args) => self.methods['$getAtProbablyRecentBlock(uint256,uint256)'](0, ...args);
const getLength = (self, ...args) => self.methods['$length_Checkpoints_History(uint256)'](0, ...args);
describe('without checkpoints', function () {
it('returns zero as latest value', async function () {
expect(await latest(this.mock)).to.be.bignumber.equal('0');
expect(await this.methods.history.latest()).to.be.bignumber.equal('0');
const ckpt = await latestCheckpoint(this.mock);
const ckpt = await this.methods.history.latestCheckpoint();
expect(ckpt[0]).to.be.equal(false);
expect(ckpt[1]).to.be.bignumber.equal('0');
expect(ckpt[2]).to.be.bignumber.equal('0');
@@ -35,49 +55,63 @@ contract('Checkpoints', function () {
it('returns zero as past value', async function () {
await time.advanceBlock();
expect(await getAtBlock(this.mock, (await web3.eth.getBlockNumber()) - 1)).to.be.bignumber.equal('0');
expect(await getAtRecentBlock(this.mock, (await web3.eth.getBlockNumber()) - 1)).to.be.bignumber.equal('0');
expect(await this.methods.history.getAtBlock((await web3.eth.getBlockNumber()) - 1)).to.be.bignumber.equal('0');
expect(
await this.methods.history.getAtRecentBlock((await web3.eth.getBlockNumber()) - 1),
).to.be.bignumber.equal('0');
});
});
describe('with checkpoints', function () {
beforeEach('pushing checkpoints', async function () {
this.tx1 = await push(this.mock, 1);
this.tx2 = await push(this.mock, 2);
this.tx1 = await this.methods.history.push(1);
this.tx2 = await this.methods.history.push(2);
await time.advanceBlock();
this.tx3 = await push(this.mock, 3);
this.tx3 = await this.methods.history.push(3);
await time.advanceBlock();
await time.advanceBlock();
});
it('returns latest value', async function () {
expect(await latest(this.mock)).to.be.bignumber.equal('3');
expect(await this.methods.history.latest()).to.be.bignumber.equal('3');
const ckpt = await latestCheckpoint(this.mock);
const ckpt = await this.methods.history.latestCheckpoint();
expect(ckpt[0]).to.be.equal(true);
expect(ckpt[1]).to.be.bignumber.equal(web3.utils.toBN(this.tx3.receipt.blockNumber));
expect(ckpt[2]).to.be.bignumber.equal(web3.utils.toBN('3'));
});
for (const getAtBlockVariant of [getAtBlock, getAtRecentBlock]) {
for (const getAtBlockVariant of ['getAtBlock', 'getAtRecentBlock']) {
describe(`lookup: ${getAtBlockVariant}`, function () {
it('returns past values', async function () {
expect(await getAtBlockVariant(this.mock, this.tx1.receipt.blockNumber - 1)).to.be.bignumber.equal('0');
expect(await getAtBlockVariant(this.mock, this.tx1.receipt.blockNumber)).to.be.bignumber.equal('1');
expect(await getAtBlockVariant(this.mock, this.tx2.receipt.blockNumber)).to.be.bignumber.equal('2');
expect(
await this.methods.history[getAtBlockVariant](this.tx1.receipt.blockNumber - 1),
).to.be.bignumber.equal('0');
expect(await this.methods.history[getAtBlockVariant](this.tx1.receipt.blockNumber)).to.be.bignumber.equal(
'1',
);
expect(await this.methods.history[getAtBlockVariant](this.tx2.receipt.blockNumber)).to.be.bignumber.equal(
'2',
);
// Block with no new checkpoints
expect(await getAtBlockVariant(this.mock, this.tx2.receipt.blockNumber + 1)).to.be.bignumber.equal('2');
expect(await getAtBlockVariant(this.mock, this.tx3.receipt.blockNumber)).to.be.bignumber.equal('3');
expect(await getAtBlockVariant(this.mock, this.tx3.receipt.blockNumber + 1)).to.be.bignumber.equal('3');
expect(
await this.methods.history[getAtBlockVariant](this.tx2.receipt.blockNumber + 1),
).to.be.bignumber.equal('2');
expect(await this.methods.history[getAtBlockVariant](this.tx3.receipt.blockNumber)).to.be.bignumber.equal(
'3',
);
expect(
await this.methods.history[getAtBlockVariant](this.tx3.receipt.blockNumber + 1),
).to.be.bignumber.equal('3');
});
it('reverts if block number >= current block', async function () {
await expectRevert(
getAtBlockVariant(this.mock, await web3.eth.getBlockNumber()),
this.methods.history[getAtBlockVariant](await web3.eth.getBlockNumber()),
'Checkpoints: block not yet mined',
);
await expectRevert(
getAtBlockVariant(this.mock, (await web3.eth.getBlockNumber()) + 1),
this.methods.history[getAtBlockVariant]((await web3.eth.getBlockNumber()) + 1),
'Checkpoints: block not yet mined',
);
});
@@ -85,58 +119,48 @@ contract('Checkpoints', function () {
}
it('multiple checkpoints in the same block', async function () {
const lengthBefore = await getLength(this.mock);
const lengthBefore = await this.methods.history.length();
await batchInBlock([
() => push(this.mock, 8, { gas: 100000 }),
() => push(this.mock, 9, { gas: 100000 }),
() => push(this.mock, 10, { gas: 100000 }),
() => this.methods.history.push(8, { gas: 100000 }),
() => this.methods.history.push(9, { gas: 100000 }),
() => this.methods.history.push(10, { gas: 100000 }),
]);
expect(await getLength(this.mock)).to.be.bignumber.equal(lengthBefore.addn(1));
expect(await latest(this.mock)).to.be.bignumber.equal('10');
expect(await this.methods.history.length()).to.be.bignumber.equal(lengthBefore.addn(1));
expect(await this.methods.history.latest()).to.be.bignumber.equal('10');
});
it('more than 5 checkpoints', async function () {
for (let i = 4; i <= 6; i++) {
await push(this.mock, i);
await this.methods.history.push(i);
}
expect(await getLength(this.mock)).to.be.bignumber.equal('6');
expect(await this.methods.history.length()).to.be.bignumber.equal('6');
const block = await web3.eth.getBlockNumber();
// recent
expect(await getAtRecentBlock(this.mock, block - 1)).to.be.bignumber.equal('5');
expect(await this.methods.history.getAtRecentBlock(block - 1)).to.be.bignumber.equal('5');
// non-recent
expect(await getAtRecentBlock(this.mock, block - 9)).to.be.bignumber.equal('0');
expect(await this.methods.history.getAtRecentBlock(block - 9)).to.be.bignumber.equal('0');
});
});
});
for (const length of [160, 224]) {
for (const length of traceLengths) {
describe(`Trace${length}`, function () {
const latest = (self, ...args) => self.methods[`$latest_Checkpoints_Trace${length}(uint256)`](0, ...args);
const latestCheckpoint = (self, ...args) =>
self.methods[`$latestCheckpoint_Checkpoints_Trace${length}(uint256)`](0, ...args);
const push = (self, ...args) => self.methods[`$push(uint256,uint${256 - length},uint${length})`](0, ...args);
const lowerLookup = (self, ...args) => self.methods[`$lowerLookup(uint256,uint${256 - length})`](0, ...args);
const upperLookup = (self, ...args) => self.methods[`$upperLookup(uint256,uint${256 - length})`](0, ...args);
const upperLookupRecent = (self, ...args) =>
self.methods[`$upperLookupRecent(uint256,uint${256 - length})`](0, ...args);
const getLength = (self, ...args) => self.methods[`$length_Checkpoints_Trace${length}(uint256)`](0, ...args);
describe('without checkpoints', function () {
it('returns zero as latest value', async function () {
expect(await latest(this.mock)).to.be.bignumber.equal('0');
expect(await this.methods.trace[length].latest()).to.be.bignumber.equal('0');
const ckpt = await latestCheckpoint(this.mock);
const ckpt = await this.methods.trace[length].latestCheckpoint();
expect(ckpt[0]).to.be.equal(false);
expect(ckpt[1]).to.be.bignumber.equal('0');
expect(ckpt[2]).to.be.bignumber.equal('0');
});
it('lookup returns 0', async function () {
expect(await lowerLookup(this.mock, 0)).to.be.bignumber.equal('0');
expect(await upperLookup(this.mock, 0)).to.be.bignumber.equal('0');
expect(await upperLookupRecent(this.mock, 0)).to.be.bignumber.equal('0');
expect(await this.methods.trace[length].lowerLookup(0)).to.be.bignumber.equal('0');
expect(await this.methods.trace[length].upperLookup(0)).to.be.bignumber.equal('0');
expect(await this.methods.trace[length].upperLookupRecent(0)).to.be.bignumber.equal('0');
});
});
@@ -150,46 +174,49 @@ contract('Checkpoints', function () {
{ key: '11', value: '99' },
];
for (const { key, value } of this.checkpoints) {
await push(this.mock, key, value);
await this.methods.trace[length].push(key, value);
}
});
it('length', async function () {
expect(await getLength(this.mock)).to.be.bignumber.equal(this.checkpoints.length.toString());
expect(await this.methods.trace[length].length()).to.be.bignumber.equal(this.checkpoints.length.toString());
});
it('returns latest value', async function () {
expect(await latest(this.mock)).to.be.bignumber.equal(last(this.checkpoints).value);
expect(await this.methods.trace[length].latest()).to.be.bignumber.equal(last(this.checkpoints).value);
const ckpt = await latestCheckpoint(this.mock);
const ckpt = await this.methods.trace[length].latestCheckpoint();
expect(ckpt[0]).to.be.equal(true);
expect(ckpt[1]).to.be.bignumber.equal(last(this.checkpoints).key);
expect(ckpt[2]).to.be.bignumber.equal(last(this.checkpoints).value);
});
it('cannot push values in the past', async function () {
await expectRevert(push(this.mock, last(this.checkpoints).key - 1, '0'), 'Checkpoint: decreasing keys');
await expectRevert(
this.methods.trace[length].push(last(this.checkpoints).key - 1, '0'),
'Checkpoint: decreasing keys',
);
});
it('can update last value', async function () {
const newValue = '42';
// check length before the update
expect(await getLength(this.mock)).to.be.bignumber.equal(this.checkpoints.length.toString());
expect(await this.methods.trace[length].length()).to.be.bignumber.equal(this.checkpoints.length.toString());
// update last key
await push(this.mock, last(this.checkpoints).key, newValue);
expect(await latest(this.mock)).to.be.bignumber.equal(newValue);
await this.methods.trace[length].push(last(this.checkpoints).key, newValue);
expect(await this.methods.trace[length].latest()).to.be.bignumber.equal(newValue);
// check that length did not change
expect(await getLength(this.mock)).to.be.bignumber.equal(this.checkpoints.length.toString());
expect(await this.methods.trace[length].length()).to.be.bignumber.equal(this.checkpoints.length.toString());
});
it('lower lookup', async function () {
for (let i = 0; i < 14; ++i) {
const value = first(this.checkpoints.filter(x => i <= x.key))?.value || '0';
expect(await lowerLookup(this.mock, i)).to.be.bignumber.equal(value);
expect(await this.methods.trace[length].lowerLookup(i)).to.be.bignumber.equal(value);
}
});
@@ -197,8 +224,8 @@ contract('Checkpoints', function () {
for (let i = 0; i < 14; ++i) {
const value = last(this.checkpoints.filter(x => i >= x.key))?.value || '0';
expect(await upperLookup(this.mock, i)).to.be.bignumber.equal(value);
expect(await upperLookupRecent(this.mock, i)).to.be.bignumber.equal(value);
expect(await this.methods.trace[length].upperLookup(i)).to.be.bignumber.equal(value);
expect(await this.methods.trace[length].upperLookupRecent(i)).to.be.bignumber.equal(value);
}
});
@@ -213,13 +240,13 @@ contract('Checkpoints', function () {
const allCheckpoints = [].concat(this.checkpoints, moreCheckpoints);
for (const { key, value } of moreCheckpoints) {
await push(this.mock, key, value);
await this.methods.trace[length].push(key, value);
}
for (let i = 0; i < 25; ++i) {
const value = last(allCheckpoints.filter(x => i >= x.key))?.value || '0';
expect(await upperLookup(this.mock, i)).to.be.bignumber.equal(value);
expect(await upperLookupRecent(this.mock, i)).to.be.bignumber.equal(value);
expect(await this.methods.trace[length].upperLookup(i)).to.be.bignumber.equal(value);
expect(await this.methods.trace[length].upperLookupRecent(i)).to.be.bignumber.equal(value);
}
});
});
test/utils/cryptography/EIP712.test.js
@@ -60,11 +60,11 @@ contract('EIP712', function (accounts) {
const cloneDomain = { ...this.domain, verifyingContract: clone.address };
const expectedSeparator = await domainSeparator(cloneDomain);
expect(await clone.$_domainSeparatorV4()).to.equal(expectedSeparator);
const reportedDomain = await getDomain(clone);
expect(mapValues(reportedDomain, String)).to.be.deep.equal(mapValues(cloneDomain, String));
const expectedSeparator = await domainSeparator(cloneDomain);
expect(await clone.$_domainSeparatorV4()).to.equal(expectedSeparator);
});
}
});
test/utils/structs/EnumerableMap.test.js
@@ -14,6 +14,9 @@ const getMethods = ms => {
);
};
// Get the name of the library. In the transpiled code it will be EnumerableMapUpgradeable.
const library = EnumerableMap._json.contractName.replace(/^\$/, '');
contract('EnumerableMap', function (accounts) {
const [accountA, accountB, accountC] = accounts;
@@ -41,14 +44,14 @@ contract('EnumerableMap', function (accounts) {
getWithMessage: '$get(uint256,address,string)',
tryGet: '$tryGet(uint256,address)',
remove: '$remove(uint256,address)',
length: '$length_EnumerableMap_AddressToUintMap(uint256)',
at: '$at_EnumerableMap_AddressToUintMap(uint256,uint256)',
length: `$length_${library}_AddressToUintMap(uint256)`,
at: `$at_${library}_AddressToUintMap(uint256,uint256)`,
contains: '$contains(uint256,address)',
keys: '$keys_EnumerableMap_AddressToUintMap(uint256)',
keys: `$keys_${library}_AddressToUintMap(uint256)`,
}),
{
setReturn: 'return$set_EnumerableMap_AddressToUintMap_address_uint256',
removeReturn: 'return$remove_EnumerableMap_AddressToUintMap_address',
setReturn: `return$set_${library}_AddressToUintMap_address_uint256`,
removeReturn: `return$remove_${library}_AddressToUintMap_address`,
},
);
});
@@ -61,18 +64,18 @@ contract('EnumerableMap', function (accounts) {
constants.ZERO_ADDRESS,
getMethods({
set: '$set(uint256,uint256,address)',
get: '$get_EnumerableMap_UintToAddressMap(uint256,uint256)',
getWithMessage: '$get_EnumerableMap_UintToAddressMap(uint256,uint256,string)',
tryGet: '$tryGet_EnumerableMap_UintToAddressMap(uint256,uint256)',
remove: '$remove_EnumerableMap_UintToAddressMap(uint256,uint256)',
length: '$length_EnumerableMap_UintToAddressMap(uint256)',
at: '$at_EnumerableMap_UintToAddressMap(uint256,uint256)',
contains: '$contains_EnumerableMap_UintToAddressMap(uint256,uint256)',
keys: '$keys_EnumerableMap_UintToAddressMap(uint256)',
get: `$get_${library}_UintToAddressMap(uint256,uint256)`,
getWithMessage: `$get_${library}_UintToAddressMap(uint256,uint256,string)`,
tryGet: `$tryGet_${library}_UintToAddressMap(uint256,uint256)`,
remove: `$remove_${library}_UintToAddressMap(uint256,uint256)`,
length: `$length_${library}_UintToAddressMap(uint256)`,
at: `$at_${library}_UintToAddressMap(uint256,uint256)`,
contains: `$contains_${library}_UintToAddressMap(uint256,uint256)`,
keys: `$keys_${library}_UintToAddressMap(uint256)`,
}),
{
setReturn: 'return$set_EnumerableMap_UintToAddressMap_uint256_address',
removeReturn: 'return$remove_EnumerableMap_UintToAddressMap_uint256',
setReturn: `return$set_${library}_UintToAddressMap_uint256_address`,
removeReturn: `return$remove_${library}_UintToAddressMap_uint256`,
},
);
});
@@ -85,18 +88,18 @@ contract('EnumerableMap', function (accounts) {
constants.ZERO_BYTES32,
getMethods({
set: '$set(uint256,bytes32,bytes32)',
get: '$get_EnumerableMap_Bytes32ToBytes32Map(uint256,bytes32)',
getWithMessage: '$get_EnumerableMap_Bytes32ToBytes32Map(uint256,bytes32,string)',
tryGet: '$tryGet_EnumerableMap_Bytes32ToBytes32Map(uint256,bytes32)',
remove: '$remove_EnumerableMap_Bytes32ToBytes32Map(uint256,bytes32)',
length: '$length_EnumerableMap_Bytes32ToBytes32Map(uint256)',
at: '$at_EnumerableMap_Bytes32ToBytes32Map(uint256,uint256)',
contains: '$contains_EnumerableMap_Bytes32ToBytes32Map(uint256,bytes32)',
keys: '$keys_EnumerableMap_Bytes32ToBytes32Map(uint256)',
get: `$get_${library}_Bytes32ToBytes32Map(uint256,bytes32)`,
getWithMessage: `$get_${library}_Bytes32ToBytes32Map(uint256,bytes32,string)`,
tryGet: `$tryGet_${library}_Bytes32ToBytes32Map(uint256,bytes32)`,
remove: `$remove_${library}_Bytes32ToBytes32Map(uint256,bytes32)`,
length: `$length_${library}_Bytes32ToBytes32Map(uint256)`,
at: `$at_${library}_Bytes32ToBytes32Map(uint256,uint256)`,
contains: `$contains_${library}_Bytes32ToBytes32Map(uint256,bytes32)`,
keys: `$keys_${library}_Bytes32ToBytes32Map(uint256)`,
}),
{
setReturn: 'return$set_EnumerableMap_Bytes32ToBytes32Map_bytes32_bytes32',
removeReturn: 'return$remove_EnumerableMap_Bytes32ToBytes32Map_bytes32',
setReturn: `return$set_${library}_Bytes32ToBytes32Map_bytes32_bytes32`,
removeReturn: `return$remove_${library}_Bytes32ToBytes32Map_bytes32`,
},
);
});
@@ -109,18 +112,18 @@ contract('EnumerableMap', function (accounts) {
new BN('0'),
getMethods({
set: '$set(uint256,uint256,uint256)',
get: '$get_EnumerableMap_UintToUintMap(uint256,uint256)',
getWithMessage: '$get_EnumerableMap_UintToUintMap(uint256,uint256,string)',
tryGet: '$tryGet_EnumerableMap_UintToUintMap(uint256,uint256)',
remove: '$remove_EnumerableMap_UintToUintMap(uint256,uint256)',
length: '$length_EnumerableMap_UintToUintMap(uint256)',
at: '$at_EnumerableMap_UintToUintMap(uint256,uint256)',
contains: '$contains_EnumerableMap_UintToUintMap(uint256,uint256)',
keys: '$keys_EnumerableMap_UintToUintMap(uint256)',
get: `$get_${library}_UintToUintMap(uint256,uint256)`,
getWithMessage: `$get_${library}_UintToUintMap(uint256,uint256,string)`,
tryGet: `$tryGet_${library}_UintToUintMap(uint256,uint256)`,
remove: `$remove_${library}_UintToUintMap(uint256,uint256)`,
length: `$length_${library}_UintToUintMap(uint256)`,
at: `$at_${library}_UintToUintMap(uint256,uint256)`,
contains: `$contains_${library}_UintToUintMap(uint256,uint256)`,
keys: `$keys_${library}_UintToUintMap(uint256)`,
}),
{
setReturn: 'return$set_EnumerableMap_UintToUintMap_uint256_uint256',
removeReturn: 'return$remove_EnumerableMap_UintToUintMap_uint256',
setReturn: `return$set_${library}_UintToUintMap_uint256_uint256`,
removeReturn: `return$remove_${library}_UintToUintMap_uint256`,
},
);
});
@@ -133,18 +136,18 @@ contract('EnumerableMap', function (accounts) {
new BN('0'),
getMethods({
set: '$set(uint256,bytes32,uint256)',
get: '$get_EnumerableMap_Bytes32ToUintMap(uint256,bytes32)',
getWithMessage: '$get_EnumerableMap_Bytes32ToUintMap(uint256,bytes32,string)',
tryGet: '$tryGet_EnumerableMap_Bytes32ToUintMap(uint256,bytes32)',
remove: '$remove_EnumerableMap_Bytes32ToUintMap(uint256,bytes32)',
length: '$length_EnumerableMap_Bytes32ToUintMap(uint256)',
at: '$at_EnumerableMap_Bytes32ToUintMap(uint256,uint256)',
contains: '$contains_EnumerableMap_Bytes32ToUintMap(uint256,bytes32)',
keys: '$keys_EnumerableMap_Bytes32ToUintMap(uint256)',
get: `$get_${library}_Bytes32ToUintMap(uint256,bytes32)`,
getWithMessage: `$get_${library}_Bytes32ToUintMap(uint256,bytes32,string)`,
tryGet: `$tryGet_${library}_Bytes32ToUintMap(uint256,bytes32)`,
remove: `$remove_${library}_Bytes32ToUintMap(uint256,bytes32)`,
length: `$length_${library}_Bytes32ToUintMap(uint256)`,
at: `$at_${library}_Bytes32ToUintMap(uint256,uint256)`,
contains: `$contains_${library}_Bytes32ToUintMap(uint256,bytes32)`,
keys: `$keys_${library}_Bytes32ToUintMap(uint256)`,
}),
{
setReturn: 'return$set_EnumerableMap_Bytes32ToUintMap_bytes32_uint256',
removeReturn: 'return$remove_EnumerableMap_Bytes32ToUintMap_bytes32',
setReturn: `return$set_${library}_Bytes32ToUintMap_bytes32_uint256`,
removeReturn: `return$remove_${library}_Bytes32ToUintMap_bytes32`,
},
);
});
test/utils/structs/EnumerableSet.test.js
@@ -12,6 +12,9 @@ const getMethods = ms => {
);
};
// Get the name of the library. In the transpiled code it will be EnumerableSetUpgradeable.
const library = EnumerableSet._json.contractName.replace(/^\$/, '');
contract('EnumerableSet', function (accounts) {
beforeEach(async function () {
this.set = await EnumerableSet.new();
@@ -25,13 +28,13 @@ contract('EnumerableSet', function (accounts) {
add: '$add(uint256,bytes32)',
remove: '$remove(uint256,bytes32)',
contains: '$contains(uint256,bytes32)',
length: '$length_EnumerableSet_Bytes32Set(uint256)',
at: '$at_EnumerableSet_Bytes32Set(uint256,uint256)',
values: '$values_EnumerableSet_Bytes32Set(uint256)',
length: `$length_${library}_Bytes32Set(uint256)`,
at: `$at_${library}_Bytes32Set(uint256,uint256)`,
values: `$values_${library}_Bytes32Set(uint256)`,
}),
{
addReturn: 'return$add_EnumerableSet_Bytes32Set_bytes32',
removeReturn: 'return$remove_EnumerableSet_Bytes32Set_bytes32',
addReturn: `return$add_${library}_Bytes32Set_bytes32`,
removeReturn: `return$remove_${library}_Bytes32Set_bytes32`,
},
);
});
@@ -44,13 +47,13 @@ contract('EnumerableSet', function (accounts) {
add: '$add(uint256,address)',
remove: '$remove(uint256,address)',
contains: '$contains(uint256,address)',
length: '$length_EnumerableSet_AddressSet(uint256)',
at: '$at_EnumerableSet_AddressSet(uint256,uint256)',
values: '$values_EnumerableSet_AddressSet(uint256)',
length: `$length_${library}_AddressSet(uint256)`,
at: `$at_${library}_AddressSet(uint256,uint256)`,
values: `$values_${library}_AddressSet(uint256)`,
}),
{
addReturn: 'return$add_EnumerableSet_AddressSet_address',
removeReturn: 'return$remove_EnumerableSet_AddressSet_address',
addReturn: `return$add_${library}_AddressSet_address`,
removeReturn: `return$remove_${library}_AddressSet_address`,
},
);
});
@@ -63,13 +66,13 @@ contract('EnumerableSet', function (accounts) {
add: '$add(uint256,uint256)',
remove: '$remove(uint256,uint256)',
contains: '$contains(uint256,uint256)',
length: '$length_EnumerableSet_UintSet(uint256)',
at: '$at_EnumerableSet_UintSet(uint256,uint256)',
values: '$values_EnumerableSet_UintSet(uint256)',
length: `$length_${library}_UintSet(uint256)`,
at: `$at_${library}_UintSet(uint256,uint256)`,
values: `$values_${library}_UintSet(uint256)`,
}),
{
addReturn: 'return$add_EnumerableSet_UintSet_uint256',
removeReturn: 'return$remove_EnumerableSet_UintSet_uint256',
addReturn: `return$add_${library}_UintSet_uint256`,
removeReturn: `return$remove_${library}_UintSet_uint256`,
},
);
});