diff --git a/docs/modules/ROOT/pages/api-hardhat-upgrades.adoc b/docs/modules/ROOT/pages/api-hardhat-upgrades.adoc index 001a2fb18..dfc345909 100644 --- a/docs/modules/ROOT/pages/api-hardhat-upgrades.adoc +++ b/docs/modules/ROOT/pages/api-hardhat-upgrades.adoc @@ -10,8 +10,8 @@ Creates a proxy given an ethers contract factory to use as implementation, and r Options are: * `initializer`: set a different initializer function to call, or specify `false` to disable initialization -* `unsafeAllowCustomTypes`: allows a deployment where structs or enums are used in the implementation contract (required since xref:faq.adoc#what-does-it-mean-for-an-implementation-to-be-compatible[storage compatibility validations] do not handle custom types, so make sure the change you are introducing is safe) * `unsafeAllowLinkedLibraries`: allows a deployment with external libraries linked to the implementation contract (required since the plugins do not support xref:faq.adoc#why-cant-i-use-external-libraries[external libraries] yet) +* `unsafeAllowCustomTypes`: an override for a few situations where structs or enums cannot be automatically verified for safety [source,ts] ---- @@ -29,8 +29,8 @@ Upgrades a proxy at a specified address to a new implementation contract, and re Options are: -* `unsafeAllowCustomTypes`: allows an upgrade where structs or enums are used in the implementation contract (required since xref:faq.adoc#what-does-it-mean-for-an-implementation-to-be-compatible[storage compatibility validations] do not handle custom types, so make sure the change you are introducing is safe) * `unsafeAllowLinkedLibraries`: allows an upgrade with external libraries linked to the implementation contract (required since the plugins do not support xref:faq.adoc#why-cant-i-use-external-libraries[external libraries] yet) +* `unsafeAllowCustomTypes`: an override for a few situations where structs or enums cannot be automatically verified for safety [source,ts] ---- @@ -48,8 +48,8 @@ Validates 
and deploys a new implementation contract, and returns its address. Us Options are: -* `unsafeAllowCustomTypes`: allows an upgrade where structs or enums are used in the implementation contract (required since xref:faq.adoc#what-does-it-mean-for-an-implementation-to-be-compatible[storage compatibility validations] do not handle custom types, so make sure the change you are introducing is safe) * `unsafeAllowLinkedLibraries`: allows an upgrade with external libraries linked to the implementation contract (required since the plugins do not support xref:faq.adoc#why-cant-i-use-external-libraries[external libraries] yet) +* `unsafeAllowCustomTypes`: an override for a few situations where structs or enums cannot be automatically verified for safety [source,ts] ---- diff --git a/docs/modules/ROOT/pages/api-truffle-upgrades.adoc b/docs/modules/ROOT/pages/api-truffle-upgrades.adoc index 086ee9fbe..a3bc2ed38 100644 --- a/docs/modules/ROOT/pages/api-truffle-upgrades.adoc +++ b/docs/modules/ROOT/pages/api-truffle-upgrades.adoc @@ -13,8 +13,8 @@ Options for this function are: * `initializer`: set a different initializer function to call, or specify `false` to disable initialization * `deployer`: set as the Truffle migration deployer during migrations -* `unsafeAllowCustomTypes`: allows a deployment where structs or enums are used in the implementation contract (required since xref:faq.adoc#what-does-it-mean-for-an-implementation-to-be-compatible[storage compatibility validations] do not handle custom types, so make sure the change you are introducing is safe) * `unsafeAllowLinkedLibraries`: allows a deployment with external libraries linked to the implementation contract (required since the plugins do not support xref:faq.adoc#why-cant-i-use-external-libraries[external libraries] yet) +* `unsafeAllowCustomTypes`: an override for a few situations where structs or enums cannot be automatically verified for safety [source,ts] ---- @@ -33,8 +33,8 @@ Upgrades a proxy at a specified 
address to a new implementation contract, and re Options for this function are: * `deployer`: set as the Truffle migration deployer during migrations -* `unsafeAllowCustomTypes`: allows an upgrade where structs or enums are used in the implementation contract (required since (required since xref:faq.adoc#what-does-it-mean-for-an-implementation-to-be-compatible[storage compatibility validations] do not handle custom types, so make sure the change you are introducing is safe) * `unsafeAllowLinkedLibraries`: allows an upgrade with external libraries linked to the implementation contract (required since the plugins do not support xref:faq.adoc#why-cant-i-use-external-libraries[external libraries] yet) +* `unsafeAllowCustomTypes`: an override for a few situations where structs or enums cannot be automatically verified for safety [source,ts] ---- @@ -53,8 +53,8 @@ Validates and deploys a new implementation contract, and returns its address. Us Options are: * `deployer`: set as the Truffle migration deployer during migrations -* `unsafeAllowCustomTypes`: allows an upgrade where structs or enums are used in the implementation contract (required since (required since xref:faq.adoc#what-does-it-mean-for-an-implementation-to-be-compatible[storage compatibility validations] do not handle custom types, so make sure the change you are introducing is safe) * `unsafeAllowLinkedLibraries`: allows an upgrade with external libraries linked to the implementation contract (required since the plugins do not support xref:faq.adoc#why-cant-i-use-external-libraries[external libraries] yet) +* `unsafeAllowCustomTypes`: an override for a few situations where structs or enums cannot be automatically verified for safety [source,ts] ---- diff --git a/docs/modules/ROOT/pages/faq.adoc b/docs/modules/ROOT/pages/faq.adoc index 0db0a3bf1..918580651 100644 --- a/docs/modules/ROOT/pages/faq.adoc +++ b/docs/modules/ROOT/pages/faq.adoc @@ -41,7 +41,7 @@ Both plugins will validate that the contract you 
are trying to deploy complies w When upgrading a proxy from one implementation to another, the _storage layout_ of both implementations must be compatible. This means that, even though you can completely change the code of the implementation, you cannot modify the existing contract state variables. The only operation allowed is to append new state variables after the ones already declared. -Both plugins will validate that the new implementation contract is compatible with the previous one. However, the plugins currently do not support validating custom types (enums or structs). To force a deployment where custom types are involved, set the `unsafeAllowCustomTypes` flag to true in the `deployProxy` or `upgradeProxy` calls. +Both plugins will validate that the new implementation contract is compatible with the previous one. You can read more about how to make storage-compatible changes to an implementation contract xref:upgrades::writing-upgradeable.adoc#modifying-your-contracts[here]. @@ -86,13 +86,11 @@ In the meantime you can deploy upgradeable contracts linked to external librarie You can follow or contribute to https://github.com/OpenZeppelin/openzeppelin-upgrades/issues/52[this issue in Github]. [[why-cant-i-use-custom-types]] -== Why can't I use custom types like structs and enums? +== Can I use custom types like structs and enums? -At the moment the plugins do not support upgradeable contracts that implement or make use of custom types like structs or enums in their code or linked libraries. This is because of the additional complexity of checking for storage layout incompatibilities during an upgrade. (See <>.) +Past versions of the plugins did not support upgradeable contracts that used custom types like structs or enums in their code or linked libraries. This is no longer the case for current versions of the plugins, and structs and enums will be automatically checked for compatibility when upgrading a contract.
-In the meantime, we encourage users to either avoid using these kind of types or to manually check for xref:upgrades::writing-upgradeable.adoc#modifying-your-contracts[storage incompatibilities] and make use of the `unsafeAllowCustomTypes` flag available for the `deployProxy`, `upgradeProxy` and `prepareUpgrade` functions. If you're unsure about how to do this manual check, we'll be happy to help out with your situation if you https://forum.openzeppelin.com[post in the forum]. - -You can follow or contribute to https://github.com/OpenZeppelin/openzeppelin-upgrades/issues/95[this issue in Github]. +Some users who have already deployed proxies with structs and/or enums and who need to upgrade those proxies may need to use the override flag `unsafeAllowCustomTypes` for their next upgrade, after which it will no longer be necessary. If the project contains the source code for the implementation currently in use by the proxy, the plugin will attempt to recover the metadata that it needs before the upgrade, falling back to the override flag if this is not possible. [[why-do-i-have-to-recompile-all-contracts-for-truffle]] == Why do I have to recompile all contracts for Truffle? 
diff --git a/packages/core/ava.config.js b/packages/core/ava.config.js index bd228c14d..e7fef5251 100644 --- a/packages/core/ava.config.js +++ b/packages/core/ava.config.js @@ -2,4 +2,7 @@ export default { verbose: true, ignoredByWatcher: ['**/*.{ts,map,tsbuildinfo}', 'artifacts', 'cache'], typescript: { rewritePaths: { 'src/': 'dist/' } }, + environmentVariables: { + FORCE_COLOR: '0', + }, }; diff --git a/packages/core/contracts/test/ManifestMigrate.sol b/packages/core/contracts/test/ManifestMigrate.sol new file mode 100644 index 000000000..f97ce961e --- /dev/null +++ b/packages/core/contracts/test/ManifestMigrate.sol @@ -0,0 +1,73 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.6.8; + +contract ManifestMigrateLayout { + struct T { + bool b; + } + + struct S { + uint x; + string s; + T t; + } + + enum E { + E1, + E2 + } + + uint x1; + T t1; + S s1; + string s2; + bool b1; + E e1; +} + +contract ManifestMigrateUnique is ManifestMigrateLayout { + function useAll() external { + x1 = 5; + t1 = T({ b: true }); + s1 = S({ x: 6, s: "ok", t: t1 }); + s2 = "no"; + b1 = false; + e1 = E.E2; + } +} + +contract ManifestMigrateUnambiguous0 is ManifestMigrateLayout { + function useAll() external { + x1 = 10; + t1 = T({ b: false }); + s1 = S({ x: 6, s: "string", t: t1 }); + s2 = "ok"; + b1 = false; + e1 = E.E1; + } +} + +// These two are expected to have the same bytecode modulo metadata. 
+contract ManifestMigrateUnambiguous1 is ManifestMigrateUnambiguous0 { } +contract ManifestMigrateUnambiguous2 is ManifestMigrateUnambiguous0 { } + +// These two are expected to have the same bytecode modulo metadata and similar +// layout, but different types (see struct D members); +contract ManifestMigrateAmbiguous1 is ManifestMigrateUnique { + struct D { + uint w; + } + D d1; + function test() external { + d1.w += 1; + } +} +contract ManifestMigrateAmbiguous2 is ManifestMigrateUnique { + struct D { + uint z; + } + D d1; + function test() external { + d1.z += 1; + } +} diff --git a/packages/core/contracts/test/Storage.sol b/packages/core/contracts/test/Storage.sol index d8ea08a7a..bb1c5ffc0 100644 --- a/packages/core/contracts/test/Storage.sol +++ b/packages/core/contracts/test/Storage.sol @@ -128,3 +128,234 @@ contract StorageUpgrade_Delete_V1 { contract StorageUpgrade_Delete_V2 { address x2; } + +contract StorageUpgrade_Struct_V1 { + struct StructInner { + uint y; + } + struct Struct1 { + uint x; + string s1; + string s2; + StructInner inner; + } + Struct1 data1; + Struct1 data2; + mapping (uint => Struct1) m; + Struct1[10] a1; + Struct1[10] a2; + Struct1 data3; +} + +contract StorageUpgrade_Struct_V2_Ok { + struct StructInner { + uint y; + } + struct Struct2 { + uint x; + string s1; + string s2; + StructInner inner; + } + struct Struct2Plus { + uint x; + string s1; + string s2; + StructInner inner; + uint z; + } + Struct2 data1; + Struct2 data2; + mapping (uint => Struct2Plus) m; + Struct2[10] a1; + Struct2[10] a2; + Struct2 data3; +} + +contract StorageUpgrade_Struct_V2_Bad { + struct StructInner { + uint y; + } + struct StructInnerPlus { + uint y; + uint z; + } + struct Struct2Minus { + uint x; + string s2; + StructInner inner; + } + struct Struct2Plus { + uint x; + string s1; + string s2; + StructInner inner; + uint z; + } + struct Struct2Changed { + string x; + uint s1; + string s2; + StructInnerPlus inner; + } + Struct2Minus data1; + Struct2Plus data2; + 
mapping (uint => Struct2Minus) m; + Struct2Minus[10] a1; + Struct2Plus[10] a2; + Struct2Changed data3; +} + +contract StorageUpgrade_Enum_V1 { + enum Enum1 { + A, + B + } + Enum1 data1; + Enum1 data2; + Enum1 data3; + Enum1 data4; +} + +contract StorageUpgrade_Enum_V2_Ok { + enum Enum2 { + A, + B + } + enum Enum2Larger { + A, + B, + C + } + Enum2Larger data1; + Enum2 data2; + Enum2 data3; + Enum2 data4; +} + +contract StorageUpgrade_Enum_V2_Bad { + enum Enum2Delete { + A + } + enum Enum2Replace { + A, + X + } + enum Enum2Insert { + X, + A, + B + } + Enum2Delete data1; + Enum2Replace data2; + Enum2Insert data3; + + enum Enum2TooLarge { + A, + B, + V3, V4, V5, V6, V7, V8, V9, V10, V11, V12, V13, V14, V15, V16, V17, V18, V19, V20, V21, V22, V23, V24, V25, V26, V27, V28, V29, V30, V31, V32, V33, V34, V35, V36, V37, V38, V39, V40, V41, V42, V43, V44, V45, V46, V47, V48, V49, V50, V51, V52, V53, V54, V55, V56, V57, V58, V59, V60, V61, V62, V63, V64, V65, V66, V67, V68, V69, V70, V71, V72, V73, V74, V75, V76, V77, V78, V79, V80, V81, V82, V83, V84, V85, V86, V87, V88, V89, V90, V91, V92, V93, V94, V95, V96, V97, V98, V99, V100, V101, V102, V103, V104, V105, V106, V107, V108, V109, V110, V111, V112, V113, V114, V115, V116, V117, V118, V119, V120, V121, V122, V123, V124, V125, V126, V127, V128, V129, V130, V131, V132, V133, V134, V135, V136, V137, V138, V139, V140, V141, V142, V143, V144, V145, V146, V147, V148, V149, V150, V151, V152, V153, V154, V155, V156, V157, V158, V159, V160, V161, V162, V163, V164, V165, V166, V167, V168, V169, V170, V171, V172, V173, V174, V175, V176, V177, V178, V179, V180, V181, V182, V183, V184, V185, V186, V187, V188, V189, V190, V191, V192, V193, V194, V195, V196, V197, V198, V199, V200, V201, V202, V203, V204, V205, V206, V207, V208, V209, V210, V211, V212, V213, V214, V215, V216, V217, V218, V219, V220, V221, V222, V223, V224, V225, V226, V227, V228, V229, V230, V231, V232, V233, V234, V235, V236, V237, V238, V239, V240, V241, V242, V243, 
V244, V245, V246, V247, V248, V249, V250, V251, V252, V253, V254, V255, V256, + OneTooMany + } + Enum2TooLarge data4; +} + +contract StorageUpgrade_Recursive_V1 { + struct Recursive { + mapping (uint => Recursive) s; + } + Recursive data; +} + +contract StorageUpgrade_Recursive_V2 { + struct Recursive { + mapping (uint => Recursive) s; + } + Recursive data; +} + +contract StorageUpgrade_Rename_V1 { + uint x1; + uint x2; + uint x3; +} + +contract StorageUpgrade_Rename_V2 { + uint x1; + uint renamed; + uint x3; +} + +contract StorageUpgrade_Replace_V1 { + uint x1; + uint x2; + uint x3; +} + +contract StorageUpgrade_Replace_V2 { + uint x1; + string renamed; + uint x3; +} + +contract StorageUpgrade_ObviousMismatch_V1 { + struct S { + uint m; + } + + uint x1; + S s1; + uint[] a1; +} + +contract StorageUpgrade_ObviousMismatch_V2_Bad { + string x1; + uint s1; + mapping (uint => uint) a1; +} + +contract StorageUpgrade_Contract_V1 { + StorageUpgrade_Contract_V1 data; +} + +contract StorageUpgrade_Contract_V2 { + StorageUpgrade_Contract_V2 data; +} + +contract StorageUpgrade_Array_V1 { + uint[20] x1; + uint[20] x2; + uint[20] x3; + uint[] x4; + mapping (uint => uint[20]) m; +} + +contract StorageUpgrade_Array_V2_Ok { + uint[20] x1; + uint[20] x2; + uint[20] x3; + uint[] x4; + mapping (uint => uint[25]) m; +} + +contract StorageUpgrade_Array_V2_Bad { + uint[15] x1; + uint[25] x2; + uint[] x3; + uint[20] x4; + mapping (uint => uint[15]) m; +} + +contract StorageUpgrade_Mapping_V1 { + mapping (uint => uint[10]) m1; + mapping (string => uint) m2; +} + +contract StorageUpgrade_Mapping_V2_Ok { + mapping (uint => uint[10]) m1; + mapping (string => uint) m2; +} + +contract StorageUpgrade_Mapping_V2_Bad { + mapping (uint => uint) m1; + mapping (bool => uint) m2; +} diff --git a/packages/core/contracts/test/Validations.sol b/packages/core/contracts/test/Validations.sol index babed58d6..d3f2a8ff1 100644 --- a/packages/core/contracts/test/Validations.sol +++ 
b/packages/core/contracts/test/Validations.sol @@ -55,60 +55,6 @@ import './ValidationsImport.sol'; contract ImportedParentHasStateVariableAssignment is ImportedHasStateVariableAssignment { } -contract HasStruct { - struct Foo { - bool bar; - } - - Foo foo; -} - -contract HasEnum { - enum Foo { BAR } -} - -contract ParentHasStruct is HasStruct {} -contract ParentHasEnum is HasEnum {} - -library LibraryWithEnum { - enum Animal { DOG, CAT } - - function isCat(Animal animal) internal pure returns (bool) { - return animal == Animal.CAT; - } -} - -library LibraryWithStruct { - struct Animal { - string kind; - uint age; - } - - function getAge(Animal memory animal) internal pure returns (uint) { - return animal.age; - } -} - -contract UsesLibraryWithStruct { - using LibraryWithStruct for LibraryWithStruct.Animal; - - LibraryWithStruct.Animal animal; - - function getAge() public view returns (uint) { - return animal.getAge(); - } -} - -contract UsesLibraryWithEnum { - using LibraryWithEnum for LibraryWithEnum.Animal; - - LibraryWithEnum.Animal animal; - - function isCat() public view returns (bool) { - return animal.isCat(); - } -} - // For each of 3 dimensions, libraries usage can be // 1. implicit or explicit (_use for_ directive or not) // 2. 
upgrade safe or unsafe diff --git a/packages/core/package.json b/packages/core/package.json index 12fffcdcb..7d722d1f3 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -20,7 +20,7 @@ "prepare:contracts": "hardhat compile", "prepublishOnly": "bash scripts/copy-artifacts.sh", "test": "tsc -b && hardhat compile --force && ava", - "test:watch": "hardhat compile --force && fgbg 'ava --watch' 'tsc -b --watch'", + "test:watch": "hardhat compile --force && fgbg 'ava --watch' 'tsc -b --watch' --", "version": "node ../../scripts/bump-changelog.js" }, "devDependencies": { @@ -51,6 +51,6 @@ "fp-ts": "^2.7.1", "io-ts": "^2.2.9", "proper-lockfile": "^4.1.1", - "solidity-ast": "^0.4.4" + "solidity-ast": "^0.4.15" } } diff --git a/packages/core/src/ast-dereferencer.ts b/packages/core/src/ast-dereferencer.ts new file mode 100644 index 000000000..2ae117b5a --- /dev/null +++ b/packages/core/src/ast-dereferencer.ts @@ -0,0 +1,42 @@ +import { findAll } from 'solidity-ast/utils'; +import type { Node, NodeType, NodeTypeMap } from 'solidity-ast/node'; + +import { curry2 } from './utils/curry'; +import type { SolcOutput } from './solc-api'; + +// An ASTDereferencer is a function that looks up an AST node given its id, in all of the source files involved in a +// solc run. It will generally be used together with the AST property `referencedDeclaration` (found in Identifier, +// UserDefinedTypeName, etc.) to look up a variable definition or type definition. 
+ +export interface ASTDereferencer { + (nodeTypes: T[]): (id: number) => NodeTypeMap[T]; + (nodeTypes: T[], id: number): NodeTypeMap[T]; +} + +export function astDereferencer(solcOutput: SolcOutput): ASTDereferencer { + const asts = Array.from(Object.values(solcOutput.sources), s => s.ast); + const cache = new Map(); + + function deref(nodeTypes: T[], id: number): NodeTypeMap[T] { + const cached = cache.get(id); + + if (cached) { + if ((nodeTypes as NodeType[]).includes(cached.nodeType)) { + return cached as NodeTypeMap[T]; + } + } + + for (const ast of asts) { + for (const node of findAll(nodeTypes, ast)) { + if (node.id === id) { + cache.set(id, node); + return node; + } + } + } + + throw new Error(`No node with id ${id} of type ${nodeTypes}`); + } + + return curry2(deref); +} diff --git a/packages/core/src/error.ts b/packages/core/src/error.ts index 68a802009..623d8d265 100644 --- a/packages/core/src/error.ts +++ b/packages/core/src/error.ts @@ -3,7 +3,7 @@ import chalk from 'chalk'; interface ErrorDescriptor { msg: (e: E) => string; - hint?: string; + hint?: (e: E) => string | undefined; link?: string; } @@ -15,7 +15,7 @@ function noDetails() { return ''; } -export abstract class UpgradesError extends Error { +export class UpgradesError extends Error { constructor(message: string, details = noDetails) { super(message + '\n\n' + details()); } diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index 942c9d8a7..354ee28de 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -10,4 +10,6 @@ export * from './solc-api'; export * from './deployment'; export * from './link-refs'; +export { getStorageLayoutForAddress } from './manifest-storage-layout'; + export * from './scripts/migrate-oz-cli-project'; diff --git a/packages/core/src/levenshtein.test.ts b/packages/core/src/levenshtein.test.ts index b83de4779..fa591e4b9 100644 --- a/packages/core/src/levenshtein.test.ts +++ b/packages/core/src/levenshtein.test.ts @@ -2,57 +2,90 @@ 
import test from 'ava'; import { levenshtein } from './levenshtein'; +const match = (a: T, b: T) => (a === b ? undefined : { kind: 'replace', original: a, updated: b }); + test('equal', t => { const a = [...'abc']; const b = [...'abc']; - const ops = levenshtein(a, b, (a, b) => (a === b ? 'equal' : 'different')); + const ops = levenshtein(a, b, match); t.deepEqual(ops, []); }); test('append', t => { const a = [...'abc']; const b = [...'abcd']; - const ops = levenshtein(a, b, (a, b) => (a === b ? 'equal' : 'different')); - t.deepEqual(ops, [ - { + const ops = levenshtein(a, b, match); + t.like(ops, { + length: 1, + 0: { kind: 'append', updated: 'd', }, - ]); + }); }); test('delete from end', t => { const a = [...'abcd']; const b = [...'abc']; - const ops = levenshtein(a, b, (a, b) => (a === b ? 'equal' : 'different')); - t.deepEqual(ops, [ - { + const ops = levenshtein(a, b, match); + t.like(ops, { + length: 1, + 0: { kind: 'delete', original: 'd', }, - ]); + }); }); test('delete from middle', t => { const a = [...'abc']; const b = [...'ac']; - const ops = levenshtein(a, b, (a, b) => (a === b ? 'equal' : 'different')); - t.deepEqual(ops, [ - { + const ops = levenshtein(a, b, match); + t.like(ops, { + length: 1, + 0: { kind: 'delete', original: 'b', }, - ]); + }); +}); + +test('delete from beginning', t => { + const a = [...'abc']; + const b = [...'bc']; + const ops = levenshtein(a, b, match); + t.like(ops, { + length: 1, + 0: { + kind: 'delete', + original: 'a', + }, + }); }); test('insert', t => { const a = [...'abc']; const b = [...'azbc']; - const ops = levenshtein(a, b, (a, b) => (a === b ? 
'equal' : 'different')); - t.deepEqual(ops, [ - { + const ops = levenshtein(a, b, match); + t.like(ops, { + length: 1, + 0: { kind: 'insert', updated: 'z', }, - ]); + }); +}); + +test('replace', t => { + const a = [...'abc']; + const b = [...'axc']; + const ops = levenshtein(a, b, match); + t.like(ops, { + length: 1, + 0: { + kind: 'replace', + original: 'b', + updated: 'x', + }, + }); }); diff --git a/packages/core/src/levenshtein.ts b/packages/core/src/levenshtein.ts index 662786eae..4fb8ef509 100644 --- a/packages/core/src/levenshtein.ts +++ b/packages/core/src/levenshtein.ts @@ -1,92 +1,105 @@ -export function levenshtein(a: T[], b: T[], match: Match): Operation[] { - const matrix = buildMatrix(a, b, (a, b) => match(a, b) === 'equal'); - return walkMatrix(matrix, a, b, match); -} +export type BasicOperation = + | { + kind: 'append' | 'insert'; + updated: T; + } + | { + kind: 'delete'; + original: T; + }; + +export type Operation = C | BasicOperation; -type Equal = (a: T, b: T) => boolean; +type GetChangeOp = (a: T, b: T) => C | undefined; -const SUBSTITUTION_COST = 3; +export function levenshtein(a: T[], b: T[], getChangeOp: GetChangeOp): Operation[] { + const matrix = buildMatrix(a, b, getChangeOp); + return buildOps(matrix, a, b); +} + +const CHANGE_COST = 3; const INSERTION_COST = 2; const DELETION_COST = 2; -// Adapted from https://gist.github.com/andrei-m/982927 by Andrei Mackenzie -function buildMatrix(a: T[], b: T[], eq: Equal): number[][] { - const matrix: number[][] = new Array(a.length + 1); +type MatrixOp = BasicOperation | { kind: 'change'; change: C } | { kind: 'nop' }; +type MatrixEntry = MatrixOp & { totalCost: number; predecessor?: MatrixEntry }; - type CostFunction = (i: number, j: number) => number; - const insertionCost: CostFunction = (i, j) => (j > a.length ? 0 : INSERTION_COST); - const substitutionCost: CostFunction = (i, j) => (eq(a[i - 1], b[j - 1]) ? 
0 : SUBSTITUTION_COST); - const deletionCost: CostFunction = () => DELETION_COST; +function buildMatrix(a: T[], b: T[], getChangeOp: GetChangeOp): MatrixEntry[][] { + // matrix[i][j] will contain the last operation that takes a.slice(0, i) to b.slice(0, j) + // The list of operations can be recovered following the predecessors as in buildOps - // increment along the first column of each row - for (let i = 0; i <= a.length; i++) { - matrix[i] = new Array(b.length + 1); - matrix[i][0] = i * deletionCost(i, 0); - } + const matrix: MatrixEntry[][] = new Array(a.length + 1); + + matrix[0] = new Array(b.length + 1); + matrix[0][0] = { kind: 'nop', totalCost: 0 }; - // increment each column in the first row + // Populate first row for (let j = 1; j <= b.length; j++) { - matrix[0][j] = matrix[0][j - 1] + insertionCost(0, j); + matrix[0][j] = insertion(0, j); } - // fill in the rest of the matrix + // Fill in the rest of the matrix for (let i = 1; i <= a.length; i++) { + matrix[i] = new Array(b.length + 1); + matrix[i][0] = deletion(i, 0); for (let j = 1; j <= b.length; j++) { - matrix[i][j] = Math.min( - matrix[i - 1][j - 1] + substitutionCost(i, j), - matrix[i][j - 1] + insertionCost(i, j), - matrix[i - 1][j] + deletionCost(i, j), - ); + matrix[i][j] = minBy([change(i, j), insertion(i, j), deletion(i, j)], e => e.totalCost); } } return matrix; -} -type Match = (a: T, b: T) => K | 'equal'; + // The different kinds of matrix entries are built by these helpers -export interface Operation { - kind: K | 'append' | 'insert' | 'delete'; - original?: T; - updated?: T; -} + function insertion(i: number, j: number): MatrixEntry { + const updated = b[j - 1]; + const predecessor = matrix[i][j - 1]; + const predCost = predecessor.totalCost; + if (j > a.length) { + return { kind: 'append', totalCost: predCost, predecessor, updated }; + } else { + return { kind: 'insert', totalCost: predCost + INSERTION_COST, predecessor, updated }; + } + } + + function deletion(i: number, j: number): 
MatrixEntry { + const original = a[i - 1]; + const predecessor = matrix[i - 1][j]; + const predCost = predecessor.totalCost; + return { kind: 'delete', totalCost: predCost + DELETION_COST, predecessor, original }; + } -// Walks an edit distance matrix, returning the sequence of operations performed -function walkMatrix(matrix: number[][], a: T[], b: T[], match: Match): Operation[] { - let i = matrix.length - 1; - let j = matrix[0].length - 1; - - const operations: Operation[] = []; - - while (i > 0 || j > 0) { - const cost = matrix[i][j]; - - const isAppend = j >= matrix.length; - const insertionCost = isAppend ? 0 : INSERTION_COST; - - const matchResult = i > 0 && j > 0 ? match(a[i - 1], b[j - 1]) : undefined; - const substitutionCost = matchResult === 'equal' ? 0 : SUBSTITUTION_COST; - - const original = i > 0 ? a[i - 1] : undefined; - const updated = j > 0 ? b[j - 1] : undefined; - - if (i > 0 && j > 0 && cost === matrix[i - 1][j - 1] + substitutionCost) { - if (matchResult !== 'equal') { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - operations.unshift({ kind: matchResult!, updated, original }); - } - i--; - j--; - } else if (j > 0 && cost === matrix[i][j - 1] + insertionCost) { - operations.unshift({ kind: isAppend ? 
'append' : 'insert', updated }); - j--; - } else if (i > 0 && cost === matrix[i - 1][j] + DELETION_COST) { - operations.unshift({ kind: 'delete', original }); - i--; + function change(i: number, j: number): MatrixEntry { + const original = a[i - 1]; + const updated = b[j - 1]; + const predecessor = matrix[i - 1][j - 1]; + const predCost = predecessor.totalCost; + const change = getChangeOp(original, updated); + if (change !== undefined) { + return { kind: 'change', totalCost: predCost + CHANGE_COST, predecessor, change }; } else { - throw Error(`Could not walk matrix at position ${i},${j}`); + return { kind: 'nop', totalCost: predCost, predecessor }; + } + } +} + +function minBy(arr: [T, ...T[]], value: (item: T) => number): T { + return arr.reduce((min, item) => (value(item) < value(min) ? item : min)); +} + +function buildOps(matrix: MatrixEntry[][], a: T[], b: T[]): Operation[] { + const ops: Operation[] = []; + + let entry: MatrixEntry | undefined = matrix[a.length][b.length]; + + while (entry !== undefined) { + if (entry.kind === 'change') { + ops.unshift(entry.change); + } else if (entry.kind !== 'nop') { + ops.unshift(entry); } + entry = entry.predecessor; } - return operations; + return ops; } diff --git a/packages/core/src/manifest-storage-layout.test.ts b/packages/core/src/manifest-storage-layout.test.ts new file mode 100644 index 000000000..4b367fb62 --- /dev/null +++ b/packages/core/src/manifest-storage-layout.test.ts @@ -0,0 +1,116 @@ +import _test, { TestInterface } from 'ava'; +import assert from 'assert'; + +import { artifacts } from 'hardhat'; + +import { solcInputOutputDecoder } from './src-decoder'; +import { validate, ValidationRunData } from './validate/run'; +import { getStorageLayout } from './validate/query'; +import { normalizeValidationData, ValidationData } from './validate/data'; +import { StorageLayout } from './storage/layout'; + +import { Manifest, ManifestData } from './manifest'; +import { getUpdatedStorageLayout, 
getStorageLayoutForAddress } from './manifest-storage-layout'; + +interface Context { + validationRun: ValidationRunData; + validationData: ValidationData; +} + +const test = _test as TestInterface; + +test.before(async t => { + const buildInfo = await artifacts.getBuildInfo('contracts/test/ManifestMigrate.sol:ManifestMigrateUnique'); + assert(buildInfo !== undefined, 'Build info not found'); + const decodeSrc = solcInputOutputDecoder(buildInfo.input, buildInfo.output); + t.context.validationRun = validate(buildInfo.output, decodeSrc); + t.context.validationData = normalizeValidationData([t.context.validationRun]); +}); + +test('getStorageLayoutForAddress - update layout', async t => { + const { version } = t.context.validationRun['ManifestMigrateUnique']; + assert(version !== undefined); + const updatedLayout = getStorageLayout(t.context.validationData, version); + const outdatedLayout = removeStorageLayoutMembers(updatedLayout); + const address = '0xCf7Ed3AccA5a467e9e704C703E8D87F634fB0Fc9'; + const manifest = mockManifest({ + manifestVersion: '3.1', + impls: { + c0b708c73eb888fc608e606f94eccb59e8b14170d9167dc00d7c90ce39ad72ea: { + address, + txHash: '0x6580b51f3edcacacf30d7b4140e4022b65d2a5ba7cbe7e4d91397f4c3b5e8a6b', + layout: outdatedLayout, + }, + }, + }); + const layout = await getStorageLayoutForAddress(manifest, t.context.validationData, address); + t.deepEqual(layout, updatedLayout); + t.like(manifest.data, { + impls: { + c0b708c73eb888fc608e606f94eccb59e8b14170d9167dc00d7c90ce39ad72ea: { + address, + txHash: '0x6580b51f3edcacacf30d7b4140e4022b65d2a5ba7cbe7e4d91397f4c3b5e8a6b', + layout: updatedLayout, + }, + }, + }); +}); + +test('getUpdatedLayout - unique layout match', async t => { + const { version } = t.context.validationRun['ManifestMigrateUnique']; + assert(version !== undefined); + const targetLayout = getStorageLayout(t.context.validationData, version); + const outdatedLayout = removeStorageLayoutMembers(targetLayout); + const updatedLayout = 
getUpdatedStorageLayout(t.context.validationData, version.withoutMetadata, outdatedLayout); + t.deepEqual(updatedLayout, targetLayout); +}); + +test('getUpdatedLayout - multiple unambiguous layout matches', async t => { + const { version: version1 } = t.context.validationRun['ManifestMigrateUnambiguous1']; + const { version: version2 } = t.context.validationRun['ManifestMigrateUnambiguous2']; + assert(version1 !== undefined && version2 !== undefined); + t.is(version1.withoutMetadata, version2.withoutMetadata, 'version is meant to be ambiguous'); + t.not(version1.withMetadata, version2.withMetadata, 'version with metadata should be different'); + const targetLayout = getStorageLayout(t.context.validationData, version1); + const outdatedLayout = removeStorageLayoutMembers(targetLayout); + const updatedLayout = getUpdatedStorageLayout(t.context.validationData, version1.withoutMetadata, outdatedLayout); + t.deepEqual(updatedLayout, targetLayout); +}); + +test('getUpdatedLayout - multiple ambiguous layout matches', async t => { + const { version: version1 } = t.context.validationRun['ManifestMigrateAmbiguous1']; + const { version: version2 } = t.context.validationRun['ManifestMigrateAmbiguous2']; + assert(version1 !== undefined && version2 !== undefined); + t.is(version1.withoutMetadata, version2.withoutMetadata, 'version is meant to be ambiguous'); + t.not(version1.withMetadata, version2.withMetadata, 'version with metadata should be different'); + const targetLayout = getStorageLayout(t.context.validationData, version1); + const outdatedLayout = removeStorageLayoutMembers(targetLayout); + const updatedLayout = getUpdatedStorageLayout(t.context.validationData, version1.withoutMetadata, outdatedLayout); + t.is(updatedLayout, undefined); +}); + +function mockManifest(data: ManifestData) { + type Mocked = 'read' | 'write' | 'lockedRun'; + const manifest: Pick & { data: ManifestData } = { + data, + async read() { + return this.data; + }, + async write(data: ManifestData) { 
+ this.data = data; + }, + async lockedRun(cb: () => Promise) { + return cb(); + }, + }; + return manifest as Manifest & { data: ManifestData }; +} + +// Simulate a layout from a version without struct/enum members +function removeStorageLayoutMembers(layout: StorageLayout): StorageLayout { + const res = { ...layout, types: { ...layout.types } }; + for (const id in res.types) { + res.types[id] = { ...layout.types[id], members: undefined }; + } + return res; +} diff --git a/packages/core/src/manifest-storage-layout.ts b/packages/core/src/manifest-storage-layout.ts new file mode 100644 index 000000000..37c818455 --- /dev/null +++ b/packages/core/src/manifest-storage-layout.ts @@ -0,0 +1,107 @@ +import { Manifest } from './manifest'; +import { isCurrentLayoutVersion } from './storage/extract'; +import { StorageLayout, isStructMembers } from './storage/layout'; +import { ValidationData } from './validate/data'; +import { findVersionWithoutMetadataMatches, unfoldStorageLayout } from './validate/query'; +import { stabilizeTypeIdentifier } from './utils/type-id'; +import { DeepArray, deepEqual } from './utils/deep-array'; + +export async function getStorageLayoutForAddress( + manifest: Manifest, + validations: ValidationData, + implAddress: string, +): Promise { + const data = await manifest.read(); + const versionWithoutMetadata = Object.keys(data.impls).find(v => data.impls[v]?.address === implAddress); + if (versionWithoutMetadata === undefined) { + throw new Error(`Deployment at address ${implAddress} is not registered`); + } + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const { layout } = data.impls[versionWithoutMetadata]!; + + if (isCurrentLayoutVersion(layout)) { + return layout; + } else { + const updatedLayout = getUpdatedStorageLayout(validations, versionWithoutMetadata, layout); + + if (updatedLayout === undefined) { + return layout; + } else { + await manifest.lockedRun(async () => { + const data = await manifest.read(); + // 
eslint-disable-next-line @typescript-eslint/no-non-null-assertion + data.impls[versionWithoutMetadata]!.layout = updatedLayout; + await manifest.write(data); + }); + return updatedLayout; + } + } +} + +// This function is used to retrieve the updated storage layout for an impl +// contract from the Manifest file, for which we only have the version hash +// without metadata and a storage layout. The storage layout is updated in the +// sense that it is updated to include newer information that hadn't been +// extracted when the impl contract was deployed, such as struct members. +export function getUpdatedStorageLayout( + data: ValidationData, + versionWithoutMetadata: string, + layout: StorageLayout, +): StorageLayout | undefined { + // In this map we store the candidate storage layouts, based on having the + // same versionWithoutMetadata and a sufficiently similar layout. The keys of + // the map are the versionWithMetadata of each contract, to avoid storing the + // same contract multiple times. + const sameLayout = new Map(); + + outer: for (const [contractName, validationRun] of findVersionWithoutMetadataMatches(data, versionWithoutMetadata)) { + const updatedLayout = unfoldStorageLayout(validationRun, contractName); + + // Check that the layout roughly matches the one we already have. + if (updatedLayout.storage.length !== layout.storage.length) { + continue; + } + for (const [i, item] of layout.storage.entries()) { + const updatedItem = updatedLayout.storage[i]; + if (item.label !== updatedItem.label) { + continue outer; + } + if (stabilizeTypeIdentifier(item.type) !== stabilizeTypeIdentifier(updatedItem.type)) { + continue outer; + } + } + + const { version } = validationRun[contractName]; + if (version && !sameLayout.has(version.withMetadata)) { + sameLayout.set(version.withMetadata, updatedLayout); + } + } + + if (sameLayout.size > 1) { + // Reject multiple matches unless they all have exactly the same layout. 
+ const typeMembers = new Map>(); + for (const { types } of sameLayout.values()) { + for (const [typeId, item] of Object.entries(types)) { + if (item.members === undefined) { + continue; + } + + const members: DeepArray = isStructMembers(item.members) + ? item.members.map(m => [stabilizeTypeIdentifier(m.type), m.label]) + : item.members; + const stableId = stabilizeTypeIdentifier(typeId); + + if (!typeMembers.has(stableId)) { + typeMembers.set(stableId, members); + } else if (!deepEqual(members, typeMembers.get(stableId))) { + return undefined; + } + } + } + } + + if (sameLayout.size > 0) { + const [updatedLayout] = sameLayout.values(); + return updatedLayout; + } +} diff --git a/packages/core/src/manifest.test.ts b/packages/core/src/manifest.test.ts index d78ec0c8e..574adca1f 100644 --- a/packages/core/src/manifest.test.ts +++ b/packages/core/src/manifest.test.ts @@ -1,5 +1,5 @@ import test from 'ava'; -import { Manifest, migrateManifest } from './manifest'; +import { Manifest } from './manifest'; test('manifest name for a known network', t => { const manifest = new Manifest(1); @@ -11,13 +11,3 @@ test('manifest name for an unknown network', t => { const manifest = new Manifest(id); t.is(manifest.file, `.openzeppelin/unknown-${id}.json`); }); - -test('manifest 3.0 → 3.1 version bump', t => { - const migratableManifest = { - manifestVersion: '3.0', - impls: {}, - }; - - const migratedManifest = migrateManifest(migratableManifest); - t.is(migratedManifest.manifestVersion, `3.1`); -}); diff --git a/packages/core/src/manifest.ts b/packages/core/src/manifest.ts index 9819f0b39..8c54e4be1 100644 --- a/packages/core/src/manifest.ts +++ b/packages/core/src/manifest.ts @@ -6,7 +6,7 @@ import lockfile from 'proper-lockfile'; import { compare as compareVersions } from 'compare-versions'; import type { Deployment } from './deployment'; -import { StorageLayout } from './storage'; +import type { StorageLayout } from './storage'; const currentManifestVersion = '3.1'; diff --git 
a/packages/core/src/scripts/migrate-oz-cli-project.ts b/packages/core/src/scripts/migrate-oz-cli-project.ts index 219b5db0a..b142ad2a3 100644 --- a/packages/core/src/scripts/migrate-oz-cli-project.ts +++ b/packages/core/src/scripts/migrate-oz-cli-project.ts @@ -3,7 +3,7 @@ import chalk from 'chalk'; import { promises as fs } from 'fs'; import { compare as compareVersions } from 'compare-versions'; import { ManifestData, ImplDeployment } from '../manifest'; -import type { StorageItem, StorageLayout, TypeItem, TypeItemMembers, StructMember } from '../storage'; +import type { StorageItem, StorageLayout, TypeItem, TypeItemMembers, StructMember } from '../storage/layout'; const OPEN_ZEPPELIN_FOLDER = '.openzeppelin'; const EXPORT_FILE = 'openzeppelin-cli-export.json'; diff --git a/packages/core/src/storage.test.ts b/packages/core/src/storage.test.ts index 0e23ee1d1..9340c905e 100644 --- a/packages/core/src/storage.test.ts +++ b/packages/core/src/storage.test.ts @@ -4,10 +4,14 @@ import { findAll } from 'solidity-ast/utils'; import { artifacts } from 'hardhat'; import { SolcOutput } from './solc-api'; -import { extractStorageLayout, getStorageUpgradeErrors, stabilizeTypeIdentifier, StorageLayout } from './storage'; +import { astDereferencer } from './ast-dereferencer'; +import { getStorageUpgradeErrors } from './storage'; +import { StorageLayout, isEnumMembers } from './storage/layout'; +import { extractStorageLayout } from './storage/extract'; +import { stabilizeTypeIdentifier } from './utils/type-id'; interface Context { - contracts: Record; + extractStorageLayout: (contract: string) => ReturnType; } const test = _test as TestInterface; @@ -18,62 +22,412 @@ test.before(async t => { throw new Error('Build info not found'); } const solcOutput: SolcOutput = buildInfo.output; - t.context.contracts = {}; + const contracts: Record = {}; for (const def of findAll('ContractDefinition', solcOutput.sources['contracts/test/Storage.sol'].ast)) { - t.context.contracts[def.name] = 
def; + contracts[def.name] = def; } + const deref = astDereferencer(solcOutput); + t.context.extractStorageLayout = name => extractStorageLayout(contracts[name], dummyDecodeSrc, deref); }); const dummyDecodeSrc = () => 'file.sol:1'; test('Storage1', t => { - const contract = 'Storage1'; - const def = t.context.contracts[contract]; - const layout = extractStorageLayout(def, dummyDecodeSrc); + const layout = t.context.extractStorageLayout('Storage1'); t.snapshot(stabilizeStorageLayout(layout)); }); test('Storage2', t => { - const contract = 'Storage2'; - const def = t.context.contracts[contract]; - const layout = extractStorageLayout(def, dummyDecodeSrc); + const layout = t.context.extractStorageLayout('Storage2'); t.snapshot(stabilizeStorageLayout(layout)); }); test('storage upgrade equal', t => { - const v1 = extractStorageLayout(t.context.contracts['StorageUpgrade_Equal_V1'], dummyDecodeSrc); - const v2 = extractStorageLayout(t.context.contracts['StorageUpgrade_Equal_V2'], dummyDecodeSrc); + const v1 = t.context.extractStorageLayout('StorageUpgrade_Equal_V1'); + const v2 = t.context.extractStorageLayout('StorageUpgrade_Equal_V2'); const comparison = getStorageUpgradeErrors(v1, v2); t.deepEqual(comparison, []); }); test('storage upgrade append', t => { - const v1 = extractStorageLayout(t.context.contracts['StorageUpgrade_Append_V1'], dummyDecodeSrc); - const v2 = extractStorageLayout(t.context.contracts['StorageUpgrade_Append_V2'], dummyDecodeSrc); + const v1 = t.context.extractStorageLayout('StorageUpgrade_Append_V1'); + const v2 = t.context.extractStorageLayout('StorageUpgrade_Append_V2'); const comparison = getStorageUpgradeErrors(v1, v2); t.deepEqual(comparison, []); }); test('storage upgrade delete', t => { - const v1 = extractStorageLayout(t.context.contracts['StorageUpgrade_Delete_V1'], dummyDecodeSrc); - const v2 = extractStorageLayout(t.context.contracts['StorageUpgrade_Delete_V2'], dummyDecodeSrc); + const v1 = 
t.context.extractStorageLayout('StorageUpgrade_Delete_V1'); + const v2 = t.context.extractStorageLayout('StorageUpgrade_Delete_V2'); const comparison = getStorageUpgradeErrors(v1, v2); - t.deepEqual(comparison, [ - { + t.like(comparison, { + length: 1, + 0: { kind: 'delete', original: { contract: 'StorageUpgrade_Delete_V1', label: 'x1', - type: 't_uint256', - src: 'file.sol:1', + type: { + id: 't_uint256', + }, }, }, - ]); + }); +}); + +test('storage upgrade replace', t => { + const v1 = t.context.extractStorageLayout('StorageUpgrade_Replace_V1'); + const v2 = t.context.extractStorageLayout('StorageUpgrade_Replace_V2'); + const comparison = getStorageUpgradeErrors(v1, v2); + t.like(comparison, { + length: 1, + 0: { + kind: 'replace', + original: { + contract: 'StorageUpgrade_Replace_V1', + label: 'x2', + type: { + id: 't_uint256', + }, + }, + updated: { + contract: 'StorageUpgrade_Replace_V2', + label: 'renamed', + type: { + id: 't_string_storage', + }, + }, + }, + }); +}); + +test('storage upgrade rename', t => { + const v1 = t.context.extractStorageLayout('StorageUpgrade_Rename_V1'); + const v2 = t.context.extractStorageLayout('StorageUpgrade_Rename_V2'); + const comparison = getStorageUpgradeErrors(v1, v2); + t.like(comparison, { + length: 1, + 0: { + kind: 'rename', + original: { + contract: 'StorageUpgrade_Rename_V1', + label: 'x2', + type: { + id: 't_uint256', + }, + }, + updated: { + contract: 'StorageUpgrade_Rename_V2', + label: 'renamed', + type: { + id: 't_uint256', + }, + }, + }, + }); +}); + +test('storage upgrade with obvious mismatch', t => { + const v1 = t.context.extractStorageLayout('StorageUpgrade_ObviousMismatch_V1'); + + const v2_Bad = t.context.extractStorageLayout('StorageUpgrade_ObviousMismatch_V2_Bad'); + t.like(getStorageUpgradeErrors(v1, v2_Bad), { + length: 3, + 0: { + kind: 'typechange', + change: { kind: 'obvious mismatch' }, + original: { label: 'x1' }, + updated: { label: 'x1' }, + }, + 1: { + kind: 'typechange', + change: { kind: 
'obvious mismatch' }, + original: { label: 's1' }, + updated: { label: 's1' }, + }, + 2: { + kind: 'typechange', + change: { kind: 'obvious mismatch' }, + original: { label: 'a1' }, + updated: { label: 'a1' }, + }, + }); +}); + +test('storage upgrade with structs', t => { + const v1 = t.context.extractStorageLayout('StorageUpgrade_Struct_V1'); + + const v2_Ok = t.context.extractStorageLayout('StorageUpgrade_Struct_V2_Ok'); + t.deepEqual(getStorageUpgradeErrors(v1, v2_Ok), []); + + const v2_Bad = t.context.extractStorageLayout('StorageUpgrade_Struct_V2_Bad'); + t.like(getStorageUpgradeErrors(v1, v2_Bad), { + length: 6, + 0: { + kind: 'typechange', + change: { + kind: 'struct members', + ops: { + length: 1, + 0: { kind: 'delete' }, + }, + }, + original: { label: 'data1' }, + updated: { label: 'data1' }, + }, + 1: { + kind: 'typechange', + change: { + kind: 'struct members', + ops: { + length: 1, + 0: { kind: 'append' }, + }, + }, + original: { label: 'data2' }, + updated: { label: 'data2' }, + }, + 2: { + kind: 'typechange', + change: { + kind: 'mapping value', + inner: { kind: 'struct members' }, + }, + original: { label: 'm' }, + updated: { label: 'm' }, + }, + 3: { + kind: 'typechange', + change: { + kind: 'array value', + inner: { kind: 'struct members' }, + }, + original: { label: 'a1' }, + updated: { label: 'a1' }, + }, + 4: { + kind: 'typechange', + change: { + kind: 'array value', + inner: { kind: 'struct members' }, + }, + original: { label: 'a2' }, + updated: { label: 'a2' }, + }, + 5: { + kind: 'typechange', + change: { + kind: 'struct members', + ops: { + length: 3, + 0: { kind: 'typechange' }, + 1: { kind: 'typechange' }, + 2: { kind: 'typechange' }, + }, + }, + original: { label: 'data3' }, + updated: { label: 'data3' }, + }, + }); +}); + +test('storage upgrade with missing struct members', t => { + const v1 = t.context.extractStorageLayout('StorageUpgrade_Struct_V1'); + const v2 = t.context.extractStorageLayout('StorageUpgrade_Struct_V2_Ok'); + + const 
t_struct = Object.keys(v1.types).find(t => stabilizeTypeIdentifier(t) === 't_struct(Struct1)storage'); + if (t_struct === undefined) { + throw new Error('Struct type not found'); + } + + // Simulate missing struct members + v1.types[t_struct].members = undefined; + + t.like(getStorageUpgradeErrors(v1, v2), { + 0: { + kind: 'typechange', + change: { kind: 'missing members' }, + original: { label: 'data1' }, + updated: { label: 'data1' }, + }, + }); + + t.deepEqual(getStorageUpgradeErrors(v1, v2, { unsafeAllowCustomTypes: true }), []); +}); + +test('storage upgrade with enums', t => { + const v1 = t.context.extractStorageLayout('StorageUpgrade_Enum_V1'); + + const v2_Ok = t.context.extractStorageLayout('StorageUpgrade_Enum_V2_Ok'); + t.deepEqual(getStorageUpgradeErrors(v1, v2_Ok), []); + + const v2_Bad = t.context.extractStorageLayout('StorageUpgrade_Enum_V2_Bad'); + t.like(getStorageUpgradeErrors(v1, v2_Bad), { + length: 4, + 0: { + kind: 'typechange', + change: { + kind: 'enum members', + ops: { + length: 1, + 0: { kind: 'delete' }, + }, + }, + original: { label: 'data1' }, + updated: { label: 'data1' }, + }, + 1: { + kind: 'typechange', + change: { + kind: 'enum members', + ops: { + length: 1, + 0: { kind: 'replace', original: 'B', updated: 'X' }, + }, + }, + original: { label: 'data2' }, + updated: { label: 'data2' }, + }, + 2: { + kind: 'typechange', + change: { + kind: 'enum members', + ops: { + length: 1, + 0: { kind: 'insert', updated: 'X' }, + }, + }, + original: { label: 'data3' }, + updated: { label: 'data3' }, + }, + 3: { + kind: 'typechange', + change: { kind: 'enum resize' }, + original: { label: 'data4' }, + updated: { label: 'data4' }, + }, + }); +}); + +test('storage upgrade with missing enum members', t => { + const v1 = t.context.extractStorageLayout('StorageUpgrade_Enum_V1'); + const v2 = t.context.extractStorageLayout('StorageUpgrade_Enum_V2_Ok'); + + const t_enum = Object.keys(v1.types).find(t => stabilizeTypeIdentifier(t) === 't_enum(Enum1)'); 
+ if (t_enum === undefined) { + throw new Error('Enum type not found'); + } + + // Simulate missing enum members + v1.types[t_enum].members = undefined; + + t.like(getStorageUpgradeErrors(v1, v2), { + 0: { + kind: 'typechange', + change: { kind: 'missing members' }, + original: { label: 'data1' }, + updated: { label: 'data1' }, + }, + }); + + t.deepEqual(getStorageUpgradeErrors(v1, v2, { unsafeAllowCustomTypes: true }), []); +}); + +test('storage upgrade with recursive type', t => { + const v1 = t.context.extractStorageLayout('StorageUpgrade_Recursive_V1'); + const v2 = t.context.extractStorageLayout('StorageUpgrade_Recursive_V2'); + const e = t.throws(() => getStorageUpgradeErrors(v1, v2)); + t.true(e.message.includes('Recursion found')); +}); + +test('storage upgrade with contract type', t => { + const v1 = t.context.extractStorageLayout('StorageUpgrade_Contract_V1'); + const v2 = t.context.extractStorageLayout('StorageUpgrade_Contract_V2'); + t.deepEqual(getStorageUpgradeErrors(v1, v2), []); +}); + +test('storage upgrade with arrays', t => { + const v1 = t.context.extractStorageLayout('StorageUpgrade_Array_V1'); + + const v2_Ok = t.context.extractStorageLayout('StorageUpgrade_Array_V2_Ok'); + t.deepEqual(getStorageUpgradeErrors(v1, v2_Ok), []); + + const v2_Bad = t.context.extractStorageLayout('StorageUpgrade_Array_V2_Bad'); + t.like(getStorageUpgradeErrors(v1, v2_Bad), { + length: 5, + 0: { + kind: 'typechange', + change: { kind: 'array shrink' }, + original: { label: 'x1' }, + updated: { label: 'x1' }, + }, + 1: { + kind: 'typechange', + change: { kind: 'array grow' }, + original: { label: 'x2' }, + updated: { label: 'x2' }, + }, + 2: { + kind: 'typechange', + change: { kind: 'array dynamic' }, + original: { label: 'x3' }, + updated: { label: 'x3' }, + }, + 3: { + kind: 'typechange', + change: { kind: 'array dynamic' }, + original: { label: 'x4' }, + updated: { label: 'x4' }, + }, + 4: { + kind: 'typechange', + change: { + kind: 'mapping value', + inner: { 
kind: 'array shrink' }, + }, + original: { label: 'm' }, + updated: { label: 'm' }, + }, + }); +}); + +test('storage upgrade with mappings', t => { + const v1 = t.context.extractStorageLayout('StorageUpgrade_Mapping_V1'); + + const v2_Ok = t.context.extractStorageLayout('StorageUpgrade_Mapping_V2_Ok'); + t.deepEqual(getStorageUpgradeErrors(v1, v2_Ok), []); + + const v2_Bad = t.context.extractStorageLayout('StorageUpgrade_Mapping_V2_Bad'); + t.like(getStorageUpgradeErrors(v1, v2_Bad), { + length: 2, + 0: { + kind: 'typechange', + change: { + kind: 'mapping value', + inner: { kind: 'obvious mismatch' }, + }, + original: { label: 'm1' }, + updated: { label: 'm1' }, + }, + 1: { + kind: 'typechange', + change: { kind: 'mapping key' }, + original: { label: 'm2' }, + updated: { label: 'm2' }, + }, + }); }); function stabilizeStorageLayout(layout: StorageLayout) { return { storage: layout.storage.map(s => ({ ...s, type: stabilizeTypeIdentifier(s.type) })), - types: Object.entries(layout.types).map(([type, item]) => [stabilizeTypeIdentifier(type), item]), + types: Object.entries(layout.types).map(([type, item]) => { + const members = + item.members && + (isEnumMembers(item.members) + ? item.members + : item.members.map(m => ({ ...m, type: stabilizeTypeIdentifier(m.type) }))); + return [stabilizeTypeIdentifier(type), { ...item, members }]; + }), }; } diff --git a/packages/core/src/storage.test.ts.md b/packages/core/src/storage.test.ts.md index 7b52ce7f2..085452987 100644 --- a/packages/core/src/storage.test.ts.md +++ b/packages/core/src/storage.test.ts.md @@ -40,24 +40,28 @@ Generated by [AVA](https://avajs.dev). 
't_uint256', { label: 'uint256', + members: undefined, }, ], [ 't_address', { label: 'address', + members: undefined, }, ], [ 't_mapping(t_uint256,t_uint256)', { label: 'mapping(uint256 => uint256)', + members: undefined, }, ], [ 't_array(t_uint256)3_storage', { label: 'uint256[3]', + members: undefined, }, ], ], @@ -309,222 +313,330 @@ Generated by [AVA](https://avajs.dev). 't_uint256', { label: 'uint256', + members: undefined, }, ], [ 't_string_storage', { label: 'string', + members: undefined, }, ], [ 't_uint8', { label: 'uint8', + members: undefined, }, ], [ 't_int8', { label: 'int8', + members: undefined, }, ], [ 't_bool', { label: 'bool', + members: undefined, }, ], [ 't_address', { label: 'address', + members: undefined, }, ], [ 't_bytes_storage', { label: 'bytes', + members: undefined, }, ], [ 't_bytes8', { label: 'bytes8', + members: undefined, }, ], [ 't_bytes32', { label: 'bytes32', + members: undefined, }, ], [ 't_array(t_uint256)dyn_storage', { label: 'uint256[]', + members: undefined, }, ], [ 't_array(t_string_storage)dyn_storage', { label: 'string[]', + members: undefined, }, ], [ 't_array(t_address)dyn_storage', { label: 'address[]', + members: undefined, }, ], [ 't_array(t_int8)10_storage', { label: 'int8[10]', + members: undefined, }, ], [ 't_array(t_bool)20_storage', { label: 'bool[20]', + members: undefined, }, ], [ 't_array(t_uint256)30_storage', { label: 'uint256[30]', + members: undefined, }, ], [ 't_mapping(t_uint256,t_string_storage)', { label: 'mapping(uint256 => string)', + members: undefined, }, ], [ 't_mapping(t_uint256,t_mapping(t_string_memory_ptr,t_address))', { label: 'mapping(uint256 => mapping(string => address))', + members: undefined, + }, + ], + [ + 't_mapping(t_string_memory_ptr,t_address)', + { + label: 'mapping(string => address)', + members: undefined, }, ], [ 't_mapping(t_uint256,t_array(t_bool)dyn_storage)', { label: 'mapping(uint256 => bool[])', + members: undefined, + }, + ], + [ + 't_array(t_bool)dyn_storage', + { + 
label: 'bool[]', + members: undefined, }, ], [ 't_function_internal_nonpayable(t_uint256)returns()', { label: 'function (uint256)', + members: undefined, }, ], [ 't_array(t_function_internal_nonpayable(t_string_memory_ptr,t_string_memory_ptr)returns())dyn_storage', { label: 'function (string,string)[]', + members: undefined, + }, + ], + [ + 't_function_internal_nonpayable(t_string_memory_ptr,t_string_memory_ptr)returns()', + { + label: 'function (string,string)', + members: undefined, }, ], [ 't_array(t_function_internal_nonpayable(t_uint256)returns(t_address))10_storage', { label: 'function (uint256) returns (address)[10]', + members: undefined, + }, + ], + [ + 't_function_internal_nonpayable(t_uint256)returns(t_address)', + { + label: 'function (uint256) returns (address)', + members: undefined, }, ], [ 't_mapping(t_uint256,t_function_internal_nonpayable(t_bool)returns())', { label: 'mapping(uint256 => function (bool))', + members: undefined, + }, + ], + [ + 't_function_internal_nonpayable(t_bool)returns()', + { + label: 'function (bool)', + members: undefined, }, ], [ 't_contract(Storage2)', { label: 'contract Storage2', + members: undefined, }, ], [ 't_array(t_contract(Storage2))dyn_storage', { label: 'contract Storage2[]', + members: undefined, }, ], [ 't_array(t_contract(Storage2))10_storage', { label: 'contract Storage2[10]', + members: undefined, }, ], [ 't_mapping(t_uint256,t_contract(Storage2))', { label: 'mapping(uint256 => contract Storage2)', + members: undefined, }, ], [ 't_mapping(t_uint256,t_array(t_contract(Storage2))dyn_storage)', { label: 'mapping(uint256 => contract Storage2[])', + members: undefined, }, ], [ 't_mapping(t_uint256,t_array(t_contract(Storage2))10_storage)', { label: 'mapping(uint256 => contract Storage2[10])', + members: undefined, }, ], [ 't_struct(MyStruct)storage', { label: 'struct Storage2.MyStruct', + members: [ + { + label: 'struct_uint256', + type: 't_uint256', + }, + { + label: 'struct_string', + type: 't_string_storage', 
+ }, + { + label: 'struct_address', + type: 't_address', + }, + ], }, ], [ 't_array(t_struct(MyStruct)storage)dyn_storage', { label: 'struct Storage2.MyStruct[]', + members: undefined, }, ], [ 't_array(t_struct(MyStruct)storage)10_storage', { label: 'struct Storage2.MyStruct[10]', + members: undefined, }, ], [ 't_mapping(t_uint256,t_struct(MyStruct)storage)', { label: 'mapping(uint256 => struct Storage2.MyStruct)', + members: undefined, }, ], [ 't_enum(MyEnum)', { label: 'enum Storage2.MyEnum', + members: [ + 'State1', + 'State2', + ], }, ], [ 't_array(t_enum(MyEnum))dyn_storage', { label: 'enum Storage2.MyEnum[]', + members: undefined, }, ], [ 't_array(t_enum(MyEnum))10_storage', { label: 'enum Storage2.MyEnum[10]', + members: undefined, }, ], [ 't_mapping(t_uint256,t_enum(MyEnum))', { label: 'mapping(uint256 => enum Storage2.MyEnum)', + members: undefined, }, ], [ 't_struct(MyComplexStruct)storage', { label: 'struct Storage2.MyComplexStruct', + members: [ + { + label: 'uint256_dynarray', + type: 't_array(t_uint256)dyn_storage', + }, + { + label: 'mapping_enums', + type: 't_mapping(t_string_memory_ptr,t_enum(MyEnum))', + }, + { + label: 'other_struct', + type: 't_struct(MyStruct)storage', + }, + ], + }, + ], + [ + 't_mapping(t_string_memory_ptr,t_enum(MyEnum))', + { + label: 'mapping(string => enum Storage2.MyEnum)', + members: undefined, }, ], ], diff --git a/packages/core/src/storage.test.ts.snap b/packages/core/src/storage.test.ts.snap index aaa611c71..c4a6d4323 100644 Binary files a/packages/core/src/storage.test.ts.snap and b/packages/core/src/storage.test.ts.snap differ diff --git a/packages/core/src/storage.ts b/packages/core/src/storage.ts deleted file mode 100644 index 702e8adf4..000000000 --- a/packages/core/src/storage.ts +++ /dev/null @@ -1,210 +0,0 @@ -import assert from 'assert'; -import chalk from 'chalk'; -import { isNodeType } from 'solidity-ast/utils'; -import { ContractDefinition } from 'solidity-ast'; - -import { SrcDecoder } from 
'./src-decoder'; -import { levenshtein, Operation } from './levenshtein'; -import { UpgradesError, ErrorDescriptions } from './error'; - -export interface StorageItem { - contract: string; - label: string; - type: string; - src: string; -} - -export interface StorageLayout { - storage: StorageItem[]; - types: Record; -} - -export interface TypeItem { - label: string; - members?: TypeItemMembers; -} - -export type TypeItemMembers = StructMember[] | EnumMember[]; - -export interface StructMember { - label: string; - type: string; -} - -type EnumMember = string; - -export function extractStorageLayout(contractDef: ContractDefinition, decodeSrc: SrcDecoder): StorageLayout { - const layout: StorageLayout = { storage: [], types: {} }; - - for (const varDecl of contractDef.nodes) { - if (isNodeType('VariableDeclaration', varDecl)) { - if (!varDecl.constant && varDecl.mutability !== 'immutable') { - const { typeIdentifier, typeString } = varDecl.typeDescriptions; - assert(typeof typeIdentifier === 'string'); - assert(typeof typeString === 'string'); - const type = decodeTypeIdentifier(typeIdentifier); - layout.storage.push({ - contract: contractDef.name, - label: varDecl.name, - type, - src: decodeSrc(varDecl), - }); - layout.types[type] = { - label: typeString, - }; - } - } - } - - return layout; -} - -export function assertStorageUpgradeSafe( - original: StorageLayout, - updated: StorageLayout, - unsafeAllowCustomTypes = false, -): void { - let errors = getStorageUpgradeErrors(original, updated); - - if (unsafeAllowCustomTypes) { - // Types with the same name are assumed compatible - errors = errors.filter(error => !isTypechange(error) || typechangePreservesNames(error)); - } - - if (errors.length > 0) { - throw new StorageUpgradeErrors(errors); - } -} - -function isTypechange(error: StorageOperation): error is StorageOperation & { kind: 'typechange' } { - return error.kind === 'typechange'; -} - -function typechangePreservesNames(error: StorageOperation & { kind: 
'typechange' }): boolean { - assert(error.updated !== undefined); - assert(error.original !== undefined); - return stabilizeTypeIdentifier(error.original.type) !== stabilizeTypeIdentifier(error.updated.type); -} - -class StorageUpgradeErrors extends UpgradesError { - constructor(readonly errors: StorageOperation[]) { - super(`New storage layout is incompatible due to the following changes`, () => { - return errors.map(describeError).join('\n\n'); - }); - } -} - -function label(variable?: { label: string }): string { - return variable?.label ? '`' + variable.label + '`' : ''; -} - -const errorInfo: ErrorDescriptions = { - typechange: { - msg: o => `Type of variable ${label(o.updated)} was changed`, - }, - rename: { - msg: o => `Variable ${label(o.original)} was renamed`, - }, - replace: { - msg: o => `Variable ${label(o.original)} was replaced with ${label(o.updated)}`, - }, - insert: { - msg: o => `Inserted variable ${label(o.updated)}`, - hint: 'Only insert variables at the end of the most derived contract', - }, - delete: { - msg: o => `Deleted variable ${label(o.original)}`, - hint: 'Keep the variable even if unused', - }, - append: { - // this would not be shown to the user but TypeScript needs append here - msg: () => 'Appended a variable but it is not an error', - }, -}; - -export function describeError(e: StorageOperation): string { - const info = errorInfo[e.kind]; - const src = e.updated?.src ?? e.original?.contract ?? 
'unknown'; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const log = [chalk.bold(src) + ': ' + info.msg(e as any)]; - if (info.hint) { - log.push(info.hint); - } - if (info.link) { - log.push(chalk.dim(info.link)); - } - return log.join('\n '); -} - -type StorageOperation = Operation; - -export function getStorageUpgradeErrors(original: StorageLayout, updated: StorageLayout): StorageOperation[] { - function matchStorageItem(o: StorageItem, u: StorageItem) { - const nameMatches = o.label === u.label; - - // TODO: type matching should compare struct members, etc. - const typeMatches = original.types[o.type].label === updated.types[u.type].label; - - if (typeMatches && nameMatches) { - return 'equal'; - } else if (typeMatches) { - return 'rename'; - } else if (nameMatches) { - return 'typechange'; - } else { - return 'replace'; - } - } - - const ops = levenshtein(original.storage, updated.storage, matchStorageItem); - return ops.filter(o => o.kind !== 'append'); -} - -// Type Identifiers in the AST are for some reason encoded so that they don't -// contain parentheses or commas, which have been substituted as follows: -// ( -> $_ -// ) -> _$ -// , -> _$_ -// This is particularly hard to decode because it is not a prefix-free code. -// Thus, the following regex has to perform a lookahead to make sure it gets -// the substitution right. -export function decodeTypeIdentifier(typeIdentifier: string): string { - return typeIdentifier.replace(/(\$_|_\$_|_\$)(?=(\$_|_\$_|_\$)*([^_$]|$))/g, m => { - switch (m) { - case '$_': - return '('; - case '_$': - return ')'; - case '_$_': - return ','; - default: - throw new Error('Unreachable'); - } - }); -} - -// Type Identifiers contain AST id numbers, which makes them sensitive to -// unrelated changes in the source code. This function stabilizes a type -// identifier by removing all AST ids. 
-export function stabilizeTypeIdentifier(typeIdentifier: string): string { - let decoded = decodeTypeIdentifier(typeIdentifier); - const re = /(t_struct|t_enum|t_contract)\(/g; - let match; - while ((match = re.exec(decoded))) { - let i; - let d = 1; - for (i = match.index + match[0].length; d !== 0; i++) { - assert(i < decoded.length, 'index out of bounds'); - const c = decoded[i]; - if (c === '(') { - d += 1; - } else if (c === ')') { - d -= 1; - } - } - const re2 = /\d+_?/y; - re2.lastIndex = i; - decoded = decoded.replace(re2, ''); - } - return decoded; -} diff --git a/packages/core/src/storage/compare.ts b/packages/core/src/storage/compare.ts new file mode 100644 index 000000000..c4dcf57d7 --- /dev/null +++ b/packages/core/src/storage/compare.ts @@ -0,0 +1,243 @@ +import { levenshtein, Operation } from '../levenshtein'; +import { ParsedTypeDetailed, StorageItem as _StorageItem } from './layout'; +import { UpgradesError } from '../error'; +import { StructMember as _StructMember, isEnumMembers, isStructMembers } from './layout'; +import { LayoutCompatibilityReport } from './report'; +import { assert } from '../utils/assert'; + +export type StorageItem = _StorageItem; +type StructMember = _StructMember; + +export type StorageField = StorageItem | StructMember; +export type StorageOperation = Operation>; + +export type EnumOperation = Operation; + +type StorageFieldChange = ( + | { kind: 'replace' | 'rename' } + | { kind: 'typechange'; change: TypeChange } +) & { + original: F; + updated: F; +}; + +export type TypeChange = ( + | { + kind: + | 'obvious mismatch' + | 'unknown' + | 'array grow' + | 'array shrink' + | 'array dynamic' + | 'enum resize' + | 'missing members' + | 'mapping key'; + } + | { + kind: 'mapping value' | 'array value'; + inner: TypeChange; + } + | { + kind: 'enum members'; + ops: EnumOperation[]; + } + | { + kind: 'struct members'; + ops: StorageOperation[]; + allowAppend: boolean; + } +) & { + original: ParsedTypeDetailed; + updated: 
ParsedTypeDetailed; +}; + +export class StorageLayoutComparator { + hasAllowedUncheckedCustomTypes = false; + + // Holds a stack of type comparisons to detect recursion + stack = new Set(); + cache = new Map(); + + constructor(readonly unsafeAllowCustomTypes = false) {} + + compareLayouts(original: StorageItem[], updated: StorageItem[]): LayoutCompatibilityReport { + return new LayoutCompatibilityReport(this.layoutLevenshtein(original, updated, { allowAppend: true })); + } + + private layoutLevenshtein( + original: F[], + updated: F[], + { allowAppend }: { allowAppend: boolean }, + ): StorageOperation[] { + const ops = levenshtein(original, updated, (a, b) => this.getFieldChange(a, b)); + + if (allowAppend) { + return ops.filter(o => o.kind !== 'append'); + } else { + return ops; + } + } + + getFieldChange(original: F, updated: F): StorageFieldChange | undefined { + const nameMatches = original.label === updated.label; + const typeChange = this.getTypeChange(original.type, updated.type, { allowAppend: false }); + + if (typeChange && !nameMatches) { + return { kind: 'replace', original, updated }; + } else if (!nameMatches) { + return { kind: 'rename', original, updated }; + } else if (typeChange) { + return { kind: 'typechange', change: typeChange, original, updated }; + } + } + + getTypeChange( + original: ParsedTypeDetailed, + updated: ParsedTypeDetailed, + { allowAppend }: { allowAppend: boolean }, + ): TypeChange | undefined { + const key = JSON.stringify({ original: original.id, updated: updated.id, allowAppend }); + + if (this.cache.has(key)) { + return this.cache.get(key); + } + + if (this.stack.has(key)) { + throw new UpgradesError(`Recursive types are not supported`, () => `Recursion found in ${updated.item.label}\n`); + } + + try { + this.stack.add(key); + const result = this.uncachedGetTypeChange(original, updated, { allowAppend }); + this.cache.set(key, result); + return result; + } finally { + this.stack.delete(key); + } + } + + private 
uncachedGetTypeChange( + original: ParsedTypeDetailed, + updated: ParsedTypeDetailed, + { allowAppend }: { allowAppend: boolean }, + ): TypeChange | undefined { + if (original.head !== updated.head) { + return { kind: 'obvious mismatch', original, updated }; + } + + if (original.args === undefined || updated.args === undefined) { + // both should be undefined at the same time + assert(original.args === updated.args); + return undefined; + } + + switch (original.head) { + case 't_contract': + // no storage layout errors can be introduced here since it is just an address + return undefined; + + case 't_struct': { + const originalMembers = original.item.members; + const updatedMembers = updated.item.members; + if (originalMembers === undefined || updatedMembers === undefined) { + if (this.unsafeAllowCustomTypes) { + this.hasAllowedUncheckedCustomTypes = true; + return undefined; + } else { + return { kind: 'missing members', original, updated }; + } + } + assert(isStructMembers(originalMembers) && isStructMembers(updatedMembers)); + const ops = this.layoutLevenshtein(originalMembers, updatedMembers, { allowAppend }); + if (ops.length > 0) { + return { kind: 'struct members', ops, original, updated, allowAppend }; + } else { + return undefined; + } + } + + case 't_enum': { + const originalMembers = original.item.members; + const updatedMembers = updated.item.members; + if (originalMembers === undefined || updatedMembers === undefined) { + if (this.unsafeAllowCustomTypes) { + this.hasAllowedUncheckedCustomTypes = true; + return undefined; + } else { + return { kind: 'missing members', original, updated }; + } + } + assert(isEnumMembers(originalMembers) && isEnumMembers(updatedMembers)); + if (enumSize(originalMembers.length) !== enumSize(updatedMembers.length)) { + return { kind: 'enum resize', original, updated }; + } else { + const ops = levenshtein(originalMembers, updatedMembers, (a, b) => + a === b ? 
undefined : { kind: 'replace' as const, original: a, updated: b }, + ).filter(o => o.kind !== 'append'); + if (ops.length > 0) { + return { kind: 'enum members', ops, original, updated }; + } else { + return undefined; + } + } + } + + case 't_mapping': { + const [originalKey, originalValue] = original.args; + const [updatedKey, updatedValue] = updated.args; + // network files migrated from the OZ CLI have an unknown key type + // we allow it to match with any other key type, carrying over the semantics of OZ CLI + const keyMatches = originalKey.head === 'unknown' || originalKey.head === updatedKey.head; + // validate an invariant we assume from solidity: key types are always simple value types + assert(originalKey.args === undefined && updatedKey.args === undefined); + // mapping value types are allowed to grow + if (!keyMatches) { + return { kind: 'mapping key', original, updated }; + } else { + const inner = this.getTypeChange(originalValue, updatedValue, { allowAppend: true }); + if (inner) { + return { kind: 'mapping value', inner, original, updated }; + } else { + return undefined; + } + } + } + + case 't_array': { + const originalLength = original.tail?.match(/^(\d+|dyn)/)?.[0]; + const updatedLength = updated.tail?.match(/^(\d+|dyn)/)?.[0]; + assert(originalLength !== undefined && updatedLength !== undefined); + + if (originalLength === 'dyn' || updatedLength === 'dyn') { + if (originalLength !== updatedLength) { + return { kind: 'array dynamic', original, updated }; + } + } + + const originalLengthInt = parseInt(originalLength, 10); + const updatedLengthInt = parseInt(updatedLength, 10); + + if (updatedLengthInt < originalLengthInt) { + return { kind: 'array shrink', original, updated }; + } else if (!allowAppend && updatedLengthInt > originalLengthInt) { + return { kind: 'array grow', original, updated }; + } + + const inner = this.getTypeChange(original.args[0], updated.args[0], { allowAppend: false }); + + if (inner) { + return { kind: 'array value', 
inner, original, updated }; + } else { + return undefined; + } + } + + default: + return { kind: 'unknown', original, updated }; + } + } +} + +function enumSize(memberCount: number): number { + return Math.ceil(Math.log2(Math.max(2, memberCount)) / 8); +} diff --git a/packages/core/src/storage/compat.ts b/packages/core/src/storage/compat.ts new file mode 100644 index 000000000..d45908540 --- /dev/null +++ b/packages/core/src/storage/compat.ts @@ -0,0 +1,3 @@ +export { StorageItem, StorageLayout, TypeItem, TypeItemMembers, StructMember } from './layout'; +export { extractStorageLayout } from './extract'; +export { decodeTypeIdentifier, stabilizeTypeIdentifier } from '../utils/type-id'; diff --git a/packages/core/src/storage/extract.ts b/packages/core/src/storage/extract.ts new file mode 100644 index 000000000..3de6d0787 --- /dev/null +++ b/packages/core/src/storage/extract.ts @@ -0,0 +1,110 @@ +import assert from 'assert'; +import { ContractDefinition, StructDefinition, EnumDefinition, TypeDescriptions } from 'solidity-ast'; +import { isNodeType, findAll } from 'solidity-ast/utils'; + +import { StorageLayout, TypeItem } from './layout'; +import { normalizeTypeIdentifier } from '../utils/type-id'; +import { SrcDecoder } from '../src-decoder'; +import { ASTDereferencer } from '../ast-dereferencer'; + +const currentLayoutVersion = '1.1'; + +export function isCurrentLayoutVersion(layout: StorageLayout): boolean { + return layout?.layoutVersion === currentLayoutVersion; +} + +export function extractStorageLayout( + contractDef: ContractDefinition, + decodeSrc: SrcDecoder, + deref: ASTDereferencer, +): StorageLayout { + const layout: StorageLayout = { storage: [], types: {}, layoutVersion: currentLayoutVersion }; + + // Note: A UserDefinedTypeName can also refer to a ContractDefinition but we won't care about those. 
+ const derefUserDefinedType = deref(['StructDefinition', 'EnumDefinition']); + + for (const varDecl of contractDef.nodes) { + if (isNodeType('VariableDeclaration', varDecl)) { + if (!varDecl.constant && varDecl.mutability !== 'immutable') { + const type = normalizeTypeIdentifier(typeDescriptions(varDecl).typeIdentifier); + + layout.storage.push({ + contract: contractDef.name, + label: varDecl.name, + type, + src: decodeSrc(varDecl), + }); + + assert(varDecl.typeName != null); + + // We will recursively look for all types involved in this variable declaration in order to store their type + // information. We iterate over a Map that is indexed by typeIdentifier to ensure we visit each type only once. + // Note that there can be recursive types. + const typeNames = new Map( + [...findTypeNames(varDecl.typeName)].map(n => [typeDescriptions(n).typeIdentifier, n]), + ); + + for (const typeName of typeNames.values()) { + const { typeIdentifier, typeString: label } = typeDescriptions(typeName); + + const type = normalizeTypeIdentifier(typeIdentifier); + + if (type in layout.types) { + continue; + } + + let members; + + if ('referencedDeclaration' in typeName && !/^t_contract\b/.test(type)) { + const typeDef = derefUserDefinedType(typeName.referencedDeclaration); + members = getTypeMembers(typeDef); + // Recursively look for the types referenced in this definition and add them to the queue. 
+ for (const typeName of findTypeNames(typeDef)) { + const { typeIdentifier } = typeDescriptions(typeName); + if (!typeNames.has(typeIdentifier)) { + typeNames.set(typeIdentifier, typeName); + } + } + } + + layout.types[type] = { label, members }; + } + } + } + } + + return layout; +} + +const findTypeNames = findAll([ + 'ArrayTypeName', + 'ElementaryTypeName', + 'FunctionTypeName', + 'Mapping', + 'UserDefinedTypeName', +]); + +interface RequiredTypeDescriptions { + typeIdentifier: string; + typeString: string; +} + +function typeDescriptions(x: { typeDescriptions: TypeDescriptions }): RequiredTypeDescriptions { + assert(typeof x.typeDescriptions.typeIdentifier === 'string'); + assert(typeof x.typeDescriptions.typeString === 'string'); + return x.typeDescriptions as RequiredTypeDescriptions; +} + +function getTypeMembers(typeDef: StructDefinition | EnumDefinition): TypeItem['members'] { + if (typeDef.nodeType === 'StructDefinition') { + return typeDef.members.map(m => { + assert(typeof m.typeDescriptions.typeIdentifier === 'string'); + return { + label: m.name, + type: normalizeTypeIdentifier(m.typeDescriptions.typeIdentifier), + }; + }); + } else { + return typeDef.members.map(m => m.name); + } +} diff --git a/packages/core/src/storage/index.ts b/packages/core/src/storage/index.ts new file mode 100644 index 000000000..9b641419b --- /dev/null +++ b/packages/core/src/storage/index.ts @@ -0,0 +1,64 @@ +import chalk from 'chalk'; + +export * from './compat'; + +import { UpgradesError } from '../error'; +import { StorageLayout, getDetailedLayout } from './layout'; +import { StorageOperation, StorageItem, StorageLayoutComparator } from './compare'; +import { LayoutCompatibilityReport } from './report'; +import { ValidationOptions, isSilencingWarnings } from '../validate/overrides'; + +export function assertStorageUpgradeSafe( + original: StorageLayout, + updated: StorageLayout, + unsafeAllowCustomTypes = false, +): void { + const originalDetailed = 
getDetailedLayout(original); + const updatedDetailed = getDetailedLayout(updated); + const comparator = new StorageLayoutComparator(unsafeAllowCustomTypes); + const report = comparator.compareLayouts(originalDetailed, updatedDetailed); + + if (!isSilencingWarnings()) { + if (comparator.hasAllowedUncheckedCustomTypes) { + console.error( + chalk.keyword('orange').bold('Warning:') + + ` Potentially unsafe deployment\n\n` + + ` You are using \`unsafeAllowCustomTypes\` to force approve structs or enums with missing data.\n` + + ` Make sure you have manually checked the storage layout for incompatibilities.\n`, + ); + } else if (unsafeAllowCustomTypes) { + console.error( + chalk.keyword('yellow').bold('Note:') + + ` \`unsafeAllowCustomTypes\` is no longer necessary. Structs are enums are automatically checked.\n`, + ); + } + } + + if (!report.pass) { + throw new StorageUpgradeErrors(report); + } +} + +class StorageUpgradeErrors extends UpgradesError { + constructor(readonly report: LayoutCompatibilityReport) { + super(`New storage layout is incompatible`, () => report.explain()); + } +} + +// Kept for backwards compatibility and to avoid rewriting tests +export function getStorageUpgradeErrors( + original: StorageLayout, + updated: StorageLayout, + opts: ValidationOptions = {}, +): StorageOperation[] { + try { + assertStorageUpgradeSafe(original, updated, opts.unsafeAllowCustomTypes); + } catch (e) { + if (e instanceof StorageUpgradeErrors) { + return e.report.ops; + } else { + throw e; + } + } + return []; +} diff --git a/packages/core/src/storage/layout.ts b/packages/core/src/storage/layout.ts new file mode 100644 index 000000000..2beac9ad4 --- /dev/null +++ b/packages/core/src/storage/layout.ts @@ -0,0 +1,88 @@ +import { parseTypeId, ParsedTypeId } from '../utils/parse-type-id'; + +// The interfaces below are generic in the way types are represented (through the parameter `Type`). When stored on +// disk, the type is represented by a string: the type id. 
When loaded onto memory to run the storage layout comparisons +// found in this module, the type id is replaced by its parsed structure together with the corresponding TypeItem, e.g. +// the struct members if it is a struct type. + +export interface StorageLayout { + layoutVersion?: string; + storage: StorageItem[]; + types: Record; +} + +export interface StorageItem { + contract: string; + label: string; + type: Type; + src: string; +} + +export interface TypeItem { + label: string; + members?: TypeItemMembers; +} + +export type TypeItemMembers = StructMember[] | EnumMember[]; + +export interface StructMember { + label: string; + type: Type; +} + +export type EnumMember = string; + +export interface ParsedTypeDetailed extends ParsedTypeId { + item: TypeItem; + args?: ParsedTypeDetailed[]; + rets?: ParsedTypeDetailed[]; +} + +type Replace = Omit & Record; + +export function getDetailedLayout(layout: StorageLayout): StorageItem[] { + const cache: Record = {}; + + return layout.storage.map(parseWithDetails); + + function parseWithDetails(item: I): Replace { + const parsed = parseTypeId(item.type); + const withDetails = addDetailsToParsedType(parsed); + return { ...item, type: withDetails }; + } + + function addDetailsToParsedType(parsed: ParsedTypeId): ParsedTypeDetailed { + if (parsed.id in cache) { + return cache[parsed.id]; + } + + const item = layout.types[parsed.id]; + const detailed: ParsedTypeDetailed = { + ...parsed, + args: undefined, + rets: undefined, + item: { + ...item, + members: undefined, + }, + }; + + // store in cache before recursion below + cache[parsed.id] = detailed; + + detailed.args = parsed.args?.map(addDetailsToParsedType); + detailed.rets = parsed.args?.map(addDetailsToParsedType); + detailed.item.members = + item?.members && (isStructMembers(item?.members) ? 
item.members.map(parseWithDetails) : item?.members); + + return detailed; + } +} + +export function isEnumMembers(members: TypeItemMembers): members is EnumMember[] { + return members.length === 0 || typeof members[0] === 'string'; +} + +export function isStructMembers(members: TypeItemMembers): members is StructMember[] { + return members.length === 0 || typeof members[0] === 'object'; +} diff --git a/packages/core/src/storage/report.test.ts b/packages/core/src/storage/report.test.ts new file mode 100644 index 000000000..0b53489d2 --- /dev/null +++ b/packages/core/src/storage/report.test.ts @@ -0,0 +1,88 @@ +import _test, { TestInterface } from 'ava'; +import { ContractDefinition } from 'solidity-ast'; +import { findAll } from 'solidity-ast/utils'; +import { artifacts } from 'hardhat'; + +import { SolcOutput } from '../solc-api'; +import { astDereferencer } from '../ast-dereferencer'; +import { extractStorageLayout } from '../storage/extract'; +import { StorageLayoutComparator } from './compare'; +import { StorageLayout, getDetailedLayout } from './layout'; + +interface Context { + extractStorageLayout: (contract: string) => ReturnType; +} + +const test = _test as TestInterface; + +const dummyDecodeSrc = () => 'file.sol:1'; + +test.before(async t => { + const buildInfo = await artifacts.getBuildInfo('contracts/test/Storage.sol:Storage1'); + if (buildInfo === undefined) { + throw new Error('Build info not found'); + } + const solcOutput: SolcOutput = buildInfo.output; + const contracts: Record = {}; + for (const def of findAll('ContractDefinition', solcOutput.sources['contracts/test/Storage.sol'].ast)) { + contracts[def.name] = def; + } + const deref = astDereferencer(solcOutput); + t.context.extractStorageLayout = name => extractStorageLayout(contracts[name], dummyDecodeSrc, deref); +}); + +function getReport(original: StorageLayout, updated: StorageLayout) { + const originalDetailed = getDetailedLayout(original); + const updatedDetailed = 
getDetailedLayout(updated); + const comparator = new StorageLayoutComparator(); + return comparator.compareLayouts(originalDetailed, updatedDetailed); +} + +test('structs', t => { + const v1 = t.context.extractStorageLayout('StorageUpgrade_Struct_V1'); + const v2 = t.context.extractStorageLayout('StorageUpgrade_Struct_V2_Bad'); + const report = getReport(v1, v2); + t.snapshot(report.explain()); +}); + +test('enums', t => { + const v1 = t.context.extractStorageLayout('StorageUpgrade_Enum_V1'); + const v2 = t.context.extractStorageLayout('StorageUpgrade_Enum_V2_Bad'); + const report = getReport(v1, v2); + t.snapshot(report.explain()); +}); + +test('obvious mismatch', t => { + const v1 = t.context.extractStorageLayout('StorageUpgrade_ObviousMismatch_V1'); + const v2 = t.context.extractStorageLayout('StorageUpgrade_ObviousMismatch_V2_Bad'); + const report = getReport(v1, v2); + t.snapshot(report.explain()); +}); + +test('array', t => { + const v1 = t.context.extractStorageLayout('StorageUpgrade_Array_V1'); + const v2 = t.context.extractStorageLayout('StorageUpgrade_Array_V2_Bad'); + const report = getReport(v1, v2); + t.snapshot(report.explain()); +}); + +test('mapping', t => { + const v1 = t.context.extractStorageLayout('StorageUpgrade_Mapping_V1'); + const v2 = t.context.extractStorageLayout('StorageUpgrade_Mapping_V2_Bad'); + const report = getReport(v1, v2); + t.snapshot(report.explain()); +}); + +test('rename', t => { + const v1 = t.context.extractStorageLayout('StorageUpgrade_Rename_V1'); + const v2 = t.context.extractStorageLayout('StorageUpgrade_Rename_V2'); + const report = getReport(v1, v2); + t.snapshot(report.explain()); +}); + +test('replace', t => { + const v1 = t.context.extractStorageLayout('StorageUpgrade_Replace_V1'); + const v2 = t.context.extractStorageLayout('StorageUpgrade_Replace_V2'); + const report = getReport(v1, v2); + t.snapshot(report.explain()); +}); diff --git a/packages/core/src/storage/report.test.ts.md 
b/packages/core/src/storage/report.test.ts.md new file mode 100644 index 000000000..bb9e802f0 --- /dev/null +++ b/packages/core/src/storage/report.test.ts.md @@ -0,0 +1,135 @@ +# Snapshot report for `src/storage/report.test.ts` + +The actual snapshot is saved in `report.test.ts.snap`. + +Generated by [AVA](https://avajs.dev). + +## structs + +> Snapshot 1 + + `file.sol:1: Upgraded `data1` to an incompatible type␊ + - Bad upgrade from struct StorageUpgrade_Struct_V1.Struct1 to struct StorageUpgrade_Struct_V2_Bad.Struct2Minus␊ + - In struct StorageUpgrade_Struct_V2_Bad.Struct2Minus␊ + - Deleted `s1`␊ + Keep the variable even if unused␊ + ␊ + file.sol:1: Upgraded `data2` to an incompatible type␊ + - Bad upgrade from struct StorageUpgrade_Struct_V1.Struct1 to struct StorageUpgrade_Struct_V2_Bad.Struct2Plus␊ + - In struct StorageUpgrade_Struct_V2_Bad.Struct2Plus␊ + - Added `z`␊ + ␊ + file.sol:1: Upgraded `m` to an incompatible type␊ + - In mapping(uint256 => struct StorageUpgrade_Struct_V2_Bad.Struct2Minus)␊ + - Bad upgrade from struct StorageUpgrade_Struct_V1.Struct1 to struct StorageUpgrade_Struct_V2_Bad.Struct2Minus␊ + - In struct StorageUpgrade_Struct_V2_Bad.Struct2Minus␊ + - Deleted `s1`␊ + Keep the variable even if unused␊ + ␊ + file.sol:1: Upgraded `a1` to an incompatible type␊ + - In struct StorageUpgrade_Struct_V2_Bad.Struct2Minus[10]␊ + - Bad upgrade from struct StorageUpgrade_Struct_V1.Struct1 to struct StorageUpgrade_Struct_V2_Bad.Struct2Minus␊ + - In struct StorageUpgrade_Struct_V2_Bad.Struct2Minus␊ + - Deleted `s1`␊ + Keep the variable even if unused␊ + ␊ + file.sol:1: Upgraded `a2` to an incompatible type␊ + - In struct StorageUpgrade_Struct_V2_Bad.Struct2Plus[10]␊ + - Bad upgrade from struct StorageUpgrade_Struct_V1.Struct1 to struct StorageUpgrade_Struct_V2_Bad.Struct2Plus␊ + - In struct StorageUpgrade_Struct_V2_Bad.Struct2Plus␊ + - Added `z`␊ + ␊ + file.sol:1: Upgraded `data3` to an incompatible type␊ + - Bad upgrade from struct 
StorageUpgrade_Struct_V1.Struct1 to struct StorageUpgrade_Struct_V2_Bad.Struct2Changed␊ + - In struct StorageUpgrade_Struct_V2_Bad.Struct2Changed␊ + - Upgraded `x` to an incompatible type␊ + - Bad upgrade from uint256 to string␊ + - Upgraded `s1` to an incompatible type␊ + - Bad upgrade from string to uint256␊ + - Upgraded `inner` to an incompatible type␊ + - Bad upgrade from struct StorageUpgrade_Struct_V1.StructInner to struct StorageUpgrade_Struct_V2_Bad.StructInnerPlus␊ + - In struct StorageUpgrade_Struct_V2_Bad.StructInnerPlus␊ + - Added `z`` + +## enums + +> Snapshot 1 + + `file.sol:1: Upgraded `data1` to an incompatible type␊ + - Bad upgrade from enum StorageUpgrade_Enum_V1.Enum1 to enum StorageUpgrade_Enum_V2_Bad.Enum2Delete␊ + - In enum StorageUpgrade_Enum_V2_Bad.Enum2Delete␊ + - Deleted `B`␊ + ␊ + file.sol:1: Upgraded `data2` to an incompatible type␊ + - Bad upgrade from enum StorageUpgrade_Enum_V1.Enum1 to enum StorageUpgrade_Enum_V2_Bad.Enum2Replace␊ + - In enum StorageUpgrade_Enum_V2_Bad.Enum2Replace␊ + - Replaced `B` with `X`␊ + ␊ + file.sol:1: Upgraded `data3` to an incompatible type␊ + - Bad upgrade from enum StorageUpgrade_Enum_V1.Enum1 to enum StorageUpgrade_Enum_V2_Bad.Enum2Insert␊ + - In enum StorageUpgrade_Enum_V2_Bad.Enum2Insert␊ + - Inserted `X`␊ + ␊ + file.sol:1: Upgraded `data4` to an incompatible type␊ + - Bad upgrade from enum StorageUpgrade_Enum_V1.Enum1 to enum StorageUpgrade_Enum_V2_Bad.Enum2TooLarge␊ + Different representation sizes` + +## obvious mismatch + +> Snapshot 1 + + `file.sol:1: Upgraded `x1` to an incompatible type␊ + - Bad upgrade from uint256 to string␊ + ␊ + file.sol:1: Upgraded `s1` to an incompatible type␊ + - Bad upgrade from struct StorageUpgrade_ObviousMismatch_V1.S to uint256␊ + ␊ + file.sol:1: Upgraded `a1` to an incompatible type␊ + - Bad upgrade from uint256[] to mapping(uint256 => uint256)` + +## array + +> Snapshot 1 + + `file.sol:1: Upgraded `x1` to an incompatible type␊ + - Bad array resize from 20 to 15␊ + 
Size cannot decrease␊ + ␊ + file.sol:1: Upgraded `x2` to an incompatible type␊ + - Bad array resize from 20 to 25␊ + Size cannot increase here␊ + ␊ + file.sol:1: Upgraded `x3` to an incompatible type␊ + - Bad upgrade from fixed to dynamic size array␊ + ␊ + file.sol:1: Upgraded `x4` to an incompatible type␊ + - Bad upgrade from fixed to dynamic size array␊ + ␊ + file.sol:1: Upgraded `m` to an incompatible type␊ + - In mapping(uint256 => uint256[15])␊ + - Bad array resize from 20 to 15␊ + Size cannot decrease` + +## mapping + +> Snapshot 1 + + `file.sol:1: Upgraded `m1` to an incompatible type␊ + - In mapping(uint256 => uint256)␊ + - Bad upgrade from uint256[10] to uint256␊ + ␊ + file.sol:1: Upgraded `m2` to an incompatible type␊ + - In key of mapping(bool => uint256)␊ + - Bad upgrade from undefined to bool` + +## rename + +> Snapshot 1 + + 'file.sol:1: Renamed `x2` to `renamed`' + +## replace + +> Snapshot 1 + + 'file.sol:1: Replaced `x2` with `renamed` of incompatible type' diff --git a/packages/core/src/storage/report.test.ts.snap b/packages/core/src/storage/report.test.ts.snap new file mode 100644 index 000000000..26cc4e206 Binary files /dev/null and b/packages/core/src/storage/report.test.ts.snap differ diff --git a/packages/core/src/storage/report.ts b/packages/core/src/storage/report.ts new file mode 100644 index 000000000..ec333e366 --- /dev/null +++ b/packages/core/src/storage/report.ts @@ -0,0 +1,219 @@ +import chalk from 'chalk'; + +import type { BasicOperation } from '../levenshtein'; +import type { ParsedTypeDetailed } from './layout'; +import type { StorageOperation, StorageItem, StorageField, TypeChange, EnumOperation } from './compare'; +import { itemize, itemizeWith } from '../utils/itemize'; +import { indent } from '../utils/indent'; +import { assert } from '../utils/assert'; + +export class LayoutCompatibilityReport { + readonly pass: boolean; + + constructor(readonly ops: StorageOperation[]) { + this.pass = ops.length === 0; + } + + explain(): 
string { + const res = []; + + for (const op of this.ops) { + const src = 'updated' in op ? op.updated.src : op.original.contract; + res.push( + chalk.bold(src) + ':' + indent(explainStorageOperation(op, { kind: 'layout', allowAppend: true }), 2, 1), + ); + } + + return res.join('\n\n'); + } +} + +interface StorageOperationContext { + kind: 'struct' | 'layout'; + allowAppend: boolean; +} + +function explainStorageOperation(op: StorageOperation, ctx: StorageOperationContext): string { + switch (op.kind) { + case 'typechange': { + const basic = explainTypeChange(op.change); + const details = + ctx.kind === 'layout' // explain details for layout only + ? new Set( + getAllTypeChanges(op.change) + .map(explainTypeChangeDetails) + .filter((d?: string): d is string => d !== undefined), + ) + : []; + return `Upgraded ${label(op.updated)} to an incompatible type\n` + itemize(basic, ...details); + } + + case 'rename': + return `Renamed ${label(op.original)} to ${label(op.updated)}`; + + case 'replace': + return `Replaced ${label(op.original)} with ${label(op.updated)} of incompatible type`; + + default: { + const title = explainBasicOperation(op, t => t.label); + const hints = []; + + switch (op.kind) { + case 'insert': { + if (ctx.kind === 'struct') { + if (ctx.allowAppend) { + hints.push('New struct members should be placed after existing ones'); + } else { + hints.push('New struct members are not allowed here. 
Define a new struct'); + } + } else { + hints.push('New variables should be placed after all existing inherited variables'); + } + break; + } + + case 'delete': { + hints.push('Keep the variable even if unused'); + break; + } + } + + return title + '\n' + itemizeWith('>', ...hints); + } + } +} + +function explainTypeChange(ch: TypeChange): string { + switch (ch.kind) { + case 'obvious mismatch': + case 'struct members': + case 'enum members': + return `Bad upgrade ${describeTransition(ch.original, ch.updated)}`; + + case 'enum resize': + return `Bad upgrade ${describeTransition(ch.original, ch.updated)}\nDifferent representation sizes`; + + case 'mapping key': { + assert(ch.original.args && ch.updated.args); + const originalKey = ch.original.args[0]; + const updatedKey = ch.updated.args[0]; + return `In key of ${ch.updated.item.label}\n- Bad upgrade ${describeTransition(originalKey, updatedKey)}`; + } + + case 'mapping value': + case 'array value': + return `In ${ch.updated.item.label}\n` + itemize(explainTypeChange(ch.inner)); + + case 'array shrink': + case 'array grow': { + assert(ch.original.tail && ch.updated.tail); + const originalSize = parseInt(ch.original.tail, 10); + const updatedSize = parseInt(ch.updated.tail, 10); + const note = ch.kind === 'array shrink' ? 'Size cannot decrease' : 'Size cannot increase here'; + return `Bad array resize from ${originalSize} to ${updatedSize}\n${note}`; + } + + case 'array dynamic': { + assert(ch.original.tail && ch.updated.tail); + const [originalSize, updatedSize] = ch.original.tail === 'dyn' ? 
['dynamic', 'fixed'] : ['fixed', 'dynamic']; + return `Bad upgrade from ${originalSize} to ${updatedSize} size array`; + } + + case 'missing members': { + const type = ch.updated.head.replace(/^t_/, ''); // t_struct, t_enum -> struct, enum + return `Insufficient data to compare ${type}s\nManually assess compatibility, then use option \`unsafeAllowCustomTypes: true\``; + } + + case 'unknown': + return `Unknown type ${ch.updated.item.label}`; + } +} + +function getAllTypeChanges(root: TypeChange): TypeChange[] { + const list = [root]; + + for (const ch of list) { + switch (ch.kind) { + case 'mapping value': + case 'array value': + list.push(ch.inner); + break; + + case 'struct members': { + for (const op of ch.ops) { + if (op.kind === 'typechange') { + list.push(op.change); + } + } + break; + } + + // We mention all other kinds explicitly to review any future new kinds + case 'obvious mismatch': + case 'enum members': + case 'enum resize': + case 'mapping key': + case 'array shrink': + case 'array grow': + case 'array dynamic': + case 'missing members': + case 'unknown': + break; + } + } + + return list; +} + +function explainTypeChangeDetails(ch: TypeChange): string | undefined { + switch (ch.kind) { + case 'struct members': { + const { allowAppend } = ch; + return ( + `In ${ch.updated.item.label}\n` + + itemize(...ch.ops.map(op => explainStorageOperation(op, { kind: 'struct', allowAppend }))) + ); + } + + case 'enum members': + return `In ${ch.updated.item.label}\n` + itemize(...ch.ops.map(explainEnumOperation)); + } +} + +function explainEnumOperation(op: EnumOperation): string { + switch (op.kind) { + case 'replace': + return `Replaced \`${op.original}\` with \`${op.updated}\``; + + default: + return explainBasicOperation(op, t => t); + } +} + +function explainBasicOperation(op: BasicOperation, getName: (t: T) => string): string { + switch (op.kind) { + case 'delete': + return `Deleted \`${getName(op.original)}\``; + + case 'insert': + return `Inserted 
\`${getName(op.updated)}\``; + + case 'append': + return `Added \`${getName(op.updated)}\``; + } +} + +function describeTransition(original: ParsedTypeDetailed, updated: ParsedTypeDetailed): string { + const originalLabel = original.item.label; + const updatedLabel = updated.item.label; + + if (originalLabel === updatedLabel) { + return `to ${updatedLabel}`; + } else { + return `from ${originalLabel} to ${updatedLabel}`; + } +} + +function label(variable: { label: string }): string { + return '`' + variable.label + '`'; +} diff --git a/packages/core/src/utils/assert.ts b/packages/core/src/utils/assert.ts new file mode 100644 index 000000000..0d010f7c2 --- /dev/null +++ b/packages/core/src/utils/assert.ts @@ -0,0 +1,5 @@ +export function assert(p: unknown): asserts p { + if (!p) { + throw new Error('An unexpected condition occurred. Please report this at https://zpl.in/upgrades/report'); + } +} diff --git a/packages/core/src/utils/curry.ts b/packages/core/src/utils/curry.ts new file mode 100644 index 000000000..648b876bc --- /dev/null +++ b/packages/core/src/utils/curry.ts @@ -0,0 +1,17 @@ +export interface Curried { + (a: A): (b: B) => T; + (a: A, b: B): T; +} + +export function curry2(fn: (a: A, b: B) => T): Curried { + function curried(a: A): (b: B) => T; + function curried(a: A, b: B): T; + function curried(a: A, ...b: [] | [B]): T | ((b: B) => T) { + if (b.length === 0) { + return b => fn(a, b); + } else { + return fn(a, ...b); + } + } + return curried; +} diff --git a/packages/core/src/utils/deep-array.test.ts b/packages/core/src/utils/deep-array.test.ts new file mode 100644 index 000000000..6995317f5 --- /dev/null +++ b/packages/core/src/utils/deep-array.test.ts @@ -0,0 +1,21 @@ +import test from 'ava'; + +import { deepEqual } from './deep-array'; + +test('depth 0', t => { + t.true(deepEqual('a', 'a')); + t.false(deepEqual('a', 'b')); + t.false(deepEqual('a', ['a'])); + t.false(deepEqual(['a'], 'a')); +}); + +test('depth 1', t => { + t.true(deepEqual(['a', 
'b'], ['a', 'b'])); + t.false(deepEqual(['a'], ['a', 'b'])); + t.false(deepEqual(['a', 'b'], ['a', 'c'])); +}); + +test('depth 2', t => { + t.true(deepEqual([['a'], ['b']], [['a'], ['b']])); + t.false(deepEqual([['a'], ['b']], [['a'], ['c']])); +}); diff --git a/packages/core/src/utils/deep-array.ts b/packages/core/src/utils/deep-array.ts new file mode 100644 index 000000000..b5fe6b5de --- /dev/null +++ b/packages/core/src/utils/deep-array.ts @@ -0,0 +1,9 @@ +export type DeepArray = T | DeepArray[]; + +export function deepEqual(a: DeepArray, b: DeepArray): boolean { + if (Array.isArray(a) && Array.isArray(b)) { + return a.length === b.length && a.every((v, i) => deepEqual(v, b[i])); + } else { + return a === b; + } +} diff --git a/packages/core/src/utils/indent.ts b/packages/core/src/utils/indent.ts new file mode 100644 index 000000000..b25cb434a --- /dev/null +++ b/packages/core/src/utils/indent.ts @@ -0,0 +1,3 @@ +export function indent(text: string, amount: number, amount0 = amount): string { + return text.replace(/^/gm, (_, i) => ' '.repeat(i === 0 ? 
amount0 : amount)); +} diff --git a/packages/core/src/utils/itemize.ts b/packages/core/src/utils/itemize.ts new file mode 100644 index 000000000..5de11af4a --- /dev/null +++ b/packages/core/src/utils/itemize.ts @@ -0,0 +1,16 @@ +// itemize returns a markdown-like list of the items + +// itemize['a\nb', 'c'] = +// - a +// b +// - c + +import { indent } from './indent'; + +export function itemize(...items: string[]): string { + return itemizeWith('-', ...items); +} + +export function itemizeWith(bullet: string, ...items: string[]): string { + return items.map(item => bullet + indent(item, 2, 1)).join('\n'); +} diff --git a/packages/core/src/utils/parse-type-id.test.ts b/packages/core/src/utils/parse-type-id.test.ts new file mode 100644 index 000000000..b90bc13fd --- /dev/null +++ b/packages/core/src/utils/parse-type-id.test.ts @@ -0,0 +1,22 @@ +import test from 'ava'; + +import { parseTypeId } from './parse-type-id'; + +const fixtures = [ + 't_uint256', + 't_enum(MyEnum)', + 't_struct(MyComplexStruct)storage', + 't_array(t_uint256)3_storage', + 't_mapping(t_uint256,t_uint256)', + 't_mapping(unknown,t_uint256)', + 't_mapping(t_uint256,t_array(t_bool)dyn_storage)', + 't_mapping(t_uint256,t_mapping(t_string_memory_ptr,t_address))', + 't_function_internal_nonpayable(t_uint256)returns()', + 't_array(t_function_internal_nonpayable(t_uint256)returns(t_address))10_storage', +]; + +for (const f of fixtures) { + test(f, t => { + t.snapshot(parseTypeId(f)); + }); +} diff --git a/packages/core/src/utils/parse-type-id.test.ts.md b/packages/core/src/utils/parse-type-id.test.ts.md new file mode 100644 index 000000000..090418bd9 --- /dev/null +++ b/packages/core/src/utils/parse-type-id.test.ts.md @@ -0,0 +1,264 @@ +# Snapshot report for `src/utils/parse-type-id.test.ts` + +The actual snapshot is saved in `parse-type-id.test.ts.snap`. + +Generated by [AVA](https://avajs.dev). 
+ +## t_array(t_function_internal_nonpayable(t_uint256)returns(t_address))10_storage + +> Snapshot 1 + + { + args: [ + { + args: [ + { + args: undefined, + head: 't_uint256', + id: 't_uint256', + rets: undefined, + tail: undefined, + }, + ], + head: 't_function_internal_nonpayable', + id: 't_function_internal_nonpayable(t_uint256)returns(t_address)', + rets: [ + { + args: undefined, + head: 't_address', + id: 't_address', + rets: undefined, + tail: undefined, + }, + ], + tail: undefined, + }, + ], + head: 't_array', + id: 't_array(t_function_internal_nonpayable(t_uint256)returns(t_address))10_storage', + rets: undefined, + tail: '10_storage', + } + +## t_array(t_uint256)3_storage + +> Snapshot 1 + + { + args: [ + { + args: undefined, + head: 't_uint256', + id: 't_uint256', + rets: undefined, + tail: undefined, + }, + ], + head: 't_array', + id: 't_array(t_uint256)3_storage', + rets: undefined, + tail: '3_storage', + } + +## t_function_internal_nonpayable(t_uint256)returns() + +> Snapshot 1 + + { + args: [ + { + args: undefined, + head: 't_uint256', + id: 't_uint256', + rets: undefined, + tail: undefined, + }, + ], + head: 't_function_internal_nonpayable', + id: 't_function_internal_nonpayable(t_uint256)returns()', + rets: [], + tail: undefined, + } + +## t_mapping(t_uint256,t_array(t_bool)dyn_storage) + +> Snapshot 1 + + { + args: [ + { + args: undefined, + head: 't_uint256', + id: 't_uint256', + rets: undefined, + tail: undefined, + }, + { + args: [ + { + args: undefined, + head: 't_bool', + id: 't_bool', + rets: undefined, + tail: undefined, + }, + ], + head: 't_array', + id: 't_array(t_bool)dyn_storage', + rets: undefined, + tail: 'dyn_storage', + }, + ], + head: 't_mapping', + id: 't_mapping(t_uint256,t_array(t_bool)dyn_storage)', + rets: undefined, + tail: undefined, + } + +## t_mapping(t_uint256,t_mapping(t_string_memory_ptr,t_address)) + +> Snapshot 1 + + { + args: [ + { + args: undefined, + head: 't_uint256', + id: 't_uint256', + rets: undefined, + tail: 
undefined, + }, + { + args: [ + { + args: undefined, + head: 't_string_memory_ptr', + id: 't_string_memory_ptr', + rets: undefined, + tail: undefined, + }, + { + args: undefined, + head: 't_address', + id: 't_address', + rets: undefined, + tail: undefined, + }, + ], + head: 't_mapping', + id: 't_mapping(t_string_memory_ptr,t_address)', + rets: undefined, + tail: undefined, + }, + ], + head: 't_mapping', + id: 't_mapping(t_uint256,t_mapping(t_string_memory_ptr,t_address))', + rets: undefined, + tail: undefined, + } + +## t_mapping(t_uint256,t_uint256) + +> Snapshot 1 + + { + args: [ + { + args: undefined, + head: 't_uint256', + id: 't_uint256', + rets: undefined, + tail: undefined, + }, + { + args: undefined, + head: 't_uint256', + id: 't_uint256', + rets: undefined, + tail: undefined, + }, + ], + head: 't_mapping', + id: 't_mapping(t_uint256,t_uint256)', + rets: undefined, + tail: undefined, + } + +## t_uint256 + +> Snapshot 1 + + { + args: undefined, + head: 't_uint256', + id: 't_uint256', + rets: undefined, + tail: undefined, + } + +## t_mapping(unknown,t_uint256) + +> Snapshot 1 + + { + args: [ + { + args: undefined, + head: 'unknown', + id: 'unknown', + rets: undefined, + tail: undefined, + }, + { + args: undefined, + head: 't_uint256', + id: 't_uint256', + rets: undefined, + tail: undefined, + }, + ], + head: 't_mapping', + id: 't_mapping(unknown,t_uint256)', + rets: undefined, + tail: undefined, + } + +## t_enum(MyEnum) + +> Snapshot 1 + + { + args: [ + { + args: undefined, + head: 'MyEnum', + id: 'MyEnum', + rets: undefined, + tail: undefined, + }, + ], + head: 't_enum', + id: 't_enum(MyEnum)', + rets: undefined, + tail: undefined, + } + +## t_struct(MyComplexStruct)storage + +> Snapshot 1 + + { + args: [ + { + args: undefined, + head: 'MyComplexStruct', + id: 'MyComplexStruct', + rets: undefined, + tail: undefined, + }, + ], + head: 't_struct', + id: 't_struct(MyComplexStruct)storage', + rets: undefined, + tail: 'storage', + } diff --git 
a/packages/core/src/utils/parse-type-id.test.ts.snap b/packages/core/src/utils/parse-type-id.test.ts.snap new file mode 100644 index 000000000..41adb0055 Binary files /dev/null and b/packages/core/src/utils/parse-type-id.test.ts.snap differ diff --git a/packages/core/src/utils/parse-type-id.ts b/packages/core/src/utils/parse-type-id.ts new file mode 100644 index 000000000..7600a64d4 --- /dev/null +++ b/packages/core/src/utils/parse-type-id.ts @@ -0,0 +1,104 @@ +import assert from 'assert'; + +// This module parses type identifiers. + +export interface ParsedTypeId { + id: string; + head: string; + args?: ParsedTypeId[]; + tail?: string; + rets?: ParsedTypeId[]; +} + +// The examples below illustrate the meaning of these properties. + +// 1) id = t_struct(MyStruct)storage +// └──────┘ └──────┘ └─────┘ +// head args[0] tail +// rets = undefined + +// 2) id = t_function_internal_nonpayable(t_uint256,t_uint256)returns(t_address) +// └────────────────────────────┘ └───────┘ └───────┘ └───────┘ +// head args[0] args[1] rets[0] +// tail = undefined + +// 3) id = t_uint256 +// └───────┘ +// head +// args = undefined +// tail = undefined +// rets = undefined + +export function parseTypeId(id: string): ParsedTypeId { + const matcher = new StatefulGlobalMatcher(id); + + function parseList(): ParsedTypeId[] { + const args = []; + + let i = matcher.index; + let delim; + + while ((delim = matcher.match(/[(),]/))) { + if (delim[0] === '(') { + let depth = 1; + while (depth > 0) { + const paren = matcher.match(/[()]/)?.[0]; + if (paren === '(') { + depth += 1; + } else if (paren === ')') { + depth -= 1; + } else { + throw new Error(`Malformed type id ${id}`); + } + } + } else { + const subtype = id.slice(i, delim.index); + if (subtype) { + args.push(parseTypeId(subtype)); + } + i = delim.index + 1; + if (delim[0] === ')') { + break; + } + } + } + + return args; + } + + const openArgs = matcher.match(/\(/); + const head = id.slice(0, openArgs?.index); + let args, tail, rets; + + if 
(openArgs) { + args = parseList(); + + const startTail = matcher.index; + const openRet = matcher.match(/\(/); + + if (!openRet) { + tail = id.slice(startTail) || undefined; + } else { + assert(id.slice(startTail, openRet.index) === 'returns', `Malformed type id ${id}`); + rets = parseList(); + } + } + + return { id, head, args, tail, rets }; +} + +class StatefulGlobalMatcher { + index = 0; + + constructor(private readonly text: string) {} + + match(re: RegExp | string): RegExpExecArray | null { + const gre = new RegExp(re, 'g'); + gre.lastIndex = this.index; + const match = gre.exec(this.text); + if (match !== null) { + this.index = gre.lastIndex; + } + return match; + } +} diff --git a/packages/core/src/utils/type-id.ts b/packages/core/src/utils/type-id.ts new file mode 100644 index 000000000..46bf8386e --- /dev/null +++ b/packages/core/src/utils/type-id.ts @@ -0,0 +1,57 @@ +import assert from 'assert'; + +// Type Identifiers in the AST are for some reason encoded so that they don't +// contain parentheses or commas, which have been substituted as follows: +// ( -> $_ +// ) -> _$ +// , -> _$_ +// This is particularly hard to decode because it is not a prefix-free code. +// Thus, the following regex has to perform a lookahead to make sure it gets +// the substitution right. +export function decodeTypeIdentifier(typeIdentifier: string): string { + return typeIdentifier.replace(/(\$_|_\$_|_\$)(?=(\$_|_\$_|_\$)*([^_$]|$))/g, m => { + switch (m) { + case '$_': + return '('; + case '_$': + return ')'; + case '_$_': + return ','; + default: + throw new Error('Unreachable'); + } + }); +} + +// Some Type Identifiers contain a _storage_ptr suffix, but the _ptr part +// appears in some places and not others. We remove it to get consistent type +// ids from the different places in the AST. 
+export function normalizeTypeIdentifier(typeIdentifier: string): string { + return decodeTypeIdentifier(typeIdentifier).replace(/_storage_ptr\b/g, '_storage'); +} + +// Type Identifiers contain AST id numbers, which makes them sensitive to +// unrelated changes in the source code. This function stabilizes a type +// identifier by removing all AST ids. +export function stabilizeTypeIdentifier(typeIdentifier: string): string { + let decoded = decodeTypeIdentifier(typeIdentifier); + const re = /(t_struct|t_enum|t_contract)\(/g; + let match; + while ((match = re.exec(decoded))) { + let i; + let d = 1; + for (i = match.index + match[0].length; d !== 0; i++) { + assert(i < decoded.length, 'index out of bounds'); + const c = decoded[i]; + if (c === '(') { + d += 1; + } else if (c === ')') { + d -= 1; + } + } + const re2 = /\d+_?/y; + re2.lastIndex = i; + decoded = decoded.replace(re2, ''); + } + return decoded; +} diff --git a/packages/core/src/validate.test.ts b/packages/core/src/validate.test.ts index c7a94c5ff..45ca00b03 100644 --- a/packages/core/src/validate.test.ts +++ b/packages/core/src/validate.test.ts @@ -19,7 +19,7 @@ interface Context { const test = _test as TestInterface; test.before(async t => { - const buildInfo = await artifacts.getBuildInfo('contracts/test/Validations.sol:HasStruct'); + const buildInfo = await artifacts.getBuildInfo('contracts/test/Validations.sol:HasEmptyConstructor'); if (buildInfo === undefined) { throw new Error('Build info not found'); } @@ -73,15 +73,6 @@ testValid('UsesExplicitUnsafeExternalLibrary', false); testValid('UsesImplicitSafeExternalLibrary', false); testValid('UsesExplicitSafeExternalLibrary', false); -// Custom types (structs and enums) are not yet supported -// see: https://github.com/OpenZeppelin/openzeppelin-upgrades/issues/3 -testValid('HasStruct', false); -testValid('ParentHasStruct', false); -testValid('UsesLibraryWithStruct', false); -testValid('HasEnum', false); -testValid('ParentHasEnum', false); 
-testValid('UsesLibraryWithEnum', false); - test('inherited storage', t => { const version = getContractVersion(t.context.validation, 'StorageInheritChild'); const layout = getStorageLayout([t.context.validation], version); @@ -92,12 +83,5 @@ test('inherited storage', t => { } }); -testOverride('HasStruct', { unsafeAllowCustomTypes: true }, true); -testOverride('ParentHasStruct', { unsafeAllowCustomTypes: true }, true); -testOverride('UsesLibraryWithStruct', { unsafeAllowCustomTypes: true }, true); -testOverride('HasEnum', { unsafeAllowCustomTypes: true }, true); -testOverride('ParentHasEnum', { unsafeAllowCustomTypes: true }, true); -testOverride('UsesLibraryWithEnum', { unsafeAllowCustomTypes: true }, true); - testOverride('UsesImplicitSafeExternalLibrary', { unsafeAllowLinkedLibraries: true }, true); testOverride('UsesExplicitSafeExternalLibrary', { unsafeAllowLinkedLibraries: true }, true); diff --git a/packages/core/src/validate.ts b/packages/core/src/validate.ts deleted file mode 100644 index 1bcb4efc8..000000000 --- a/packages/core/src/validate.ts +++ /dev/null @@ -1,465 +0,0 @@ -import { isNodeType, findAll } from 'solidity-ast/utils'; -import type { ContractDefinition } from 'solidity-ast'; -import chalk from 'chalk'; - -import { SolcOutput, SolcBytecode } from './solc-api'; -import { Version, getVersion } from './version'; -import { extractStorageLayout, StorageLayout } from './storage'; -import { extractLinkReferences, unlinkBytecode, LinkReference } from './link-refs'; -import { UpgradesError, ErrorDescriptions } from './error'; -import { SrcDecoder } from './src-decoder'; -import { isNullish } from './utils/is-nullish'; - -export type ValidationLog = RunValidation[]; -export type RunValidation = Record; - -// upgrades-core@1.3.0 introduced ValidationLog but for compatibility with ^1.0.0 -// the functions exported by this module also accept a single RunValidation -type Validations = ValidationLog | RunValidation; - -// aliases for backwards 
compatibility with ^1.0.0 -export type Validation = RunValidation; -export type ValidationResult = ContractValidation; - -export interface ContractValidation { - version?: Version; - inherit: string[]; - libraries: string[]; - linkReferences: LinkReference[]; - errors: ValidationError[]; - layout: StorageLayout; -} - -type ValidationError = ValidationErrorConstructor | ValidationErrorOpcode | ValidationErrorWithName; - -interface ValidationErrorBase { - src: string; -} - -interface ValidationErrorWithName extends ValidationErrorBase { - name: string; - kind: - | 'state-variable-assignment' - | 'state-variable-immutable' - | 'external-library-linking' - | 'struct-definition' - | 'enum-definition'; -} - -interface ValidationErrorConstructor extends ValidationErrorBase { - kind: 'constructor'; - contract: string; -} - -interface ValidationErrorOpcode extends ValidationErrorBase { - kind: 'delegatecall' | 'selfdestruct'; -} - -export interface ValidationOptions { - unsafeAllowCustomTypes?: boolean; - unsafeAllowLinkedLibraries?: boolean; -} - -export function withValidationDefaults(opts: ValidationOptions): Required { - return { - unsafeAllowCustomTypes: opts.unsafeAllowCustomTypes ?? false, - unsafeAllowLinkedLibraries: opts.unsafeAllowLinkedLibraries ?? false, - }; -} - -export function validate(solcOutput: SolcOutput, decodeSrc: SrcDecoder): RunValidation { - const validation: RunValidation = {}; - const fromId: Record = {}; - const inheritIds: Record = {}; - const libraryIds: Record = {}; - - for (const source in solcOutput.contracts) { - for (const contractName in solcOutput.contracts[source]) { - const bytecode = solcOutput.contracts[source][contractName].evm.bytecode; - const version = bytecode.object === '' ? 
undefined : getVersion(bytecode.object); - const linkReferences = extractLinkReferences(bytecode); - - validation[contractName] = { - version, - inherit: [], - libraries: [], - linkReferences, - errors: [], - layout: { - storage: [], - types: {}, - }, - }; - } - - for (const contractDef of findAll('ContractDefinition', solcOutput.sources[source].ast)) { - fromId[contractDef.id] = contractDef.name; - - // May be undefined in case of duplicate contract names in Truffle - const bytecode = solcOutput.contracts[source][contractDef.name]?.evm.bytecode; - - if (contractDef.name in validation && bytecode !== undefined) { - inheritIds[contractDef.name] = contractDef.linearizedBaseContracts.slice(1); - libraryIds[contractDef.name] = getReferencedLibraryIds(contractDef); - - validation[contractDef.name].errors = [ - ...getConstructorErrors(contractDef, decodeSrc), - ...getDelegateCallErrors(contractDef, decodeSrc), - ...getStateVariableErrors(contractDef, decodeSrc), - // TODO: add support for structs and enums - // https://github.com/OpenZeppelin/openzeppelin-upgrades/issues/3 - ...getStructErrors(contractDef, decodeSrc), - ...getEnumErrors(contractDef, decodeSrc), - // TODO: add linked libraries support - // https://github.com/OpenZeppelin/openzeppelin-upgrades/issues/52 - ...getLinkingErrors(contractDef, bytecode), - ]; - - validation[contractDef.name].layout = extractStorageLayout(contractDef, decodeSrc); - } - } - } - - for (const contractName in inheritIds) { - validation[contractName].inherit = inheritIds[contractName].map(id => fromId[id]); - } - - for (const contractName in libraryIds) { - validation[contractName].libraries = libraryIds[contractName].map(id => fromId[id]); - } - - return validation; -} - -export function getContractVersion(validation: RunValidation, contractName: string): Version { - const { version } = validation[contractName]; - if (version === undefined) { - throw new Error(`Contract ${contractName} is abstract`); - } - return version; -} - 
-export function getContractNameAndRunValidation(validations: Validations, version: Version): [string, RunValidation] { - const validationLog = Array.isArray(validations) ? validations : [validations]; - - let runValidation; - let contractName; - - for (const validation of validationLog) { - contractName = Object.keys(validation).find( - name => validation[name].version?.withMetadata === version.withMetadata, - ); - if (contractName !== undefined) { - runValidation = validation; - break; - } - } - - if (contractName === undefined || runValidation === undefined) { - throw new Error('The requested contract was not found. Make sure the source code is available for compilation'); - } - - return [contractName, runValidation]; -} - -export function getStorageLayout(validations: Validations, version: Version): StorageLayout { - const [contractName, runValidation] = getContractNameAndRunValidation(validations, version); - const c = runValidation[contractName]; - const layout: StorageLayout = { storage: [], types: {} }; - for (const name of [contractName].concat(c.inherit)) { - layout.storage.unshift(...runValidation[name].layout.storage); - Object.assign(layout.types, runValidation[name].layout.types); - } - return layout; -} - -export function getUnlinkedBytecode(validations: Validations, bytecode: string): string { - const validationLog = Array.isArray(validations) ? 
validations : [validations]; - - for (const validation of validationLog) { - const linkableContracts = Object.keys(validation).filter(name => validation[name].linkReferences.length > 0); - - for (const name of linkableContracts) { - const { linkReferences } = validation[name]; - const unlinkedBytecode = unlinkBytecode(bytecode, linkReferences); - const version = getVersion(unlinkedBytecode); - - if (validation[name].version?.withMetadata === version.withMetadata) { - return unlinkedBytecode; - } - } - } - - return bytecode; -} - -export function assertUpgradeSafe(validations: Validations, version: Version, opts: ValidationOptions): void { - const [contractName] = getContractNameAndRunValidation(validations, version); - - let errors = getErrors(validations, version); - errors = processExceptions(contractName, errors, opts); - - if (errors.length > 0) { - throw new ValidationErrors(contractName, errors); - } -} - -function processExceptions( - contractName: string, - errorsToProcess: ValidationError[], - opts: ValidationOptions, -): ValidationError[] { - const { unsafeAllowCustomTypes, unsafeAllowLinkedLibraries } = withValidationDefaults(opts); - let errors: ValidationError[] = errorsToProcess; - - // Process `unsafeAllowCustomTypes` flag - if (unsafeAllowCustomTypes) { - errors = processOverride( - contractName, - errors, - ['enum-definition', 'struct-definition'], - ` You are using the \`unsafeAllowCustomTypes\` flag to skip storage checks for structs and enums.\n` + - ` Make sure you have manually checked the storage layout for incompatibilities.\n`, - ); - } - - // Process `unsafeAllowLinkedLibraries` flag - if (unsafeAllowLinkedLibraries) { - errors = processOverride( - contractName, - errors, - ['external-library-linking'], - ` You are using the \`unsafeAllowLinkedLibraries\` flag to include external libraries.\n` + - ` Make sure you have manually checked that the linked libraries are upgrade safe.\n`, - ); - } - - return errors; -} - -function 
processOverride( - contractName: string, - errorsToProcess: ValidationError[], - overrides: string[], - message: string, -): ValidationError[] { - let errors: ValidationError[] = errorsToProcess; - let exceptionsFound = false; - - errors = errors.filter(error => { - const isException = overrides.includes(error.kind); - exceptionsFound = exceptionsFound || isException; - return !isException; - }); - - if (exceptionsFound && !silenced) { - console.error( - '\n' + - chalk.keyword('orange').bold('Warning: ') + - `Potentially unsafe deployment of ${contractName}\n\n` + - message, - ); - } - - return errors; -} - -export class ValidationErrors extends UpgradesError { - constructor(contractName: string, readonly errors: ValidationError[]) { - super(`Contract \`${contractName}\` is not upgrade safe`, () => { - return errors.map(describeError).join('\n\n'); - }); - } -} - -const errorInfo: ErrorDescriptions = { - constructor: { - msg: e => `Contract \`${e.contract}\` has a constructor`, - hint: 'Define an initializer instead', - link: 'https://zpl.in/upgrades/error-001', - }, - delegatecall: { - msg: () => `Use of delegatecall is not allowed`, - link: 'https://zpl.in/upgrades/error-002', - }, - selfdestruct: { - msg: () => `Use of selfdestruct is not allowed`, - link: 'https://zpl.in/upgrades/error-003', - }, - 'state-variable-assignment': { - msg: e => `Variable \`${e.name}\` is assigned an initial value`, - hint: 'Move the assignment to the initializer', - link: 'https://zpl.in/upgrades/error-004', - }, - 'state-variable-immutable': { - msg: e => `Variable \`${e.name}\` is immutable`, - hint: `Use a constant or mutable variable instead`, - link: 'https://zpl.in/upgrades/error-005', - }, - 'external-library-linking': { - msg: e => `Linking external libraries like \`${e.name}\` is not yet supported`, - hint: - `Use libraries with internal functions only, or skip this check with the \`unsafeAllowLinkedLibraries\` flag \n` + - ` if you have manually checked that the libraries 
are upgrade safe`, - link: 'https://zpl.in/upgrades/error-006', - }, - 'struct-definition': { - msg: e => `Defining structs like \`${e.name}\` is not yet supported`, - hint: `If you have manually checked for storage layout compatibility, you can skip this check with the \`unsafeAllowCustomTypes\` flag`, - link: 'https://zpl.in/upgrades/error-007', - }, - 'enum-definition': { - msg: e => `Defining enums like \`${e.name}\` is not yet supported`, - hint: `If you have manually checked for storage layout compatibility, you can skip this check with the \`unsafeAllowCustomTypes\` flag`, - link: 'https://zpl.in/upgrades/error-007', - }, -}; - -function describeError(e: ValidationError): string { - const info = errorInfo[e.kind]; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const log = [chalk.bold(e.src) + ': ' + info.msg(e as any)]; - if (info.hint) { - log.push(info.hint); - } - if (info.link) { - log.push(chalk.dim(info.link)); - } - return log.join('\n '); -} - -export function getErrors(validations: Validations, version: Version): ValidationError[] { - const [contractName, runValidation] = getContractNameAndRunValidation(validations, version); - const c = runValidation[contractName]; - return c.errors - .concat(...c.inherit.map(name => runValidation[name].errors)) - .concat(...c.libraries.map(name => runValidation[name].errors)); -} - -export function isUpgradeSafe(validations: Validations, version: Version): boolean { - return getErrors(validations, version).length == 0; -} - -function* getConstructorErrors(contractDef: ContractDefinition, decodeSrc: SrcDecoder): Generator { - for (const fnDef of findAll('FunctionDefinition', contractDef)) { - if (fnDef.kind === 'constructor' && ((fnDef.body?.statements.length ?? 
0) > 0 || fnDef.modifiers.length > 0)) { - yield { - kind: 'constructor', - contract: contractDef.name, - src: decodeSrc(fnDef), - }; - } - } -} - -function* getDelegateCallErrors( - contractDef: ContractDefinition, - decodeSrc: SrcDecoder, -): Generator { - for (const fnCall of findAll('FunctionCall', contractDef)) { - const fn = fnCall.expression; - if (fn.typeDescriptions.typeIdentifier?.match(/^t_function_baredelegatecall_/)) { - yield { - kind: 'delegatecall', - src: decodeSrc(fnCall), - }; - } - if (fn.typeDescriptions.typeIdentifier?.match(/^t_function_selfdestruct_/)) { - yield { - kind: 'selfdestruct', - src: decodeSrc(fnCall), - }; - } - } -} - -function* getStateVariableErrors( - contractDef: ContractDefinition, - decodeSrc: SrcDecoder, -): Generator { - for (const varDecl of contractDef.nodes) { - if (isNodeType('VariableDeclaration', varDecl)) { - if (!varDecl.constant && !isNullish(varDecl.value)) { - yield { - kind: 'state-variable-assignment', - name: varDecl.name, - src: decodeSrc(varDecl), - }; - } - if (varDecl.mutability === 'immutable') { - yield { - kind: 'state-variable-immutable', - name: varDecl.name, - src: decodeSrc(varDecl), - }; - } - } - } -} - -function getReferencedLibraryIds(contractDef: ContractDefinition): number[] { - const implicitUsage = [...findAll('UsingForDirective', contractDef)].map( - usingForDirective => usingForDirective.libraryName.referencedDeclaration, - ); - - const explicitUsage = [...findAll('Identifier', contractDef)] - .filter(identifier => identifier.typeDescriptions.typeString?.match(/^type\(library/)) - .map(identifier => { - if (isNullish(identifier.referencedDeclaration)) { - throw new Error('Broken invariant: Identifier.referencedDeclaration should not be null'); - } - return identifier.referencedDeclaration; - }); - - return [...new Set(implicitUsage.concat(explicitUsage))]; -} - -function* getLinkingErrors( - contractDef: ContractDefinition, - bytecode: SolcBytecode, -): Generator { - const { 
linkReferences } = bytecode; - for (const source of Object.keys(linkReferences)) { - for (const libName of Object.keys(linkReferences[source])) { - yield { - kind: 'external-library-linking', - name: libName, - src: source, - }; - } - } -} - -function* getStructErrors(contractDef: ContractDefinition, decodeSrc: SrcDecoder): Generator { - for (const structDefinition of findAll('StructDefinition', contractDef)) { - yield { - kind: 'struct-definition', - name: structDefinition.name, - src: decodeSrc(structDefinition), - }; - } -} - -function* getEnumErrors(contractDef: ContractDefinition, decodeSrc: SrcDecoder): Generator { - for (const enumDefinition of findAll('EnumDefinition', contractDef)) { - yield { - kind: 'enum-definition', - name: enumDefinition.name, - src: decodeSrc(enumDefinition), - }; - } -} - -let silenced = false; - -export function silenceWarnings(): void { - if (!silenced) { - console.error( - '\n' + - chalk.keyword('orange').bold('Warning: ') + - `All subsequent Upgrades warnings will be silenced.\n\n` + - ` Make sure you have manually checked all uses of unsafe flags.\n`, - ); - silenced = true; - } -} diff --git a/packages/core/src/validate/compat.ts b/packages/core/src/validate/compat.ts new file mode 100644 index 000000000..112c16992 --- /dev/null +++ b/packages/core/src/validate/compat.ts @@ -0,0 +1,9 @@ +// Legacy interface for backwards compatibility + +import { ContractValidation, ValidationRunData } from './run'; + +export type RunValidation = ValidationRunData; +export type ValidationLog = RunValidation[]; +export type Validation = RunValidation; + +export type ValidationResult = ContractValidation; diff --git a/packages/core/src/validate/data.ts b/packages/core/src/validate/data.ts new file mode 100644 index 000000000..5aa3408e2 --- /dev/null +++ b/packages/core/src/validate/data.ts @@ -0,0 +1,46 @@ +import { ValidationRunData } from './run'; + +type ValidationDataV1 = ValidationRunData; + +type ValidationDataV2 = ValidationRunData[]; + 
+interface ValidationDataV3 { + version: '3'; + log: ValidationRunData[]; +} + +export type ValidationDataCurrent = ValidationDataV3; + +export type ValidationData = ValidationDataV1 | ValidationDataV2 | ValidationDataV3; + +export function normalizeValidationData(data: ValidationData): ValidationDataCurrent { + if (isCurrentValidationData(data)) { + return data; + } else if (Array.isArray(data)) { + return { version: '3', log: data }; + } else { + return { version: '3', log: [data] }; + } +} + +export function isCurrentValidationData(data: ValidationData): data is ValidationDataCurrent { + if (Array.isArray(data)) { + return false; + } else if (!('version' in data)) { + return false; + } else if (data.version === '3') { + return true; + } else { + throw new Error('Unknown version or malformed validation data'); + } +} + +export function concatRunData( + newRunData: ValidationRunData, + previousData?: ValidationDataCurrent, +): ValidationDataCurrent { + return { + version: '3', + log: [newRunData].concat(previousData?.log ?? 
[]), + }; +} diff --git a/packages/core/src/validate/error.ts b/packages/core/src/validate/error.ts new file mode 100644 index 000000000..eda352998 --- /dev/null +++ b/packages/core/src/validate/error.ts @@ -0,0 +1,68 @@ +import chalk from 'chalk'; + +import { ValidationError } from './run'; +import { UpgradesError, ErrorDescriptions } from '../error'; + +export class ValidationErrors extends UpgradesError { + constructor(contractName: string, readonly errors: ValidationError[]) { + super(`Contract \`${contractName}\` is not upgrade safe`, () => { + return errors.map(describeError).join('\n\n'); + }); + } +} + +const errorInfo: ErrorDescriptions = { + constructor: { + msg: e => `Contract \`${e.contract}\` has a constructor`, + hint: () => 'Define an initializer instead', + link: 'https://zpl.in/upgrades/error-001', + }, + delegatecall: { + msg: () => `Use of delegatecall is not allowed`, + link: 'https://zpl.in/upgrades/error-002', + }, + selfdestruct: { + msg: () => `Use of selfdestruct is not allowed`, + link: 'https://zpl.in/upgrades/error-003', + }, + 'state-variable-assignment': { + msg: e => `Variable \`${e.name}\` is assigned an initial value`, + hint: () => 'Move the assignment to the initializer', + link: 'https://zpl.in/upgrades/error-004', + }, + 'state-variable-immutable': { + msg: e => `Variable \`${e.name}\` is immutable`, + hint: () => `Use a constant or mutable variable instead`, + link: 'https://zpl.in/upgrades/error-005', + }, + 'external-library-linking': { + msg: e => `Linking external libraries like \`${e.name}\` is not yet supported`, + hint: () => + `Use libraries with internal functions only, or skip this check with the \`unsafeAllowLinkedLibraries\` flag \n` + + ` if you have manually checked that the libraries are upgrade safe`, + link: 'https://zpl.in/upgrades/error-006', + }, + 'struct-definition': { + msg: e => `Structs like \`${e.name}\` are supported in the latest version of the plugin`, + hint: () => `Update your dependency and run 
again`, + }, + 'enum-definition': { + msg: e => `Enums like \`${e.name}\` are supported in the latest version of the plugin`, + hint: () => `Update your dependency and run again`, + }, +}; + +function describeError(e: ValidationError): string { + const info = errorInfo[e.kind]; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const log = [chalk.bold(e.src) + ': ' + info.msg(e as any)]; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const hint = info.hint?.(e as any); + if (hint) { + log.push(hint); + } + if (info.link) { + log.push(chalk.dim(info.link)); + } + return log.join('\n '); +} diff --git a/packages/core/src/validate/index.ts b/packages/core/src/validate/index.ts new file mode 100644 index 000000000..ece3e225d --- /dev/null +++ b/packages/core/src/validate/index.ts @@ -0,0 +1,14 @@ +export { validate, ValidationRunData, ContractValidation } from './run'; +export { ValidationOptions, withValidationDefaults, silenceWarnings } from './overrides'; +export { ValidationErrors } from './error'; +export { RunValidation, ValidationLog, Validation, ValidationResult } from './compat'; +export { ValidationData, ValidationDataCurrent, isCurrentValidationData, concatRunData } from './data'; +export { + getContractVersion, + getContractNameAndRunValidation, + getStorageLayout, + assertUpgradeSafe, + getUnlinkedBytecode, + getErrors, + isUpgradeSafe, +} from './query'; diff --git a/packages/core/src/validate/overrides.ts b/packages/core/src/validate/overrides.ts new file mode 100644 index 000000000..92c222d0a --- /dev/null +++ b/packages/core/src/validate/overrides.ts @@ -0,0 +1,89 @@ +import chalk from 'chalk'; + +import { ValidationError } from './run'; + +export interface ValidationOptions { + unsafeAllowCustomTypes?: boolean; + unsafeAllowLinkedLibraries?: boolean; +} + +export function withValidationDefaults(opts: ValidationOptions): Required { + return { + unsafeAllowCustomTypes: opts.unsafeAllowCustomTypes ?? 
false, + unsafeAllowLinkedLibraries: opts.unsafeAllowLinkedLibraries ?? false, + }; +} + +export function processExceptions( + contractName: string, + errorsToProcess: ValidationError[], + opts: ValidationOptions, +): ValidationError[] { + const { unsafeAllowCustomTypes, unsafeAllowLinkedLibraries } = withValidationDefaults(opts); + let errors: ValidationError[] = errorsToProcess; + + // Process `unsafeAllowCustomTypes` flag + if (unsafeAllowCustomTypes) { + errors = processOverride( + contractName, + errors, + ['enum-definition', 'struct-definition'], + ` \`unsafeAllowCustomTypes\` may not be necessary.\n` + + ` Update your plugin for automated struct and enum checks.\n`, + ); + } + + // Process `unsafeAllowLinkedLibraries` flag + if (unsafeAllowLinkedLibraries) { + errors = processOverride( + contractName, + errors, + ['external-library-linking'], + ` You are using the \`unsafeAllowLinkedLibraries\` flag to include external libraries.\n` + + ` Make sure you have manually checked that the linked libraries are upgrade safe.\n`, + ); + } + + return errors; +} + +function processOverride( + contractName: string, + errorsToProcess: ValidationError[], + overrides: string[], + message: string, +): ValidationError[] { + let errors: ValidationError[] = errorsToProcess; + let exceptionsFound = false; + + errors = errors.filter(error => { + const isException = overrides.includes(error.kind); + exceptionsFound = exceptionsFound || isException; + return !isException; + }); + + if (exceptionsFound && !silenced) { + console.error( + chalk.keyword('orange').bold('Warning:') + ` Potentially unsafe deployment of ${contractName}\n\n` + message, + ); + } + + return errors; +} + +let silenced = false; + +export function silenceWarnings(): void { + if (!silenced) { + console.error( + chalk.keyword('orange').bold('Warning:') + + ` All subsequent Upgrades warnings will be silenced.\n\n` + + ` Make sure you have manually checked all uses of unsafe flags.\n`, + ); + silenced = true; + } 
+} + +export function isSilencingWarnings(): boolean { + return silenced; +} diff --git a/packages/core/src/validate/query.ts b/packages/core/src/validate/query.ts new file mode 100644 index 000000000..5a01386f5 --- /dev/null +++ b/packages/core/src/validate/query.ts @@ -0,0 +1,115 @@ +import { Version, getVersion } from '../version'; +import { ValidationRunData, ValidationError } from './run'; +import { StorageLayout } from '../storage/layout'; +import { unlinkBytecode } from '../link-refs'; +import { ValidationOptions, processExceptions } from './overrides'; +import { ValidationErrors } from './error'; +import { ValidationData, normalizeValidationData } from './data'; + +export function assertUpgradeSafe(data: ValidationData, version: Version, opts: ValidationOptions): void { + const dataV3 = normalizeValidationData(data); + const [contractName] = getContractNameAndRunValidation(dataV3, version); + + let errors = getErrors(dataV3, version); + errors = processExceptions(contractName, errors, opts); + + if (errors.length > 0) { + throw new ValidationErrors(contractName, errors); + } +} + +export function getContractVersion(runData: ValidationRunData, contractName: string): Version { + const { version } = runData[contractName]; + if (version === undefined) { + throw new Error(`Contract ${contractName} is abstract`); + } + return version; +} + +export function getContractNameAndRunValidation(data: ValidationData, version: Version): [string, ValidationRunData] { + const dataV3 = normalizeValidationData(data); + + let runValidation; + let contractName; + + for (const validation of dataV3.log) { + contractName = Object.keys(validation).find( + name => validation[name].version?.withMetadata === version.withMetadata, + ); + if (contractName !== undefined) { + runValidation = validation; + break; + } + } + + if (contractName === undefined || runValidation === undefined) { + throw new Error('The requested contract was not found. 
Make sure the source code is available for compilation'); + } + + return [contractName, runValidation]; +} + +export function getStorageLayout(data: ValidationData, version: Version): StorageLayout { + const dataV3 = normalizeValidationData(data); + const [contractName, runValidation] = getContractNameAndRunValidation(dataV3, version); + return unfoldStorageLayout(runValidation, contractName); +} + +export function unfoldStorageLayout(runData: ValidationRunData, contractName: string): StorageLayout { + const c = runData[contractName]; + const layout: StorageLayout = { storage: [], types: {} }; + for (const name of [contractName].concat(c.inherit)) { + layout.storage.unshift(...runData[name].layout.storage); + Object.assign(layout.types, runData[name].layout.types); + } + return layout; +} + +export function* findVersionWithoutMetadataMatches( + data: ValidationData, + versionWithoutMetadata: string, +): Generator<[string, ValidationRunData]> { + const dataV3 = normalizeValidationData(data); + + for (const validation of dataV3.log) { + for (const contractName in validation) { + if (validation[contractName].version?.withoutMetadata === versionWithoutMetadata) { + yield [contractName, validation]; + } + } + } +} + +export function getUnlinkedBytecode(data: ValidationData, bytecode: string): string { + const dataV3 = normalizeValidationData(data); + + for (const validation of dataV3.log) { + const linkableContracts = Object.keys(validation).filter(name => validation[name].linkReferences.length > 0); + + for (const name of linkableContracts) { + const { linkReferences } = validation[name]; + const unlinkedBytecode = unlinkBytecode(bytecode, linkReferences); + const version = getVersion(unlinkedBytecode); + + if (validation[name].version?.withMetadata === version.withMetadata) { + return unlinkedBytecode; + } + } + } + + return bytecode; +} + +export function getErrors(data: ValidationData, version: Version): ValidationError[] { + const dataV3 = 
normalizeValidationData(data); + const [contractName, runValidation] = getContractNameAndRunValidation(dataV3, version); + const c = runValidation[contractName]; + return c.errors + .concat(...c.inherit.map(name => runValidation[name].errors)) + .concat(...c.libraries.map(name => runValidation[name].errors)); +} + +export function isUpgradeSafe(data: ValidationData, version: Version): boolean { + const dataV3 = normalizeValidationData(data); + return getErrors(dataV3, version).length == 0; +} diff --git a/packages/core/src/validate/run.ts b/packages/core/src/validate/run.ts new file mode 100644 index 000000000..a6b51e789 --- /dev/null +++ b/packages/core/src/validate/run.ts @@ -0,0 +1,199 @@ +import { isNodeType, findAll } from 'solidity-ast/utils'; +import type { ContractDefinition } from 'solidity-ast'; + +import { SolcOutput, SolcBytecode } from '../solc-api'; +import { SrcDecoder } from '../src-decoder'; +import { astDereferencer } from '../ast-dereferencer'; +import { isNullish } from '../utils/is-nullish'; +import { Version, getVersion } from '../version'; +import { extractLinkReferences, LinkReference } from '../link-refs'; +import { extractStorageLayout } from '../storage/extract'; +import { StorageLayout } from '../storage/layout'; + +export type ValidationRunData = Record; + +export interface ContractValidation { + version?: Version; + inherit: string[]; + libraries: string[]; + linkReferences: LinkReference[]; + errors: ValidationError[]; + layout: StorageLayout; +} + +export type ValidationError = ValidationErrorConstructor | ValidationErrorOpcode | ValidationErrorWithName; + +interface ValidationErrorBase { + src: string; +} + +interface ValidationErrorWithName extends ValidationErrorBase { + name: string; + kind: + | 'state-variable-assignment' + | 'state-variable-immutable' + | 'external-library-linking' + | 'struct-definition' + | 'enum-definition'; +} + +interface ValidationErrorConstructor extends ValidationErrorBase { + kind: 'constructor'; + 
contract: string; +} + +interface ValidationErrorOpcode extends ValidationErrorBase { + kind: 'delegatecall' | 'selfdestruct'; +} + +export function validate(solcOutput: SolcOutput, decodeSrc: SrcDecoder): ValidationRunData { + const validation: ValidationRunData = {}; + const fromId: Record = {}; + const inheritIds: Record = {}; + const libraryIds: Record = {}; + + const deref = astDereferencer(solcOutput); + + for (const source in solcOutput.contracts) { + for (const contractName in solcOutput.contracts[source]) { + const bytecode = solcOutput.contracts[source][contractName].evm.bytecode; + const version = bytecode.object === '' ? undefined : getVersion(bytecode.object); + const linkReferences = extractLinkReferences(bytecode); + + validation[contractName] = { + version, + inherit: [], + libraries: [], + linkReferences, + errors: [], + layout: { + storage: [], + types: {}, + }, + }; + } + + for (const contractDef of findAll('ContractDefinition', solcOutput.sources[source].ast)) { + fromId[contractDef.id] = contractDef.name; + + // May be undefined in case of duplicate contract names in Truffle + const bytecode = solcOutput.contracts[source][contractDef.name]?.evm.bytecode; + + if (contractDef.name in validation && bytecode !== undefined) { + inheritIds[contractDef.name] = contractDef.linearizedBaseContracts.slice(1); + libraryIds[contractDef.name] = getReferencedLibraryIds(contractDef); + + validation[contractDef.name].errors = [ + ...getConstructorErrors(contractDef, decodeSrc), + ...getDelegateCallErrors(contractDef, decodeSrc), + ...getStateVariableErrors(contractDef, decodeSrc), + // TODO: add linked libraries support + // https://github.com/OpenZeppelin/openzeppelin-upgrades/issues/52 + ...getLinkingErrors(contractDef, bytecode), + ]; + + validation[contractDef.name].layout = extractStorageLayout(contractDef, decodeSrc, deref); + } + } + } + + for (const contractName in inheritIds) { + validation[contractName].inherit = inheritIds[contractName].map(id => 
fromId[id]); + } + + for (const contractName in libraryIds) { + validation[contractName].libraries = libraryIds[contractName].map(id => fromId[id]); + } + + return validation; +} + +function* getConstructorErrors(contractDef: ContractDefinition, decodeSrc: SrcDecoder): Generator { + for (const fnDef of findAll('FunctionDefinition', contractDef)) { + if (fnDef.kind === 'constructor' && ((fnDef.body?.statements.length ?? 0) > 0 || fnDef.modifiers.length > 0)) { + yield { + kind: 'constructor', + contract: contractDef.name, + src: decodeSrc(fnDef), + }; + } + } +} + +function* getDelegateCallErrors( + contractDef: ContractDefinition, + decodeSrc: SrcDecoder, +): Generator { + for (const fnCall of findAll('FunctionCall', contractDef)) { + const fn = fnCall.expression; + if (fn.typeDescriptions.typeIdentifier?.match(/^t_function_baredelegatecall_/)) { + yield { + kind: 'delegatecall', + src: decodeSrc(fnCall), + }; + } + if (fn.typeDescriptions.typeIdentifier?.match(/^t_function_selfdestruct_/)) { + yield { + kind: 'selfdestruct', + src: decodeSrc(fnCall), + }; + } + } +} + +function* getStateVariableErrors( + contractDef: ContractDefinition, + decodeSrc: SrcDecoder, +): Generator { + for (const varDecl of contractDef.nodes) { + if (isNodeType('VariableDeclaration', varDecl)) { + if (!varDecl.constant && !isNullish(varDecl.value)) { + yield { + kind: 'state-variable-assignment', + name: varDecl.name, + src: decodeSrc(varDecl), + }; + } + if (varDecl.mutability === 'immutable') { + yield { + kind: 'state-variable-immutable', + name: varDecl.name, + src: decodeSrc(varDecl), + }; + } + } + } +} + +function getReferencedLibraryIds(contractDef: ContractDefinition): number[] { + const implicitUsage = [...findAll('UsingForDirective', contractDef)].map( + usingForDirective => usingForDirective.libraryName.referencedDeclaration, + ); + + const explicitUsage = [...findAll('Identifier', contractDef)] + .filter(identifier => 
identifier.typeDescriptions.typeString?.match(/^type\(library/)) + .map(identifier => { + if (isNullish(identifier.referencedDeclaration)) { + throw new Error('Broken invariant: Identifier.referencedDeclaration should not be null'); + } + return identifier.referencedDeclaration; + }); + + return [...new Set(implicitUsage.concat(explicitUsage))]; +} + +function* getLinkingErrors( + contractDef: ContractDefinition, + bytecode: SolcBytecode, +): Generator { + const { linkReferences } = bytecode; + for (const source of Object.keys(linkReferences)) { + for (const libName of Object.keys(linkReferences[source])) { + yield { + kind: 'external-library-linking', + name: libName, + src: source, + }; + } + } +} diff --git a/packages/plugin-hardhat/contracts/Action.sol b/packages/plugin-hardhat/contracts/Action.sol index 3ff375a43..d322aaa06 100644 --- a/packages/plugin-hardhat/contracts/Action.sol +++ b/packages/plugin-hardhat/contracts/Action.sol @@ -4,7 +4,9 @@ contract Action { enum ActionType { UP, DOWN } event ActionEvent(ActionType actionType); - function log(ActionType action) public { + ActionType action; + + function log() public { emit ActionEvent(action); } diff --git a/packages/plugin-hardhat/contracts/ActionV2.sol b/packages/plugin-hardhat/contracts/ActionV2.sol index 1a642ad8d..a7c33107e 100644 --- a/packages/plugin-hardhat/contracts/ActionV2.sol +++ b/packages/plugin-hardhat/contracts/ActionV2.sol @@ -4,8 +4,21 @@ contract ActionV2 { enum ActionType { UP, DOWN, LEFT, RIGHT } event ActionEvent(ActionType actionType); - function log(ActionType action) public { + ActionType action; + + function log() public { emit ActionEvent(action); } } + +contract ActionV2Bad { + enum ActionType { UP, LEFT, RIGHT } + event ActionEvent(ActionType actionType); + + ActionType action; + + function log() public { + emit ActionEvent(action); + } +} diff --git a/packages/plugin-hardhat/contracts/PortfolioV2.sol b/packages/plugin-hardhat/contracts/PortfolioV2.sol index 
2836c0578..811e5d255 100644 --- a/packages/plugin-hardhat/contracts/PortfolioV2.sol +++ b/packages/plugin-hardhat/contracts/PortfolioV2.sol @@ -24,23 +24,17 @@ contract PortfolioV2 { contract PortfolioV2Bad { struct Asset { - bool enabled; uint amount; } - uint insert; mapping (string => Asset) assets; function initialize() public view { } function enable(string memory name) public returns (bool) { - if (assets[name].enabled) { - return false; - } else { - assets[name] = Asset(true, 10); - return true; - } + assets[name] = Asset(10); + return true; } } diff --git a/packages/plugin-hardhat/src/index.ts b/packages/plugin-hardhat/src/index.ts index 5c3a9cf8e..f99d82180 100644 --- a/packages/plugin-hardhat/src/index.ts +++ b/packages/plugin-hardhat/src/index.ts @@ -3,12 +3,10 @@ import '@nomiclabs/hardhat-ethers'; import './type-extensions'; import { subtask, extendEnvironment } from 'hardhat/config'; -import { TASK_COMPILE_SOLIDITY_COMPILE } from 'hardhat/builtin-tasks/task-names'; +import { TASK_COMPILE_SOLIDITY, TASK_COMPILE_SOLIDITY_COMPILE } from 'hardhat/builtin-tasks/task-names'; import { lazyObject } from 'hardhat/plugins'; -import { validate, solcInputOutputDecoder, SolcInput } from '@openzeppelin/upgrades-core'; -import { writeValidations } from './validations'; -import type { silenceWarnings } from '@openzeppelin/upgrades-core'; +import type { silenceWarnings, SolcInput } from '@openzeppelin/upgrades-core'; import type { DeployFunction } from './deploy-proxy'; import type { UpgradeFunction, PrepareUpgradeFunction } from './upgrade-proxy'; import type { ChangeAdminFunction, TransferProxyAdminOwnershipFunction, GetInstanceFunction } from './admin'; @@ -29,7 +27,26 @@ interface RunCompilerArgs { input: SolcInput; } +subtask(TASK_COMPILE_SOLIDITY, async (args: { force: boolean }, hre, runSuper) => { + const { readValidations, ValidationsCacheOutdated, ValidationsCacheNotFound } = await import('./validations'); + + try { + await readValidations(hre); + } catch 
(e) { + if (e instanceof ValidationsCacheOutdated || e instanceof ValidationsCacheNotFound) { + args = { ...args, force: true }; + } else { + throw e; + } + } + + return runSuper(args); +}); + subtask(TASK_COMPILE_SOLIDITY_COMPILE, async (args: RunCompilerArgs, hre, runSuper) => { + const { validate, solcInputOutputDecoder } = await import('@openzeppelin/upgrades-core'); + const { writeValidations } = await import('./validations'); + // TODO: patch input const { output, solcBuild } = await runSuper(); diff --git a/packages/plugin-hardhat/src/upgrade-proxy.ts b/packages/plugin-hardhat/src/upgrade-proxy.ts index 3c6644288..8ab19fb17 100644 --- a/packages/plugin-hardhat/src/upgrade-proxy.ts +++ b/packages/plugin-hardhat/src/upgrade-proxy.ts @@ -12,6 +12,7 @@ import { getImplementationAddress, getAdminAddress, ValidationOptions, + getStorageLayoutForAddress, } from '@openzeppelin/upgrades-core'; import { getProxyAdminFactory } from './proxy-factory'; @@ -45,10 +46,10 @@ async function prepareUpgradeImpl( assertUpgradeSafe(validations, version, opts); const currentImplAddress = await getImplementationAddress(provider, proxyAddress); - const deployment = await manifest.getDeploymentFromAddress(currentImplAddress); + const deploymentLayout = await getStorageLayoutForAddress(manifest, validations, currentImplAddress); const layout = getStorageLayout(validations, version); - assertStorageUpgradeSafe(deployment.layout, layout, opts.unsafeAllowCustomTypes); + assertStorageUpgradeSafe(deploymentLayout, layout, opts.unsafeAllowCustomTypes); return await fetchOrDeploy(version, provider, async () => { const deployment = await deploy(ImplFactory); diff --git a/packages/plugin-hardhat/src/validations.ts b/packages/plugin-hardhat/src/validations.ts index e74153607..053f2ac3e 100644 --- a/packages/plugin-hardhat/src/validations.ts +++ b/packages/plugin-hardhat/src/validations.ts @@ -2,38 +2,62 @@ import { promises as fs } from 'fs'; import path from 'path'; import type { 
HardhatRuntimeEnvironment } from 'hardhat/types'; -import type { ValidationLog, RunValidation } from '@openzeppelin/upgrades-core'; +import { + ValidationDataCurrent, + ValidationRunData, + concatRunData, + isCurrentValidationData, +} from '@openzeppelin/upgrades-core'; + +export async function writeValidations(hre: HardhatRuntimeEnvironment, newRunData: ValidationRunData): Promise { + let storedData: ValidationDataCurrent | undefined; -export async function writeValidations(hre: HardhatRuntimeEnvironment, _validations: RunValidation): Promise { - let validations = [_validations]; try { - const previousValidations: RunValidation | RunValidation[] = JSON.parse( - await fs.readFile(getValidationsCachePath(hre), 'utf8'), - ); - if (previousValidations !== undefined) { - validations = validations.concat(previousValidations); - } + storedData = await readValidations(hre); } catch (e) { - if (e.code !== 'ENOENT') { + // If there is no previous data to append to, we ignore the error and write + // the file from scratch. + if (!(e instanceof ValidationsCacheNotFound)) { throw e; } } + + const validations = concatRunData(newRunData, storedData); + await fs.mkdir(hre.config.paths.cache, { recursive: true }); await fs.writeFile(getValidationsCachePath(hre), JSON.stringify(validations, null, 2)); } -export async function readValidations(hre: HardhatRuntimeEnvironment): Promise { +export async function readValidations(hre: HardhatRuntimeEnvironment): Promise { + const cachePath = getValidationsCachePath(hre); try { - return JSON.parse(await fs.readFile(getValidationsCachePath(hre), 'utf8')); + const data = JSON.parse(await fs.readFile(cachePath, 'utf8')); + if (!isCurrentValidationData(data)) { + await fs.unlink(cachePath); + throw new ValidationsCacheOutdated(); + } + return data; } catch (e) { if (e.code === 'ENOENT') { - throw new Error('Validations log not found. 
Recompile with `hardhat compile --force`'); + throw new ValidationsCacheNotFound(); } else { throw e; } } } +export class ValidationsCacheNotFound extends Error { + constructor() { + super('Validations cache not found. Recompile with `hardhat compile --force`'); + } +} + +export class ValidationsCacheOutdated extends Error { + constructor() { + super('Validations cache is outdated. Recompile with `hardhat compile --force`'); + } +} + function getValidationsCachePath(hre: HardhatRuntimeEnvironment): string { return path.join(hre.config.paths.cache, 'validations.json'); } diff --git a/packages/plugin-hardhat/test/happy-path-with-enums.js b/packages/plugin-hardhat/test/happy-path-with-enums.js index 51bf576c0..08e777a5d 100644 --- a/packages/plugin-hardhat/test/happy-path-with-enums.js +++ b/packages/plugin-hardhat/test/happy-path-with-enums.js @@ -7,26 +7,23 @@ upgrades.silenceWarnings(); test.before(async t => { t.context.Action = await ethers.getContractFactory('Action'); t.context.ActionV2 = await ethers.getContractFactory('ActionV2'); + t.context.ActionV2Bad = await ethers.getContractFactory('ActionV2Bad'); }); -test('deployProxy without flag', async t => { +test('deployProxy', async t => { const { Action } = t.context; - await t.throwsAsync(() => upgrades.deployProxy(Action)); + await upgrades.deployProxy(Action, []); }); -test('deployProxy with flag', async t => { - const { Action } = t.context; - await upgrades.deployProxy(Action, [], { unsafeAllowCustomTypes: true }); -}); - -test('upgradeProxy without flag', async t => { +test('upgradeProxy', async t => { const { Action, ActionV2 } = t.context; - const action = await upgrades.deployProxy(Action, [], { unsafeAllowCustomTypes: true }); - await t.throwsAsync(() => upgrades.upgradeProxy(action.address, ActionV2)); + const action = await upgrades.deployProxy(Action, []); + await upgrades.upgradeProxy(action.address, ActionV2); }); -test('upgradeProxy with flag', async t => { - const { Action, ActionV2 } = 
t.context; - const action = await upgrades.deployProxy(Action, [], { unsafeAllowCustomTypes: true }); - await upgrades.upgradeProxy(action.address, ActionV2, { unsafeAllowCustomTypes: true }); +test('upgradeProxy with incompatible layout', async t => { + const { Action, ActionV2Bad } = t.context; + const action = await upgrades.deployProxy(Action, []); + const error = await t.throwsAsync(() => upgrades.upgradeProxy(action.address, ActionV2Bad)); + t.true(error.message.includes('Upgraded `action` to an incompatible type')); }); diff --git a/packages/plugin-hardhat/test/happy-path-with-structs.js b/packages/plugin-hardhat/test/happy-path-with-structs.js index dff987772..1841c4afa 100644 --- a/packages/plugin-hardhat/test/happy-path-with-structs.js +++ b/packages/plugin-hardhat/test/happy-path-with-structs.js @@ -10,35 +10,22 @@ test.before(async t => { t.context.PortfolioV2Bad = await ethers.getContractFactory('PortfolioV2Bad'); }); -test('deployProxy without flag', async t => { +test('deployProxy', async t => { const { Portfolio } = t.context; - await t.throwsAsync(() => upgrades.deployProxy(Portfolio)); -}); - -test('deployProxy with flag', async t => { - const { Portfolio } = t.context; - const portfolio = await upgrades.deployProxy(Portfolio, [], { unsafeAllowCustomTypes: true }); + const portfolio = await upgrades.deployProxy(Portfolio, []); await portfolio.enable('ETH'); }); -test('upgradeProxy without flag', async t => { - const { Portfolio, PortfolioV2 } = t.context; - const portfolio = await upgrades.deployProxy(Portfolio, [], { unsafeAllowCustomTypes: true }); - await t.throwsAsync(() => upgrades.upgradeProxy(portfolio.address, PortfolioV2)); -}); - -test('upgradeProxy with flag', async t => { +test('upgradeProxy', async t => { const { Portfolio, PortfolioV2 } = t.context; - const portfolio = await upgrades.deployProxy(Portfolio, [], { unsafeAllowCustomTypes: true }); - const portfolio2 = await upgrades.upgradeProxy(portfolio.address, PortfolioV2, { 
unsafeAllowCustomTypes: true }); + const portfolio = await upgrades.deployProxy(Portfolio, []); + const portfolio2 = await upgrades.upgradeProxy(portfolio.address, PortfolioV2); await portfolio2.enable('ETH'); }); -test('upgradeProxy with flag but incompatible layout', async t => { +test('upgradeProxy with incompatible layout', async t => { const { Portfolio, PortfolioV2Bad } = t.context; - const portfolio = await upgrades.deployProxy(Portfolio, [], { unsafeAllowCustomTypes: true }); - const error = await t.throwsAsync(() => - upgrades.upgradeProxy(portfolio.address, PortfolioV2Bad, { unsafeAllowCustomTypes: true }), - ); - t.true(error.message.includes('Variable `assets` was replaced with `insert`')); + const portfolio = await upgrades.deployProxy(Portfolio, []); + const error = await t.throwsAsync(() => upgrades.upgradeProxy(portfolio.address, PortfolioV2Bad)); + t.true(error.message.includes('Upgraded `assets` to an incompatible type')); }); diff --git a/packages/plugin-truffle/package.json b/packages/plugin-truffle/package.json index 96bddb0f6..ea5a976db 100644 --- a/packages/plugin-truffle/package.json +++ b/packages/plugin-truffle/package.json @@ -25,7 +25,7 @@ "dependencies": { "@openzeppelin/upgrades-core": "^1.4.2", "@truffle/contract": "^4.2.12", - "solidity-ast": "^0.4.4" + "solidity-ast": "^0.4.15" }, "peerDependencies": { "truffle": "^5.1.35" diff --git a/packages/plugin-truffle/src/upgrade-proxy.ts b/packages/plugin-truffle/src/upgrade-proxy.ts index 62e2c7e6b..84dc49a77 100644 --- a/packages/plugin-truffle/src/upgrade-proxy.ts +++ b/packages/plugin-truffle/src/upgrade-proxy.ts @@ -8,6 +8,7 @@ import { getImplementationAddress, getAdminAddress, EthereumProvider, + getStorageLayoutForAddress, } from '@openzeppelin/upgrades-core'; import { ContractClass, ContractInstance, getTruffleConfig } from './truffle'; @@ -34,10 +35,10 @@ async function prepareUpgradeImpl( assertUpgradeSafe([validations], version, opts); const currentImplAddress = await 
getImplementationAddress(provider, proxyAddress); - const deployment = await manifest.getDeploymentFromAddress(currentImplAddress); + const deploymentLayout = await getStorageLayoutForAddress(manifest, validations, currentImplAddress); const layout = getStorageLayout([validations], version); - assertStorageUpgradeSafe(deployment.layout, layout, unsafeAllowCustomTypes); + assertStorageUpgradeSafe(deploymentLayout, layout, unsafeAllowCustomTypes); return await fetchOrDeploy(version, provider, async () => { const deployment = await deploy(Contract, deployer); diff --git a/packages/plugin-truffle/src/validate.ts b/packages/plugin-truffle/src/validate.ts index 1ed745fa8..55659fad6 100644 --- a/packages/plugin-truffle/src/validate.ts +++ b/packages/plugin-truffle/src/validate.ts @@ -6,13 +6,13 @@ import { solcInputOutputDecoder, EthereumProvider, getNetworkId, - RunValidation, + ValidationRunData, } from '@openzeppelin/upgrades-core'; import type { SolcInput, SolcOutput, SolcLinkReferences } from '@openzeppelin/upgrades-core/dist/solc-api'; import { TruffleArtifact, ContractClass, NetworkObject } from './truffle'; -export async function validateArtifacts(artifactsPath: string, sourcesPath: string): Promise { +export async function validateArtifacts(artifactsPath: string, sourcesPath: string): Promise { const artifacts = await readArtifacts(artifactsPath); const { input, output } = reconstructSolcInputOutput(artifacts); const srcDecoder = solcInputOutputDecoder(input, output, sourcesPath); diff --git a/packages/plugin-truffle/test/contracts/Action.sol b/packages/plugin-truffle/test/contracts/Action.sol index cf0f278a5..180a4257e 100644 --- a/packages/plugin-truffle/test/contracts/Action.sol +++ b/packages/plugin-truffle/test/contracts/Action.sol @@ -4,10 +4,12 @@ contract Action { enum ActionType { UP, DOWN } event ActionEvent(ActionType actionType); + ActionType action; + function initialize() public view { } - function log(ActionType action) public { + function log() 
public { emit ActionEvent(action); } diff --git a/packages/plugin-truffle/test/contracts/ActionV2.sol b/packages/plugin-truffle/test/contracts/ActionV2.sol index 464421231..6989d717d 100644 --- a/packages/plugin-truffle/test/contracts/ActionV2.sol +++ b/packages/plugin-truffle/test/contracts/ActionV2.sol @@ -4,11 +4,27 @@ contract ActionV2 { enum ActionType { UP, DOWN, LEFT, RIGHT } event ActionEvent(ActionType actionType); + ActionType action; function initialize() public view { } - function log(ActionType action) public { + function log() public { + emit ActionEvent(action); + } + +} + +contract ActionV2Bad { + enum ActionType { UP, LEFT, RIGHT } + event ActionEvent(ActionType actionType); + + ActionType action; + + function initialize() public view { + } + + function log() public { emit ActionEvent(action); } diff --git a/packages/plugin-truffle/test/contracts/PortfolioV2.sol b/packages/plugin-truffle/test/contracts/PortfolioV2.sol index 2836c0578..c039e26bf 100644 --- a/packages/plugin-truffle/test/contracts/PortfolioV2.sol +++ b/packages/plugin-truffle/test/contracts/PortfolioV2.sol @@ -24,23 +24,18 @@ contract PortfolioV2 { contract PortfolioV2Bad { struct Asset { - bool enabled; uint amount; } - uint insert; mapping (string => Asset) assets; function initialize() public view { } function enable(string memory name) public returns (bool) { - if (assets[name].enabled) { - return false; - } else { - assets[name] = Asset(true, 10); - return true; - } + assets[name] = Asset(10); + return true; } + } diff --git a/packages/plugin-truffle/test/migrations/4_deploy_action.js b/packages/plugin-truffle/test/migrations/4_deploy_action.js index 819551c98..0d1c8fa06 100644 --- a/packages/plugin-truffle/test/migrations/4_deploy_action.js +++ b/packages/plugin-truffle/test/migrations/4_deploy_action.js @@ -4,6 +4,6 @@ const ActionV2 = artifacts.require('ActionV2'); const { deployProxy, upgradeProxy } = require('@openzeppelin/truffle-upgrades'); module.exports = async function 
(deployer) { - const a = await deployProxy(Action, [], { deployer, unsafeAllowCustomTypes: true }); - await upgradeProxy(a.address, ActionV2, { deployer, unsafeAllowCustomTypes: true }); + const a = await deployProxy(Action, [], { deployer }); + await upgradeProxy(a.address, ActionV2, { deployer }); }; diff --git a/packages/plugin-truffle/test/migrations/5_deploy_portfolio.js b/packages/plugin-truffle/test/migrations/5_deploy_portfolio.js index 87adf2d1b..b7797cf50 100644 --- a/packages/plugin-truffle/test/migrations/5_deploy_portfolio.js +++ b/packages/plugin-truffle/test/migrations/5_deploy_portfolio.js @@ -4,6 +4,6 @@ const PortfolioV2 = artifacts.require('PortfolioV2'); const { deployProxy, upgradeProxy } = require('@openzeppelin/truffle-upgrades'); module.exports = async function (deployer) { - const a = await deployProxy(Portfolio, [], { deployer, unsafeAllowCustomTypes: true }); - await upgradeProxy(a.address, PortfolioV2, { deployer, unsafeAllowCustomTypes: true }); + const a = await deployProxy(Portfolio, [], { deployer }); + await upgradeProxy(a.address, PortfolioV2, { deployer }); }; diff --git a/packages/plugin-truffle/test/test/happy-path-with-enums.js b/packages/plugin-truffle/test/test/happy-path-with-enums.js index 1674ddc60..64dd08d21 100644 --- a/packages/plugin-truffle/test/test/happy-path-with-enums.js +++ b/packages/plugin-truffle/test/test/happy-path-with-enums.js @@ -1,21 +1,20 @@ const assert = require('assert'); - const { deployProxy, upgradeProxy } = require('@openzeppelin/truffle-upgrades'); const Action = artifacts.require('Action'); const ActionV2 = artifacts.require('ActionV2'); +const ActionV2Bad = artifacts.require('ActionV2Bad'); contract('Action', function () { - it('deployProxy', async function () { - await assert.rejects(deployProxy(Action)); - - // we need use the flag to deploy in order to have an address to upgrade - const action = await deployProxy(Action, [], { unsafeAllowCustomTypes: true }); - await 
assert.rejects(upgradeProxy(action.address, ActionV2)); + it('compatible enums', async function () { + const action = await deployProxy(Action, []); + await upgradeProxy(action.address, ActionV2); }); - it('deployProxy', async function () { - const action = await deployProxy(Action, [], { unsafeAllowCustomTypes: true }); - await upgradeProxy(action.address, ActionV2, { unsafeAllowCustomTypes: true }); + it('incompatible enums', async function () { + const action = await deployProxy(Action, []); + await assert.rejects(upgradeProxy(action.address, ActionV2Bad), error => + error.message.includes('Upgraded `action` to an incompatible type'), + ); }); }); diff --git a/packages/plugin-truffle/test/test/happy-path-with-structs.js b/packages/plugin-truffle/test/test/happy-path-with-structs.js index e7698aa1d..74b174033 100644 --- a/packages/plugin-truffle/test/test/happy-path-with-structs.js +++ b/packages/plugin-truffle/test/test/happy-path-with-structs.js @@ -6,26 +6,16 @@ const Portfolio = artifacts.require('Portfolio'); const PortfolioV2 = artifacts.require('PortfolioV2'); const PortfolioV2Bad = artifacts.require('PortfolioV2Bad'); -contract('PortfolioWithoutFlag', function () { - it('deployProxy', async function () { - await assert.rejects(deployProxy(Portfolio)); - - // we need use the flag to deploy in order to have an address to upgrade - const portfolio = await deployProxy(Portfolio, [], { unsafeAllowCustomTypes: true }); - await assert.rejects(upgradeProxy(portfolio.address, PortfolioV2)); - }); -}); - -contract('PortfolioWithFlag', function () { - it('deployProxy', async function () { - const portfolio = await deployProxy(Portfolio, [], { unsafeAllowCustomTypes: true }); - await upgradeProxy(portfolio.address, PortfolioV2, { unsafeAllowCustomTypes: true }); +contract('Portfolio', function () { + it('compatible struct', async function () { + const portfolio = await deployProxy(Portfolio, []); + await upgradeProxy(portfolio.address, PortfolioV2); }); - 
it('upgradeProxy with flag but incompatible layout', async function () { - const portfolio = await deployProxy(Portfolio, [], { unsafeAllowCustomTypes: true }); - await assert.rejects(upgradeProxy(portfolio.address, PortfolioV2Bad, { unsafeAllowCustomTypes: true }), error => - error.message.includes('Variable `assets` was replaced with `insert`'), + it('incompatible struct', async function () { + const portfolio = await deployProxy(Portfolio, []); + await assert.rejects(upgradeProxy(portfolio.address, PortfolioV2Bad), error => + error.message.includes('Upgraded `assets` to an incompatible type'), ); }); }); diff --git a/yarn.lock b/yarn.lock index fc4f9b655..a53a66622 100644 --- a/yarn.lock +++ b/yarn.lock @@ -9831,10 +9831,10 @@ solc@0.7.3: semver "^5.5.0" tmp "0.0.33" -solidity-ast@^0.4.4: - version "0.4.17" - resolved "https://registry.yarnpkg.com/solidity-ast/-/solidity-ast-0.4.17.tgz#e857b4f64466f3eb94da78fe961c0d256c84b228" - integrity sha512-5jkkabmjPdy9W9c2DMQBlKobVBz7KDnipxn+0zW191uD6BML3w7dQ+ihUvwA9XOm9ILDECrb5Y8z2vu47STqCg== +solidity-ast@^0.4.15: + version "0.4.15" + resolved "https://registry.yarnpkg.com/solidity-ast/-/solidity-ast-0.4.15.tgz#d9f12760c1c8a69f2b0d72a56972fcc79f3b82d6" + integrity sha512-VFedJMLf5yTLKpJZ2bZBYnnrVYaxUTTvv7mpdWz9k9SuV2N/1kcHX/nBokdkI4lf3z05/3us+Q1pLP02EjJxqQ== sort-keys@^2.0.0: version "2.0.0"