diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..cacde6e --- /dev/null +++ b/.env.example @@ -0,0 +1,5 @@ +PRIVATE_KEY= +INFURA_KEY= +ETHERSCAN_API_KEY= +POLYGONSCAN_API_KEY= +POLYGONSCAN_ZKEVM_API_KEY= \ No newline at end of file diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..832a92d --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,4 @@ +# This group is setup to ensure a security review is always required for PRs +/src/ @0xPolygon/internal-security +/script/**/*.sol @0xPolygon/internal-security +/deployments/ @0xPolygon/internal-security diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000..28769d5 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,48 @@ +# Pull Request + +## Description + +Please include a summary of the change and which feature was implemented or which issue was fixed. Also, include relevant motivation and context. List any dependencies that are required for this change. + +Fixes # (issue) + +### How Has This Been Tested? + +Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration. + +# Checklist: + +Before deployment + +- [ ] 100% test and branch coverage +- [ ] check slither for severe issues +- [ ] fuzz and invariant tests (when applicable) +- [ ] formal verification (when applicable) +- [ ] deployment or upgrade scripts ready +- [ ] version management agreed upon and implemented +- [ ] internal team review +- [ ] **Security Team review** + +After deployment + +- [ ] transfer ownership after deployments (when applicable) +- [ ] complete upgrade (when applicable) +- [ ] generate deployment/upgrade log files +- [ ] update [static](https://github.com/maticnetwork/static/tree/master/network) with new contract address and/or version + +--- + +### Considerations + +- I have followed the [contributing guidelines](../CONTRIBUTING.md). +- My code follows the style guidelines of this project and I have run `forge fmt` and prettier to ensure the code style is valid +- I have performed a self-review of my own code +- I have commented my code, particularly in hard-to-understand areas +- I have made corresponding changes to the documentation +- My changes generate no new warnings +- I have added tests that prove my fix is effective or that my feature works +- New and existing unit tests pass locally with my changes + +### Additional context + +Add any other context about the pull request here. diff --git a/.github/workflows/pr-check.yaml b/.github/workflows/pr-check.yaml new file mode 100644 index 0000000..fafbcd7 --- /dev/null +++ b/.github/workflows/pr-check.yaml @@ -0,0 +1,20 @@ +name: Source branch check +on: + pull_request: + branches: [main] + types: + - opened + - reopened + - synchronize + - edited +jobs: + check-main: + if: github.base_ref == 'main' + runs-on: ubuntu-latest + steps: + - name: Check branches + run: | + if [ ${{ github.head_ref }} != "staging" ]; then + echo "Merge requests to main branch are only allowed from staging branch." 
+ exit 1 + fi diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml new file mode 100644 index 0000000..cc46bd4 --- /dev/null +++ b/.github/workflows/pre-commit.yaml @@ -0,0 +1,21 @@ +# checks that pre-commit hooks pass before allowing to merge a PR + +name: pre-commit +on: + pull_request: + branches: [main, master, staging, dev, feat/**, fix/**] + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v2 + with: + submodules: recursive + - name: Install Foundry + uses: foundry-rs/foundry-toolchain@v1 + - name: Install pre-commit + run: pip install pre-commit + - name: Run pre-commit + run: pre-commit run --all-files diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml new file mode 100644 index 0000000..42c3cda --- /dev/null +++ b/.github/workflows/test.yaml @@ -0,0 +1,36 @@ +name: test + +on: + pull_request: + branches: [main, master, staging, dev, feat/**, fix/**] + +env: + FOUNDRY_PROFILE: ci + +jobs: + check: + strategy: + fail-fast: true + + name: Foundry project + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + submodules: recursive + + - name: Install Foundry + uses: foundry-rs/foundry-toolchain@v1 + with: + version: nightly + + - name: Run Forge build + run: | + forge --version + forge build --sizes + id: build + + - name: Run Forge tests + run: | + forge test -vvv + id: test diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..08411c1 --- /dev/null +++ b/.gitignore @@ -0,0 +1,14 @@ +/target +/out +/cache +/coverage +lcov.info +.DS_Store +.env +.vscode + +broadcast/*/31337 +deployments/**/31337.* + +script/util/storage_check_cache +script/util/storage_check_report \ No newline at end of file diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..c90cdcb --- /dev/null +++ b/.gitmodules @@ -0,0 +1,9 @@ +[submodule "lib/forge-std"] + path = lib/forge-std + url = https://github.com/foundry-rs/forge-std +[submodule "lib/openzeppelin-contracts"] + path = lib/openzeppelin-contracts + url = https://github.com/openzeppelin/openzeppelin-contracts +[submodule "lib/openzeppelin-contracts-upgradeable"] + path = lib/openzeppelin-contracts-upgradeable + url = https://github.com/openzeppelin/openzeppelin-contracts-upgradeable diff --git a/.nvmrc b/.nvmrc new file mode 100644 index 0000000..3c79f30 --- /dev/null +++ b/.nvmrc @@ -0,0 +1 @@ +18.16.0 \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..550c23c --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,30 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: mixed-line-ending + args: ["--fix=lf"] + description: Forces to replace line ending by the UNIX 'lf' character. 
+ exclude: "^docs/autogen" + - repo: local + hooks: + - id: format + name: Format solidity code + description: Format solidity code with `forge fmt` + language: system + entry: forge fmt + exclude: "^lib/" + pass_filenames: true + - id: doc + name: Generate documentation + description: Generate docs with `forge doc` + language: system + # generates docs and unstages files if only the commit hash changed within the file, this way only when the documentation is updated, the documentation needs to be regenerated and only the changed files are pushed + entry: "script/util/doc_gen.sh" + pass_filenames: false + - repo: https://github.com/pre-commit/mirrors-prettier + rev: "v3.0.3" + hooks: + - id: prettier + name: Format non solidity files with prettier + exclude: "^docs/autogen" diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 0000000..e92cb50 --- /dev/null +++ b/.prettierignore @@ -0,0 +1,6 @@ +foundry.toml +out +lib/ +cache/ +docs/autogenerated +*.sol diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 0000000..0c4d091 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,25 @@ +{ + "printWidth": 120, + "tabWidth": 2, + "useTabs": false, + "singleQuote": false, + "bracketSpacing": true, + "overrides": [ + { + "files": "*.sol", + "options": { + "printWidth": 120, + "tabWidth": 4, + "useTabs": false, + "singleQuote": false, + "bracketSpacing": false + } + }, + { + "files": "*.json", + "options": { + "tabWidth": 4 + } + } + ] +} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..8a1804e --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,158 @@ +# Contributing + +- [Install](#install) +- [Pre-commit Hooks](#pre-commit-hooks) +- [Branching](#branching) + - [Main](#main) + - [Staging](#staging) + - [Dev](#dev) + - [Feature](#feature) + - [Fix](#fix) +- [Code Practices](#code-practices) + - [Interfaces](#interfaces) + - [NatSpec \& Comments](#natspec--comments) +- [Versioning](#versioning) +- [Testing](#testing) + - [Deployer Template](#deployer-template) +- [Deployment](#deployment) + - [Deployer Template](#deployer-template-1) + - [Deployment](#deployment-1) + - [Deployment Info Generation](#deployment-info-generation) +- [Deployer Template Script](#deployer-template-script) +- [Releases](#releases) + +## Install + +Follow these steps to set up your local environment for development: + +- [Install foundry](https://book.getfoundry.sh/getting-started/installation) +- Install dependencies: `forge install` +- [Install pre-commit](https://pre-commit.com/#post-commit) +- Install pre commit hooks: `pre-commit install` + +## Pre-commit Hooks + +Follow the [installation steps](#install) to enable pre-commit hooks. To ensure consistency in our formatting we use `pre-commit` to check whether code was formatted properly and the documentation is up to date. Whenever a commit does not meet the checks implemented by pre-commit, the commit will fail and the pre-commit checks will modify the files to make the commits pass. Include these changes in your commit for the next commit attempt to succeed. On pull requests the CI checks whether all pre-commit hooks were run correctly. +This repo includes the following pre-commit hooks that are defined in the `.pre-commit-config.yaml`: + +- `mixed-line-ending`: This hook ensures that all files have the same line endings (LF). +- `format`: This hook uses `forge fmt` to format all Solidity files. 
+- `doc`: This hook uses `forge doc` to automatically generate documentation for all Solidity files whenever the NatSpec documentation changes. The `script/util/doc_gen.sh` script is used to generate documentation. Forge updates the commit hash in the documentation automatically. To only generate new documentation when the documentation has actually changed, the script checks whether more than just the hash has changed in the documentation and discards all changes if only the hash has changed. +- `prettier`: All remaining files are formatted using prettier. + +## Branching + +This section outlines the branching strategy of this repo. + +### Main + +The main branch is supposed to reflect the deployed state on all networks. Any pull requests into this branch MUST come from the staging branch. The main branch is protected and requires a separate code review by the security team. Whenever the main branch is updated, a new release is created with the latest version. For more information on versioning, check [here](#versioning). + +### Staging + +The staging branch reflects new, code-complete deployments or upgrades containing fixes and/or features. Any pull requests into this branch MUST come from the dev branch. The staging branch is used for security audits and deployments. Once the deployment is complete and deployment log files are generated, the branch can be merged into main. For more information on the deployment and log file generation, check [here](#deployment). + +### Dev + +This is the active development branch. All pull requests into this branch MUST come from fix or feature branches. Upon code completion this branch is merged into staging for auditing and deployment. + +### Feature + +Any new feature should be developed on a separate branch. The naming convention for these branches is `feat/*`. Once the feature is complete, a pull request into the dev branch can be created. + +### Fix + +Any bug fixes should be developed on a separate branch. The naming convention for these branches is `fix/*`. Once the fix is complete, a pull request into the dev branch can be created. + +## Code Practices + +### Interfaces + +Every contract MUST implement its corresponding interface that includes all externally callable functions, errors and events. + +### NatSpec & Comments + +Interfaces should be the entrypoint for all contracts. When exploring a contract within the repository, the interface MUST contain all relevant information to understand the functionality of the contract in the form of NatSpec comments. This includes all externally callable functions, errors and events. The NatSpec documentation MUST be added to the functions, errors and events within the interface. This allows a reader to understand the functionality of a function before moving on to the implementation. The implementing functions MUST point to the NatSpec documentation in the interface using `@inheritdoc`. Internal and private functions shouldn't have NatSpec documentation except for `@dev` comments whenever more context is needed. Additional comments within a function should only be used to give more context to more complex operations, otherwise the code should be kept readable and self-explanatory. + +## Versioning + +This repo utilizes [semantic versioning](https://semver.org/) for smart contracts. An `IVersioned` interface is included in the [interfaces directory](src/interface/IVersioned.sol) exposing a unified versioning interface for all contracts.
This version MUST be included in all contracts, whether they are upgradeable or not, to be able to easily match deployed versions. For example, in the case of a non-upgradeable contract, one version could be deployed to a network and later a new version might be deployed to another network. The exposed `version()` function is also used by the [Deployment Info Generator](#deployment-info-generation) to extract information about the version. + +Whenever contracts are modified, only the version of the changed contracts should be updated. Unmodified contracts should remain on the version of their last change. + +## Testing + +### Deployer Template + +This repo provides a deployer template for consistency between scripts and unit tests. For more information on how to use the template, check [here](#deployer-template-script). + +## Deployment + +This repo utilizes versioned deployments. Any changes to a contract should update the version of this specific contract. To deploy a new version of a contract, create a new deployment script in a directory named after the new version of the modified contracts (e.g., `1.0.0`). A script is provided that extracts deployment information from the `run-latest.json` file within the `broadcast` directory generated while the forge script runs. From this information, a JSON and a Markdown file are generated containing information about the deployment itself as well as past deployments. + +### Deployer Template + +This repo provides a deployer template for consistency between scripts and unit tests. For more information on how to use the template, check [here](#deployer-template-script). + +### Deployment + +This repo sets up the following RPCs in the `foundry.toml` file: + +- mainnet: Ethereum Mainnet +- goerli: Ethereum Goerli +- sepolia: Ethereum Sepolia +- polygon_pos: Polygon PoS +- mumbai: Polygon Mumbai +- polygon_zkevm: Polygon zkEVM +- polygon_zkevm_testnet: Polygon zkEVM Testnet + +To deploy the contracts, provide the `--broadcast` flag to the forge script command. Should the Etherscan verification time out, it can be picked up again by replacing the `--broadcast` flag with `--resume`. +Deploy the contracts to one of the predefined networks by providing the corresponding key with the `--rpc-url` flag. Most of the predefined networks require the `INFURA_KEY` environment variable to be set in the `.env` file. +Including the `--verify` flag will verify deployed contracts on Etherscan. Define the appropriate environment variable for the Etherscan API key in the `.env` file. + +```shell +forge script script/1.0.0/Deploy.s.sol --broadcast --rpc-url --verify +``` + +### Deployment Info Generation + +Using the following command, a JSON and a Markdown file can be generated in the `deployments` directory containing information about the deployment itself as well as past deployments. To find out more about versioning of contracts within this repo, check [here](CONTRIBUTING.md#versioning). + +```shell +node script/util/extract.js +``` + +As the `chainId`, provide the chain ID of the network the contracts were deployed to as a number. The supplied `version` should be the version of the modified contracts and the subdirectory the deployment script is located in (e.g., `1.0.0`). The `scriptName` should be the file name of the script used in the deployment (e.g., `Deploy.s.sol`). + +When upgrading a contract, most of the time only the new implementation is deployed and the actual upgrade is triggered by a governance process or a multisig.
The script will check whether the implementation of the upgraded contract was updated to the deployed version and, if not, it will fail and not generate any files. + +## Deployer Template Script + +This repo provides a deployer template for consistency between scripts and unit tests. + +A deployer is an `abstract` contract, meant to be inherited in scripts and tests. The deployer consists of two functions: `deploy` and `deploy_NoInit`. It handles the creation, proxification, and initialization of the contract. + +To generate a new deployer: + +``` +node script/util/generateDeployer.js [params] +``` + +## Releases + +Releases should be created whenever the code on the main branch is updated to reflect a deployment or an upgrade on a network. The release should be named after the version of the contracts deployed or upgraded. +The release should include the following: + +- In case of a MAJOR version + - changelog + - summary of new features + - summary of breaking changes +- In case of a MINOR version + - changelog + - summary of new features + - summary of fixes +- In case of a PATCH version + - changelog + - summary of fixes +- Deployment information +- TODO diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 0000000..95817e3 --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) 2023 PT Services DMCC + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/README.md b/README.md index 497407f..2859bb4 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,48 @@ -# foundry-template -Contracts team, template repo +## Template Repo (Foundry) + +[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) +[![CI Status](../../actions/workflows/test.yaml/badge.svg)](../../actions) + +TODO: summary of the features of the template repo + +#### Table of Contents + +- [Setup](#setup) +- [Deployment](#deployment) +- [Docs](#docs) +- [Contributing](#contributing) + +## Setup + +Follow these steps to set up your local environment: + +- [Install foundry](https://book.getfoundry.sh/getting-started/installation) +- Install dependencies: `forge install` +- Build contracts: `forge build` +- Test contracts: `forge test` + +If you intend to develop on this repo, follow the steps outlined in [CONTRIBUTING.md](CONTRIBUTING.md#install). + +## Deployment + +This repo utilizes versioned deployments. For more information on how to use forge scripts within the repo, check [here](CONTRIBUTING.md#deployment).
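+Note that the deployment command below expects the environment variables from `.env.example` to be set in a local `.env` file (at minimum `PRIVATE_KEY`, plus `INFURA_KEY` and the appropriate Etherscan/Polygonscan API key, see [CONTRIBUTING.md](CONTRIBUTING.md#deployment)). A minimal sketch of the one-time setup (the `.env` file is gitignored and should never be committed): + +```shell +# copy the template and fill in your own values +cp .env.example .env +```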
+ +Smart contracts are deployed or upgraded using the following command: + +```shell +forge script script/1.0.0/Deploy.s.sol --broadcast --rpc-url --verify +``` + +## Docs + +The documentation and architecture diagrams for the contracts within this repo can be found [here](docs/). +Detailed documentation generated from the NatSpec documentation of the contracts can be found [here](docs/autogen/src/src/). +When exploring the contracts within this repository, it is recommended to start with the interfaces first and then move on to the implementation as outlined [here](CONTRIBUTING.md#natspec--comments) + +## Contributing + +If you want to contribute to this project, please check [CONTRIBUTING.md](CONTRIBUTING.md) first. + +--- + +© 2023 PT Services DMCC diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..53191ae --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,22 @@ +# Polygon Technology Security Information + +## Link to vulnerability disclosure details (Bug Bounty). + +- Websites and Applications: https://hackerone.com/polygon-technology +- Smart Contracts: https://immunefi.com/bounty/polygon + +## Languages that our team speaks and understands. + +Preferred-Languages: en + +## Security-related job openings at Polygon. + +https://polygon.technology/careers + +## Polygon security contact details. + +security@polygon.technology + +## The URL for accessing the security.txt file. + +Canonical: https://polygon.technology/security.txt diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 0000000..c6b7910 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,3 @@ +# Documentation + +TODO diff --git a/docs/autogen/.gitignore b/docs/autogen/.gitignore new file mode 100644 index 0000000..4e42a1b --- /dev/null +++ b/docs/autogen/.gitignore @@ -0,0 +1 @@ +book/ \ No newline at end of file diff --git a/docs/autogen/book.css b/docs/autogen/book.css new file mode 100644 index 0000000..b5ce903 --- /dev/null +++ b/docs/autogen/book.css @@ -0,0 +1,13 @@ +table { + margin: 0 auto; + border-collapse: collapse; + width: 100%; +} + +table td:first-child { + width: 15%; +} + +table td:nth-child(2) { + width: 25%; +} \ No newline at end of file diff --git a/docs/autogen/book.toml b/docs/autogen/book.toml new file mode 100644 index 0000000..05beb66 --- /dev/null +++ b/docs/autogen/book.toml @@ -0,0 +1,12 @@ +[book] +src = "src" +title = "" + +[output.html] +no-section-label = true +additional-js = ["solidity.min.js"] +additional-css = ["book.css"] +git-repository-url = "https://github.com/0xPolygon/foundry-template" + +[output.html.fold] +enable = true diff --git a/docs/autogen/solidity.min.js b/docs/autogen/solidity.min.js new file mode 100644 index 0000000..1924932 --- /dev/null +++ b/docs/autogen/solidity.min.js @@ -0,0 +1,74 @@ +hljs.registerLanguage("solidity",(()=>{"use strict";function e(){try{return!0 +}catch(e){return!1}} +var a=/-?(\b0[xX]([a-fA-F0-9]_?)*[a-fA-F0-9]|(\b[1-9](_?\d)*(\.((\d_?)*\d)?)?|\.\d(_?\d)*)([eE][-+]?\d(_?\d)*)?|\b0)(?!\w|\$)/ +;e()&&(a=a.source.replace(/\\b/g,"(?{ +var a=r(e),o=l(e),c=/[A-Za-z_$][A-Za-z_$0-9.]*/,d=e.inherit(e.TITLE_MODE,{ +begin:/[A-Za-z$_][0-9A-Za-z$_]*/,lexemes:c,keywords:n}),u={className:"params", +begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,lexemes:c,keywords:n, +contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,o,s]},_={ +className:"operator",begin:/:=|->/};return{keywords:n,lexemes:c, +contains:[a,o,i,t,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,s,_,{ 
+className:"function",lexemes:c,beginKeywords:"function",end:"{",excludeEnd:!0, +contains:[d,u,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,_]}]}}, +solAposStringMode:r,solQuoteStringMode:l,HEX_APOS_STRING_MODE:i, +HEX_QUOTE_STRING_MODE:t,SOL_NUMBER:s,isNegativeLookbehindAvailable:e} +;const{baseAssembly:c,solAposStringMode:d,solQuoteStringMode:u,HEX_APOS_STRING_MODE:_,HEX_QUOTE_STRING_MODE:m,SOL_NUMBER:b,isNegativeLookbehindAvailable:E}=o +;return e=>{for(var a=d(e),s=u(e),n=[],i=0;i<32;i++)n[i]=i+1 +;var t=n.map((e=>8*e)),r=[];for(i=0;i<=80;i++)r[i]=i +;var l=n.map((e=>"bytes"+e)).join(" ")+" ",o=t.map((e=>"uint"+e)).join(" ")+" ",g=t.map((e=>"int"+e)).join(" ")+" ",M=[].concat.apply([],t.map((e=>r.map((a=>e+"x"+a))))),p={ +keyword:"var bool string int uint "+g+o+"byte bytes "+l+"fixed ufixed "+M.map((e=>"fixed"+e)).join(" ")+" "+M.map((e=>"ufixed"+e)).join(" ")+" enum struct mapping address new delete if else for while continue break return throw emit try catch revert unchecked _ function modifier event constructor fallback receive error virtual override constant immutable anonymous indexed storage memory calldata external public internal payable pure view private returns import from as using pragma contract interface library is abstract type assembly", +literal:"true false wei gwei szabo finney ether seconds minutes hours days weeks years", +built_in:"self this super selfdestruct suicide now msg block tx abi blockhash gasleft assert require Error Panic sha3 sha256 keccak256 ripemd160 ecrecover addmod mulmod log0 log1 log2 log3 log4" +},O={className:"operator",begin:/[+\-!~*\/%<>&^|=]/ +},C=/[A-Za-z_$][A-Za-z_$0-9]*/,N={className:"params",begin:/\(/,end:/\)/, +excludeBegin:!0,excludeEnd:!0,lexemes:C,keywords:p, +contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,s,b,"self"]},f={ +begin:/\.\s*/,end:/[^A-Za-z0-9$_\.]/,excludeBegin:!0,excludeEnd:!0,keywords:{ +built_in:"gas value selector address length push pop send transfer call callcode delegatecall staticcall balance code codehash wrap unwrap name creationCode runtimeCode interfaceId min max" +},relevance:2},y=e.inherit(e.TITLE_MODE,{begin:/[A-Za-z$_][0-9A-Za-z$_]*/, +lexemes:C,keywords:p}),w={className:"built_in", +begin:(E()?"(? --verify +``` + +## Docs + +The documentation and architecture diagrams for the contracts within this repo can be found [here](docs/). +Detailed documentation generated from the NatSpec documentation of the contracts can be found [here](docs/autogen/src/src/). +When exploring the contracts within this repository, it is recommended to start with the interfaces first and then move on to the implementation as outlined [here](CONTRIBUTING.md#natspec--comments) + +## Contributing + +If you want to contribute to this project, please check [CONTRIBUTING.md](CONTRIBUTING.md) first. 
+ +--- + +Copyright (C) 2023 PT Services DMCC diff --git a/docs/autogen/src/SUMMARY.md b/docs/autogen/src/SUMMARY.md new file mode 100644 index 0000000..1c37b0f --- /dev/null +++ b/docs/autogen/src/SUMMARY.md @@ -0,0 +1,7 @@ +# Summary +- [Home](README.md) +# src + - [❱ interface](src/interface/README.md) + - [ICounter](src/interface/ICounter.sol/interface.ICounter.md) + - [IVersioned](src/interface/IVersioned.sol/interface.IVersioned.md) + - [Counter](src/Counter.sol/contract.Counter.md) diff --git a/docs/autogen/src/src/Counter.sol/contract.Counter.md b/docs/autogen/src/src/Counter.sol/contract.Counter.md new file mode 100644 index 0000000..27b0485 --- /dev/null +++ b/docs/autogen/src/src/Counter.sol/contract.Counter.md @@ -0,0 +1,60 @@ +# Counter +[Git Source](https://github.com/0xPolygon/foundry-template/blob/55b07186cd4779cbe55cc2f262f992aeabaf34ad/src/Counter.sol) + +**Inherits:** +[ICounter](/docs/autogen/src/src/interface/ICounter.sol/interface.ICounter.md), Initializable + + +## State Variables +### number + +```solidity +uint256 public number; +``` + + +## Functions +### initialize + + +```solidity +function initialize(uint256 initialNumber) public initializer; +``` + +### setNumber + +Sets the number + + +```solidity +function setNumber(uint256 newNumber) public; +``` +**Parameters** + +|Name|Type|Description| +|----|----|-----------| +|`newNumber`|`uint256`|The new number| + + +### increment + +Increments the number by 1 + + +```solidity +function increment() public; +``` + +### version + + +```solidity +function version() external pure returns (string memory); +``` +**Returns** + +|Name|Type|Description| +|----|----|-----------| +|``|`string`|The version of the contract| + + diff --git a/docs/autogen/src/src/README.md b/docs/autogen/src/src/README.md new file mode 100644 index 0000000..1b1cc23 --- /dev/null +++ b/docs/autogen/src/src/README.md @@ -0,0 +1,5 @@ + + +# Contents +- [interface](/src/interface) +- [Counter](Counter.sol/contract.Counter.md) diff --git a/docs/autogen/src/src/interface/ICounter.sol/interface.ICounter.md b/docs/autogen/src/src/interface/ICounter.sol/interface.ICounter.md new file mode 100644 index 0000000..d207307 --- /dev/null +++ b/docs/autogen/src/src/interface/ICounter.sol/interface.ICounter.md @@ -0,0 +1,45 @@ +# ICounter +[Git Source](https://github.com/0xPolygon/foundry-template/blob/55b07186cd4779cbe55cc2f262f992aeabaf34ad/src/interface/ICounter.sol) + +**Inherits:** +[IVersioned](/docs/autogen/src/src/interface/IVersioned.sol/interface.IVersioned.md) + + +## Functions +### number + + +```solidity +function number() external view returns (uint256); +``` +**Returns** + +|Name|Type|Description| +|----|----|-----------| +|``|`uint256`|The current number| + + +### setNumber + +Sets the number + + +```solidity +function setNumber(uint256 newNumber) external; +``` +**Parameters** + +|Name|Type|Description| +|----|----|-----------| +|`newNumber`|`uint256`|The new number| + + +### increment + +Increments the number by 1 + + +```solidity +function increment() external; +``` + diff --git a/docs/autogen/src/src/interface/IVersioned.sol/interface.IVersioned.md b/docs/autogen/src/src/interface/IVersioned.sol/interface.IVersioned.md new file mode 100644 index 0000000..c319a0e --- /dev/null +++ b/docs/autogen/src/src/interface/IVersioned.sol/interface.IVersioned.md @@ -0,0 +1,18 @@ +# IVersioned +[Git Source](https://github.com/0xPolygon/foundry-template/blob/55b07186cd4779cbe55cc2f262f992aeabaf34ad/src/interface/IVersioned.sol) + + +## Functions +### version 
+ + +```solidity +function version() external pure returns (string memory); +``` +**Returns** + +|Name|Type|Description| +|----|----|-----------| +|``|`string`|The version of the contract| + + diff --git a/docs/autogen/src/src/interface/README.md b/docs/autogen/src/src/interface/README.md new file mode 100644 index 0000000..b060352 --- /dev/null +++ b/docs/autogen/src/src/interface/README.md @@ -0,0 +1,5 @@ + + +# Contents +- [ICounter](ICounter.sol/interface.ICounter.md) +- [IVersioned](IVersioned.sol/interface.IVersioned.md) diff --git a/foundry.toml b/foundry.toml new file mode 100644 index 0000000..46e9911 --- /dev/null +++ b/foundry.toml @@ -0,0 +1,49 @@ +[profile.default] +src = "src" +out = "out" +libs = ["lib"] +optimizer = true +optimizer_runs = 999999 +via_ir = true +solc = "0.8.22" +verbosity = 2 +ffi = true +fs_permissions = [ + { access = "read", path = "scripts/config.json" }, + { access = "read", path = "script/1.0.0/input.json" } +] + +remappings = [ + "forge-std=lib/forge-std/src", + "@openzeppelin/contracts=lib/openzeppelin-contracts/contracts", + "@openzeppelin/contracts-upgradeable=lib/openzeppelin-contracts-upgradeable/contracts" +] + +[profile.intense.fuzz] +runs = 10000 +max_test_rejects = 999999 + +[fmt] +line_length = 160 +number_underscore = "thousands" + +[rpc_endpoints] +anvil = "http://127.0.0.1:8545" +mainnet = "https://mainnet.infura.io/v3/${INFURA_KEY}" +goerli = "https://goerli.infura.io/v3/${INFURA_KEY}" +sepolia = "https://sepolia.infura.io/v3/${INFURA_KEY}" +polygon_pos = "https://polygon-mainnet.infura.io/v3/${INFURA_KEY}" +mumbai = "https://polygon-mumbai.infura.io/v3/${INFURA_KEY}" +polygon_zkevm = "https://zkevm-rpc.com" +polygon_zkevm_testnet = "https://rpc.public.zkevm-test.net" + +[etherscan] +mainnet = { key = "${ETHERSCAN_API_KEY}" } +goerli = { key = "${ETHERSCAN_API_KEY}" } +sepolia = { key = "${ETHERSCAN_API_KEY}" } +polygon_pos = { key = "${POLYGONSCAN_API_KEY}" } +mumbai = { key = "${POLYGONSCAN_API_KEY}" } +polygon_zkevm = { key = "${POLYGONSCAN_ZKEVM_API_KEY}" } +polygon_zkevm_testnet = { key = "${POLYGONSCAN_ZKEVM_API_KEY}" } + +# See more config options https://github.com/foundry-rs/foundry/tree/master/config \ No newline at end of file diff --git a/lib/forge-std b/lib/forge-std new file mode 160000 index 0000000..f73c73d --- /dev/null +++ b/lib/forge-std @@ -0,0 +1 @@ +Subproject commit f73c73d2018eb6a111f35e4dae7b4f27401e9421 diff --git a/lib/openzeppelin-contracts b/lib/openzeppelin-contracts new file mode 160000 index 0000000..932fddf --- /dev/null +++ b/lib/openzeppelin-contracts @@ -0,0 +1 @@ +Subproject commit 932fddf69a699a9a80fd2396fd1a2ab91cdda123 diff --git a/lib/openzeppelin-contracts-upgradeable b/lib/openzeppelin-contracts-upgradeable new file mode 160000 index 0000000..625fb3c --- /dev/null +++ b/lib/openzeppelin-contracts-upgradeable @@ -0,0 +1 @@ +Subproject commit 625fb3c2b2696f1747ba2e72d1e1113066e6c177 diff --git a/script/1.0.0/Deploy.s.sol b/script/1.0.0/Deploy.s.sol new file mode 100644 index 0000000..3eea3c1 --- /dev/null +++ b/script/1.0.0/Deploy.s.sol @@ -0,0 +1,27 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.22; + +import "forge-std/Script.sol"; +import "script/util/ScriptHelpers.sol"; + +import {ProxyAdmin} from "@openzeppelin/contracts/proxy/transparent/ProxyAdmin.sol"; + +import "script/deployers/DeployCounter.s.sol"; + +contract Deploy is Script, ScriptHelpers, CounterDeployer { + using stdJson for string; + + ProxyAdmin internal proxyAdmin; + + Counter internal counter; + + function run() 
public { + uint256 deployerPrivateKey = vm.envUint("PRIVATE_KEY"); + string memory input = vm.readFile("script/1.0.0/input.json"); + + vm.broadcast(deployerPrivateKey); + proxyAdmin = new ProxyAdmin(input.readAddress($("ProxyAdmin.initialOwner"))); + + (counter,) = deployCounter(address(proxyAdmin), input.readUint($("Counter.number"))); + } +} diff --git a/script/1.0.0/input.json b/script/1.0.0/input.json new file mode 100644 index 0000000..5e2102b --- /dev/null +++ b/script/1.0.0/input.json @@ -0,0 +1,14 @@ +{ + "1": {}, + + "5": {}, + + "31337": { + "ProxyAdmin": { + "initialOwner": "0x48789ECd4317eCc8dBdB1d06EF39F01c5862a941" + }, + "Counter": { + "number": 10 + } + } +} diff --git a/script/config.json b/script/config.json new file mode 100644 index 0000000..77253a4 --- /dev/null +++ b/script/config.json @@ -0,0 +1,7 @@ +{ + "defaultRpc": { + "31337": "http://127.0.0.1:8545", + "1": "https://eth.llamarpc.com", + "5": "https://ethereum-goerli.publicnode.com" + } +} diff --git a/script/deployers/DeployCounter.s.sol b/script/deployers/DeployCounter.s.sol new file mode 100644 index 0000000..2b9b6e0 --- /dev/null +++ b/script/deployers/DeployCounter.s.sol @@ -0,0 +1,28 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.22; + +import "forge-std/Script.sol"; + +import "src/Counter.sol"; +import {TransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol"; + +abstract contract CounterDeployer is Script { + function deployCounter(address proxyAdmin, uint256 number) internal returns (Counter proxyAsCounter, address logic) { + bytes memory initData = abi.encodeCall(Counter.initialize, (number)); + + return _deployCounter(proxyAdmin, initData); + } + + function deployCounter_NoInit(address proxyAdmin) internal returns (Counter proxyAsCounter, address logic) { + return _deployCounter(proxyAdmin, ""); + } + + function _deployCounter(address proxyAdmin, bytes memory initData) private returns (Counter proxyAsCounter, address logic) { + vm.startBroadcast(vm.envUint("PRIVATE_KEY")); + + logic = address(new Counter()); + proxyAsCounter = Counter(address(new TransparentUpgradeableProxy(logic, proxyAdmin, initData))); + + vm.stopBroadcast(); + } +} diff --git a/script/util/ScriptHelpers.sol b/script/util/ScriptHelpers.sol new file mode 100644 index 0000000..6eb218e --- /dev/null +++ b/script/util/ScriptHelpers.sol @@ -0,0 +1,14 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.22; + +import "forge-std/Script.sol"; + +abstract contract ScriptHelpers is Script { + using stdJson for string; + + ///@notice Returns the JSON field for the current chain ID. 
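+    ///@dev Example: with chain ID 31337 and field "Counter.number" (as used by script/1.0.0/Deploy.s.sol with script/1.0.0/input.json), the returned path is '["31337"].Counter.number'.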
+ function $(string memory field) internal view returns (string memory) { + string memory chainIdSlug = string.concat('["', vm.toString(block.chainid), '"]'); + return string.concat(chainIdSlug, ".", field); + } +} diff --git a/script/util/_storageCheckReporter.js b/script/util/_storageCheckReporter.js new file mode 100644 index 0000000..d012e10 --- /dev/null +++ b/script/util/_storageCheckReporter.js @@ -0,0 +1,133 @@ +// PREPARE JSONS + +const oldObject = JSON.parse(process.argv[2]); +const newObject = JSON.parse(process.argv[3]); + +function removeField(jsonArray, fieldToRemove) { + jsonArray.forEach((json) => { + if (json.hasOwnProperty("astId")) { + delete json[fieldToRemove]; + } + }); +} + +removeField(oldObject.storage, "astId"); +removeField(newObject.storage, "astId"); + +function jsonsSame(json1, json2) { + return JSON.stringify(json1) === JSON.stringify(json2); +} + +if (jsonsSame(oldObject, newObject)) { + process.exit(0); +} + +let oldStorage = oldObject.storage; +let newStorage = newObject.storage; + +// COMPARE + +let reportOld = ""; +let reportNew = ""; + +function calcStart(item) { + let slot = parseInt(item.slot); + let offset = item.offset; + let result = slot * 256 + 1 + offset * 8; + if (slot + offset == 0) result = 0; + return result; +} + +function startsSame(item1, item2) { + return calcStart(item1) === calcStart(item2); +} + +function startsBefore(item1, item2) { + return calcStart(item1) < calcStart(item2); +} + +function isSame(item1, item2) { + return ( + item1.label === item2.label && + item1.type === item2.type && + jsonsSame(oldObject.types[item1.type], newObject.types[item2.type]) + ); +} + +let o = 0; +let n = 0; + +while (true) { + const item1 = oldStorage[o]; + const item2 = newStorage[n]; + + if (item1 && item2 && startsSame(item1, item2)) { + printOld(true); + printNew(true, isSame(item1, item2) ? 
" " : "❗️"); + o++; + n++; + } else if (item1 && (!item2 || startsBefore(item1, item2))) { + printOld(true); + printNew(false, "🗑️"); + o++; + } else if (item2 && (!item1 || startsBefore(item2, item1))) { + printOld(false); + printNew(true, "✨"); + n++; + } else { + break; + } +} + +function printOld(notEmpty) { + if (!notEmpty) reportOld += "\n"; + else reportOld += formatLine(" ", oldStorage[o]); +} + +function printNew(notEmpty, emoji) { + if (!notEmpty) reportNew += emoji + "\n"; + else reportNew += formatLine(emoji, newStorage[n]); +} + +function formatLine(emoji, item) { + emoji = emoji.padEnd(1, " "); + let slot = item.slot; + let offset = item.offset.toString().padEnd(2, " "); + let label = item.label.padEnd(25, " "); + let type = item.type; + + if (item.offset == 0) { + slot = slot.padEnd(8, " "); + offset = ""; + } else { + slot += ": "; + } + + return `${emoji} ${slot}${offset} ${label} ${type}\n`; +} + +// REPORT + +// remove \n from report +reportOld = reportOld.slice(0, -1); +reportNew = reportNew.slice(0, -1); + +const fs = require("fs"); +const path = require("path"); + +const filePath = path.parse(process.argv[4]); +const directoryPath = path.join("script/util/storage_check_report", filePath.dir); + +// Create directories recursively +try { + fs.mkdirSync(directoryPath, { recursive: true }); +} catch (err) { + if (err.code !== "EEXIST") throw err; +} + +const reportOldPath = path.join(directoryPath, filePath.name + "-OLD"); +const reportNewPath = path.join(directoryPath, filePath.name + "-NEW"); + +// Write files +fs.writeFileSync(reportOldPath, reportOld); +fs.writeFileSync(reportNewPath, reportNew); diff --git a/script/util/check_storage.sh b/script/util/check_storage.sh new file mode 100644 index 0000000..0b9da2a --- /dev/null +++ b/script/util/check_storage.sh @@ -0,0 +1,118 @@ +#!/bin/bash + +# CLONE OLD VERSION + +# Check if the commit hash argument is provided +if [ -z "$1" ]; then + echo "Please provide the commit hash or tag as an argument." 
+ exit 1 +fi + +# Define the path to the new subdirectory +old_version="script/util/storage_check_cache" + +# Check if the directory exists, then remove it +exists=0 +if [ -d "$old_version" ]; then + # Check if the current commit matches the target commit hash + prev_dir=$(pwd) + cd "$old_version" + if [ "$(git rev-parse HEAD)" = "$1" ]; then + exists=1 + fi + cd "$prev_dir" + if [ "$exists" -eq 0 ]; then + rm -rf "$old_version" + fi +fi + +if [ "$exists" -eq 0 ]; then + current_dir=$(pwd) + # Clone the current directory to the new subdirectory + git clone "file://$current_dir" "$old_version" + cd "$old_version" + + # Reset to a certain commit + git reset --hard "$1" + + forge install + + cd "$current_dir" +fi + +# ======================================================================== + +# GET FILE NAMES + +# Define a function to find .sol files +find_sol_files() { + local dir="$1" + local array_name="$2" + local filesWithPath=() + + while IFS= read -r -d $'\0' file; do + # Append the file name to the array + filesWithPath+=("$file") + done < <(find "$dir" -type f -name "*.sol" -print0) + + # Assign the array to the variable name specified by the second argument + eval "$array_name"='("${filesWithPath[@]}")' +} + +# Specify the directory where you want to search for .sol files +search_directory="src" + +# Declare empty arrays to store the file names +filesWithPath_old=() +filesWithPath_new=() + +current_dir=$(pwd) + +# Call the function for the old version directory +cd $old_version +find_sol_files "$search_directory" "filesWithPath_old" + +# Call the function for the new version directory +cd "$current_dir" +find_sol_files "$search_directory" "filesWithPath_new" + +# ======================================================================== + +# REPORT DELETED ONES + +if [ -d "script/util/storage_check_report" ]; then + rm -rf "script/util/storage_check_report" +fi + +differences=() +for item in "${filesWithPath_old[@]}"; do + skip= + for itemB in "${filesWithPath_new[@]}"; do + [[ $item == $itemB ]] && { skip=1; break; } + done + [[ -n $skip ]] || differences+=("$item") +done + +if [ ${#differences[@]} -gt 0 ]; then + mkdir -p "script/util/storage_check_report" + printf "%s\n" "${differences[@]}" > "script/util/storage_check_report.removed" +fi + +# ======================================================================== + +# COMPARE STORAGE LAYOUTS + +# Loop through each item in the array +for line in "${filesWithPath_old[@]}"; do + # Check if the line is not empty + if [ -n "$line" ] && [[ ! 
" ${differences[@]} " =~ " ${line} " ]]; then + # Run the 'forge inspect' command with the current item from the array + formated_name=${line}:$(basename "${line%.*}") + cd "$old_version" + output_old=$(forge inspect $formated_name storage) + cd "$current_dir" + output_new=$(forge inspect $formated_name storage) + + node ./script/util/_storageCheckReporter.js "$output_old" "$output_new" ${line} + fi +done \ No newline at end of file diff --git a/script/util/deployer_template b/script/util/deployer_template new file mode 100644 index 0000000..5401406 --- /dev/null +++ b/script/util/deployer_template @@ -0,0 +1,36 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.22; + +import "forge-std/Script.sol"; + +import ""; +import {TransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol"; + +abstract contract Deployer is Script { + function deploy( + address proxyAdmin, + + ) internal returns ( proxyAs, address logic) { + bytes memory initData = abi.encodeCall(.initialize, ()); + + return _deploy(proxyAdmin, initData); + } + + function deploy_NoInit( + address proxyAdmin + ) internal returns ( proxyAs, address logic) { + return _deploy(proxyAdmin, ""); + } + + function _deploy( + address proxyAdmin, + bytes memory initData + ) private returns ( proxyAs, address logic) { + vm.startBroadcast(vm.envUint("PRIVATE_KEY")); + + logic = address(new ()); + proxyAs = (address(new TransparentUpgradeableProxy(logic, proxyAdmin, initData))); + + vm.stopBroadcast(); + } +} diff --git a/script/util/doc_gen.sh b/script/util/doc_gen.sh new file mode 100755 index 0000000..5098b44 --- /dev/null +++ b/script/util/doc_gen.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +# generate docs +forge doc -b -o docs/autogen + +# Unstage all docs where only the commit hash changed +# Get a list of all unstaged files in the directory +files=$(git diff --name-only -- 'docs/autogen/*') + +# Loop over each file +for file in $files; do + # Get the diff for the file, only lines that start with - or + + diff=$(git diff $file | grep '^[+-][^+-]') + echo $file + echo "$diff" | wc -l + # Check if there are any other changes in the diff besides the commit hash (in that case the file has more than 1 line that changed, one minus one plus) + if [[ $(echo "$diff" | wc -l) -eq 2 ]]; then + # If there are no other changes, discard the changes for the file + git reset HEAD $file + git checkout -- $file + fi +done \ No newline at end of file diff --git a/script/util/extract.js b/script/util/extract.js new file mode 100644 index 0000000..f1f8ffa --- /dev/null +++ b/script/util/extract.js @@ -0,0 +1,502 @@ +const { readFileSync, existsSync, writeFileSync, mkdirSync } = require("fs"); +const { execSync } = require("child_process"); +const { join } = require("path"); + +/** + * @description Extracts contract deployment data from run-latest.json (foundry broadcast output) and writes to deployments/{chainId}.json + * @usage node script/utils/extract.js {chainId} [version = "1.0.0"] [scriptName = "Deploy.s.sol"] + * @dev + * currently only supports TransparentUpgradeableProxy pattern + */ +async function main() { + validateInputs(); + let [chainId, version, scriptName] = process.argv.slice(2); + if (!version?.length) version = "1.0.0"; + if (!scriptName?.length) scriptName = "Deploy.s.sol"; + const commitHash = getCommitHash(); + const data = JSON.parse( + readFileSync(join(__dirname, `../../broadcast/${scriptName}/${chainId}/run-latest.json`), "utf-8"), + ); + const config = 
JSON.parse(readFileSync(join(__dirname, "../config.json"), "utf-8")); + const input = JSON.parse(readFileSync(join(__dirname, `../${version}/input.json`), "utf-8")); + const rpcUrl = config.defaultRpc[chainId] || process.env.RPC_URL || "http://127.0.0.1:8545"; + const deployments = data.transactions.filter(({ transactionType }) => transactionType === "CREATE"); + + const outPath = join(__dirname, `../../deployments/json/${chainId}.json`); + if (!existsSync(join(__dirname, "../../deployments/"))) mkdirSync(join(__dirname, "../../deployments/")); + if (!existsSync(join(__dirname, "../../deployments/json/"))) mkdirSync(join(__dirname, "../../deployments/json/")); + const out = JSON.parse( + (existsSync(outPath) && readFileSync(outPath, "utf-8")) || JSON.stringify({ chainId, latest: {}, history: [] }), + ); + + const timestamp = data.timestamp; + let latestContracts = {}; + if (Object.keys(out.latest).length === 0) { + const deployedContractsMap = new Map( + [...deployments].map(({ contractAddress, contractName }) => [contractAddress, contractName]), + ); + + // first deployment + // todo(future): add support for other proxy patterns + const proxies = await Promise.all( + deployments + .filter(({ contractName }) => contractName === "TransparentUpgradeableProxy") + .map(async ({ arguments, contractAddress, hash }) => ({ + implementation: arguments[0], + proxyAdmin: arguments[1], + address: contractAddress, + contractName: deployedContractsMap.get(arguments[0]), + proxy: true, + ...(await getVersion(contractAddress, rpcUrl)), + proxyType: "TransparentUpgradeableProxy", + timestamp, + deploymentTxn: hash, + commitHash, + })), + ); + const nonProxies = await Promise.all( + deployments + .filter( + ({ contractName }) => + contractName !== "TransparentUpgradeableProxy" && !proxies.find((p) => p.contractName === contractName), + ) + .map(async ({ contractName, contractAddress, hash }) => ({ + address: contractAddress, + contractName, + proxy: false, + ...(await getVersion(contractAddress, rpcUrl)), + timestamp, + deploymentTxn: hash, + commitHash, + })), + ); + const contracts = [...proxies, ...nonProxies].reduce((obj, { contractName, ...rest }) => { + obj[contractName] = rest; + return obj; + }, {}); + latestContracts = contracts; + out.history.push({ + contracts: Object.entries(contracts).reduce((obj, [key, { timestamp, commitHash, ...rest }]) => { + obj[key] = rest; + return obj; + }, {}), + input: input[chainId], + timestamp, + commitHash, + }); + } else { + if (out.history.find((h) => h.commitHash === commitHash)) return console.log("warn: commitHash already deployed"); // if commitHash already exists in history, return + + for (const { contractName, contractAddress } of deployments) { + if (Object.keys(out.latest).includes(contractName) && out.latest[contractName].proxy) { + // new deployment, check if implementation changed on chain + if (out.latest[contractName].proxyType !== "TransparentUpgradeableProxy") continue; // only support TransparentUpgradeableProxy pattern + const currentImplementation = getImplementationAddress( + out.latest[contractName].address, + "0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc", + rpcUrl, + ); + if (currentImplementation === out.latest[contractName].implementation) + throw new Error( + `Implementation for ${contractName}(${out.latest[contractName].address}) did not change - ${currentImplementation}, deployed - ${contractAddress}`, + ); + if (currentImplementation !== contractAddress) + throw new Error( + `Implementation mismatch for 
${contractName}(${out.latest[contractName].address}), onchain - ${currentImplementation}, deployed - ${contractAddress}`, + ); + + // currentImplementation === contractAddress + // implementation changed, update latestContracts + latestContracts[contractName] = { + ...out.latest[contractName], + implementation: toChecksumAddress(currentImplementation), + version: (await getVersion(currentImplementation, rpcUrl))?.version || version, + timestamp, + commitHash, + }; + out.history.unshift({ + contracts: Object.entries(latestContracts).reduce((obj, [key, { timestamp, commitHash, ...rest }]) => { + obj[key] = rest; + return obj; + }, {}), + input: input[chainId], + timestamp, + commitHash, + }); + } + } + + const deployedContractsMap = new Map( + Object.entries(out.latest).map(([contractName, { address }]) => [address.toLowerCase(), contractName]), + ); + + for (const { transaction, transactionType } of data.transactions) { + if ( + transactionType === "CALL" && + deployedContractsMap.get(transaction.to.toLowerCase()) === "ProxyAdmin" && + transaction.data.startsWith("0x99a88ec4") // upgrade(address, address) + ) { + const proxyAddress = "0x" + transaction.data.slice(34, 74); + const newImplementationAddress = "0x" + transaction.data.slice(98, 138); + const contractName = deployedContractsMap.get(proxyAddress.toLowerCase()); + + latestContracts[contractName] = { + ...out.latest[contractName], + implementation: toChecksumAddress(newImplementationAddress), + version: (await getVersion(newImplementationAddress, rpcUrl))?.version || version, + timestamp, + commitHash, + }; + out.history.unshift({ + contracts: Object.entries(latestContracts).reduce((obj, [key, { timestamp, commitHash, ...rest }]) => { + obj[key] = rest; + return obj; + }, {}), + input: input[chainId], + timestamp, + commitHash, + }); + } + } + } + + // overwrite latest with changed contracts + out.latest = { + ...out.latest, + ...latestContracts, + }; + + writeFileSync(outPath, JSON.stringify(out, null, 2)); + generateMarkdown(out); +} + +function getCommitHash() { + return execSync("git rev-parse HEAD").toString().trim(); // note: update if not using git +} + +function toChecksumAddress(address) { + try { + return execSync(`cast to-check-sum-address ${address}`).toString().trim(); // note: update if not using cast + } catch (e) { + console.log("ERROR", e); + return address; + } +} + +function getImplementationAddress(proxyAddress, implementationSlot, rpcUrl) { + try { + const implementationAddress = execSync(`cast storage ${proxyAddress} ${implementationSlot} --rpc-url ${rpcUrl}`) + .toString() + .trim(); // note: update if not using cast + if (implementationAddress === "0x0000000000000000000000000000000000000000000000000000000000000000") + throw new Error(`empty implementation address for ${proxyAddress} at slot ${implementationSlot}`); + const trimmedAddress = "0x" + implementationAddress.substring(66 - 40, 66); + return toChecksumAddress(trimmedAddress); + } catch (e) { + console.log("ERROR", e); + return "0x0000000000000000000000000000000000000000"; + } +} + +async function getVersion(contractAddress, rpcUrl) { + try { + const res = await ( + await fetch(rpcUrl, { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ + jsonrpc: "2.0", + id: Date.now(), + method: "eth_call", + params: [{ to: contractAddress, data: "0x54fd4d50" }, "latest"], // version()(string) + }), + }) + ).json(); + if (res.error) throw new Error(res.error.message); + return { version: hexToAscii(res.result)?.trim() || 
res.result }; + } catch (e) { + if (e.message === "execution reverted") return { version: undefined }; // contract does not implement getVersion() + if (e.message.includes("fetch is not defined")) { + console.warn("use node 18+"); + } + throw e; + } +} + +function generateMarkdown(input) { + let out = `# Polygon Ecosystem Token\n\n`; + // read name from foundry.toml + + out += `\n### Table of Contents\n- [Summary](#summary)\n- [Contracts](#contracts)\n\t- `; + out += Object.keys(input.latest) + .map( + (c) => + `[${c.replace(/([A-Z])/g, " $1").trim()}](#${c + .replace(/([A-Z])/g, "-$1") + .trim() + .slice(1) + .toLowerCase()})`, + ) + .join("\n\t- "); + out += `\n- [Deployment History](#deployment-history)`; + const { deploymentHistoryMd, allVersions } = generateDeploymentHistory(input.history, input.latest, input.chainId); + out += Object.keys(allVersions) + .map((v) => `\n\t- [${v}](#${v.replace(/\./g, "")})`) + .join(""); + + out += `\n\n## Summary + + + + + + `; + out += Object.entries(input.latest) + .map( + ([contractName, { address, version }]) => + ` + + + + `, + ) + .join("\n"); + out += `
ContractAddressVersion
${contractName}${address}${version || `N/A`}
\n`; + + out += `\n## Contracts\n\n`; + + out += Object.entries(input.latest) + .map( + ([ + contractName, + { address, deploymentTxn, version, commitHash, timestamp, proxyType, implementation, proxyAdmin }, + ]) => `### ${contractName.replace(/([A-Z])/g, " $1").trim()} + +Address: ${getEtherscanLinkMd(input.chainId, address)} + +Deployment Txn: ${getEtherscanLinkMd(input.chainId, deploymentTxn, "tx")} + +${ + typeof version === "undefined" + ? "" + : `Version: [${version}](https://github.com/0xPolygon/pol-token/releases/tag/${version})` +} + +Commit Hash: [${commitHash.slice(0, 7)}](https://github.com/0xPolygon/pol-token/commit/${commitHash}) + +${prettifyTimestamp(timestamp)} +${generateProxyInformationIfProxy({ + address, + contractName, + proxyType, + implementation, + proxyAdmin, + history: input.history, + chainId: input.chainId, +})}`, + ) + .join("\n\n --- \n\n"); + + out += ` + +---- + + +### Deployment History + +${deploymentHistoryMd}`; + + writeFileSync(join(__dirname, `../../deployments/${input.chainId}.md`), out, "utf-8"); +} + +function getEtherscanLink(chainId, address, slug = "address") { + chainId = parseInt(chainId); + switch (chainId) { + case 1: + return `https://etherscan.io/${slug}/${address}`; + case 5: + return `https://goerli.etherscan.io/${slug}/${address}`; + default: + return ``; + // return `https://blockscan.com/${slug}/${address}`; + } +} +function getEtherscanLinkMd(chainId, address, slug = "address") { + const etherscanLink = getEtherscanLink(chainId, address, slug); + return etherscanLink.length ? `[${address}](${etherscanLink})` : address; +} + +function generateProxyInformationIfProxy({ + address, + contractName, + proxyType, + implementation, + proxyAdmin, + history, + chainId, +}) { + let out = ``; + if (typeof proxyType === "undefined") return out; + out += `\n\n_Proxy Information_\n\n`; + out += `\n\nProxy Type: ${proxyType}\n\n`; + out += `\n\nImplementation: ${getEtherscanLinkMd(chainId, implementation)}\n\n`; + out += `\n\nProxy Admin: ${getEtherscanLinkMd(chainId, proxyAdmin)}\n\n`; + + const historyOfProxy = history.filter((h) => h?.contracts[contractName]?.address === address); + if (historyOfProxy.length === 0) return out; + out += `\n`; + out += ` +
+Implementation History + + + + + + ${historyOfProxy + .map( + ({ + contracts: { + [contractName]: { implementation, version }, + }, + commitHash, + }) => ` + + + + + `, + ) + .join("")} +
VersionAddressCommit Hash
${version}${implementation}${commitHash.slice( + 0, + 7, + )}
+
+ `; + return out; +} + +function generateDeploymentHistory(history, latest, chainId) { + let allVersions = {}; + if (history.length === 0) { + const inputPath = join(__dirname, "../1.0.0/input.json"); + const input = JSON.parse((existsSync(inputPath) && readFileSync(inputPath, "utf-8")) || `{"${chainId}":{}}`)[ + chainId + ]; + allVersions = Object.entries(latest).reduce((obj, [contractName, contract]) => { + if (typeof contract.version === "undefined") return obj; + if (!obj[contract.version]) obj[contract.version] = []; + obj[contract.version].push({ contract, contractName, input }); + return obj; + }, {}); + } else { + allVersions = history.reduce((obj, { contracts, input, timestamp, commitHash }) => { + Object.entries(contracts).forEach(([contractName, contract]) => { + if (typeof contract.version === "undefined") return; + if (!obj[contract.version]) obj[contract.version] = []; + obj[contract.version].push({ contract: { ...contract, timestamp, commitHash }, contractName, input }); + }); + return obj; + }, {}); + } + + let out = ``; + out += Object.entries(allVersions) + .map( + ([version, contractInfos]) => ` +### [${version}](https://github.com/0xPolygon/pol-token/releases/tag/${version}) + +${prettifyTimestamp(contractInfos[0].contract.timestamp)} + +Commit Hash: [${contractInfos[0].contract.commitHash.slice(0, 7)}](https://github.com/0xPolygon/pol-token/commit/${ + contractInfos[0].contract.commitHash + }) + +Deployed contracts: + +- ${contractInfos + .map( + ({ contract, contractName }) => + `[${contractName.replace(/([A-Z])/g, " $1").trim()}](${ + getEtherscanLink(chainId, contract.address) || contract.address + })${ + contract.proxyType + ? ` ([Implementation](${ + getEtherscanLink(chainId, contract.implementation) || contract.implementation + }))` + : `` + }`, + ) + .join("\n- ")} + +
<details>
<summary>Inputs</summary>
<table>
    <tr>
        <th>Parameter</th>
        <th>Value</th>
    </tr>
    ${Object.entries(contractInfos[0].input)
      .map(
        ([key, value]) => `
    <tr>
        <td>${key}</td>
        <td>${value}</td>
    </tr>`,
      )
      .join("\n")}
</table>
</details>
+ `, + ) + .join("\n\n"); + + return { deploymentHistoryMd: out, allVersions }; +} + +function prettifyTimestamp(timestamp) { + return new Date(timestamp * 1000).toUTCString().replace("GMT", "UTC"); +} + +const hexToAscii = (str) => hexToUtf8(str).replace(/[\u0000-\u0008,\u000A-\u001F,\u007F-\u00A0]+/g, ""); // remove non-ascii chars +const hexToUtf8 = (str) => new TextDecoder().decode(hexToUint8Array(str)); // note: TextDecoder present in node, update if not using nodejs +function hexToUint8Array(hex) { + const value = hex.toLowerCase().startsWith("0x") ? hex.slice(2) : hex; + return new Uint8Array(Math.ceil(value.length / 2)).map((_, i) => parseInt(value.substring(i * 2, i * 2 + 2), 16)); +} + +function validateInputs() { + let [chainId, version, scriptName] = process.argv.slice(2); + let printUsageAndExit = false; + if ( + !( + typeof chainId === "string" && + ["string", "undefined"].includes(typeof version) && + ["string", "undefined"].includes(typeof scriptName) + ) || + chainId === "help" + ) { + if (chainId !== "help") + console.log(`error: invalid inputs: ${JSON.stringify({ chainId, version, scriptName }, null, 0)}\n`); + printUsageAndExit = true; + } + if ( + version && + !( + existsSync(join(__dirname, `../${version}/input.json`)) && + existsSync(join(__dirname, `../${version}/${scriptName}`)) + ) + ) { + console.log( + `error: script/${version}/input.json or script/${version}/${scriptName || ""} does not exist\n`, + ); + printUsageAndExit = true; + } + if (printUsageAndExit) { + console.log(`usage: node script/utils/extract.js {chainId} [version = "1.0.0"] [scriptName = "Deploy.s.sol"]`); + process.exit(1); + } +} + +main(); diff --git a/script/util/generateDeployer.js b/script/util/generateDeployer.js new file mode 100644 index 0000000..45a71c8 --- /dev/null +++ b/script/util/generateDeployer.js @@ -0,0 +1,97 @@ +const fs = require("fs"); +const path = require("path"); + +// Function to replace occurrences of 'Example', 'uint256 arg', and 'arg' in a file +const replaceInFile = (filePath, newFilePath, replacementExample, replacementArgs, replacementPathToExample) => { + fs.readFile(filePath, "utf8", (err, data) => { + if (err) { + return console.error(err); + } + + let regexExample = new RegExp("", "g"); + let regexArgs = new RegExp("", "g"); + let regexArgsNames = new RegExp("", "g"); + let regexPathToExample = new RegExp("", "g"); + + let updatedData = data.replace(regexExample, replacementExample); + updatedData = updatedData.replace(regexArgs, replacementArgs); + updatedData = updatedData.replace(regexArgsNames, processString(replacementArgs)); + updatedData = updatedData.replace(regexPathToExample, replacementPathToExample); + + fs.writeFile(newFilePath, updatedData, "utf8", (err) => { + if (err) { + console.error(err); + } else { + console.log("Deployer generated."); + + if (replacementArgs.length === 0) { + fs.readFile(newFilePath, "utf8", (err, data) => { + if (err) { + return console.error(err); + } + + // Find the index of the first occurrence of "address proxyAdmin" and replace it + const index = data.indexOf("address proxyAdmin,\n"); + if (index !== -1) { + const newData = + data.slice(0, index) + "address proxyAdmin" + data.slice(index + "address proxyAdmin,\n".length); + // Write the updated content back to the file + fs.writeFile(newFilePath, newData, "utf8", (err) => { + if (err) return console.error(err); + }); + } else { + console.log('No match found for "address proxyAdmin," in the file.'); + } + }); + } + } + }); + }); +}; + +function 
processString(inputString) { + if (inputString.includes(",")) { + const words = inputString.split(","); + const lastWords = words.map((word) => word.trim().split(" ").pop()); + return lastWords.join(", "); + } else { + return inputString.trim().split(" ").pop(); + } +} + +// Replace occurrences in the specified file with the provided arguments +const filePath = "script/util/deployer_template"; +const replacementPathToExample = process.argv[2]; +const replacementArgs = process.argv[3]; +const newFilePath = process.argv[4]; +let replacementExample; + +// Extract the file name from the path by splitting the string based on the '/' delimiter +const parts = replacementPathToExample.split("/"); +// Get the last part of the path, which is the file name with the extension +const fileNameWithExtension = parts[parts.length - 1]; +// Split the file name by the dot('.') to get the name and the extension separately +const fileNameParts = fileNameWithExtension.split("."); +// Check if there is more than one element in the fileNameParts array +if (fileNameParts.length > 1) { + // Join the parts of the file name excluding the last element (the extension) + replacementExample = fileNameParts.slice(0, -1).join("."); +} else { + // The file name as it is if no extension is found + replacementExample = fileNameParts[0]; +} + +if (!replacementPathToExample || !newFilePath) { + console.error( + "Usage: node script/util/generateDeployer.js [type arg, ...] ", + ); + process.exit(1); +} + +let filePathPrefix = newFilePath; + +const formattedPath = path.join(filePathPrefix, "Deploy" + replacementExample + ".s.sol"); + +replaceInFile(filePath, formattedPath, replacementExample, replacementArgs, replacementPathToExample); + +// TODO: Format the new file diff --git a/slither.config.json b/slither.config.json new file mode 100644 index 0000000..e0d7e2a --- /dev/null +++ b/slither.config.json @@ -0,0 +1,3 @@ +{ + "filter_paths": "(lib/|test/|scripts/)" +} diff --git a/src/Counter.sol b/src/Counter.sol new file mode 100644 index 0000000..23c6eb1 --- /dev/null +++ b/src/Counter.sol @@ -0,0 +1,28 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.22; + +import {ICounter, IVersioned} from "./interface/ICounter.sol"; +import {Initializable} from "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol"; + +contract Counter is ICounter, Initializable { + uint256 public number; + + function initialize(uint256 initialNumber) public initializer { + number = initialNumber; + } + + /// @inheritdoc ICounter + function setNumber(uint256 newNumber) public { + number = newNumber; + } + + /// @inheritdoc ICounter + function increment() public { + number++; + } + + /// @inheritdoc IVersioned + function version() external pure returns (string memory) { + return "1.0.0"; + } +} diff --git a/src/interface/ICounter.sol b/src/interface/ICounter.sol new file mode 100644 index 0000000..d499905 --- /dev/null +++ b/src/interface/ICounter.sol @@ -0,0 +1,16 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.22; + +import {IVersioned} from "./IVersioned.sol"; + +interface ICounter is IVersioned { + /// @return The current number + function number() external view returns (uint256); + + /// @notice Sets the number + /// @param newNumber The new number + function setNumber(uint256 newNumber) external; + + /// @notice Increments the number by 1 + function increment() external; +} diff --git a/src/interface/IVersioned.sol b/src/interface/IVersioned.sol new file mode 100644 index 0000000..dc6caae --- /dev/null +++ 
b/src/interface/IVersioned.sol @@ -0,0 +1,7 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.22; + +interface IVersioned { + /// @return The version of the contract + function version() external pure returns (string memory); +} diff --git a/test/1.0.0/Counter.t.sol b/test/1.0.0/Counter.t.sol new file mode 100644 index 0000000..0c2615d --- /dev/null +++ b/test/1.0.0/Counter.t.sol @@ -0,0 +1,53 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.22; + +import "forge-std/Test.sol"; +import "test/util/TestHelpers.sol"; + +import "script/1.0.0/Deploy.s.sol"; + +abstract contract BeforeScript is Test, TestHelpers, CounterDeployer { + Counter internal counter; + + function setUp() public { + (counter,) = deployCounter_NoInit(makeAddr("")); + } +} + +contract CounterTest_Zero is BeforeScript { + function test_Initializes(uint256 number) public { + counter.initialize(number); + assertEq(counter.number(), number); + } +} + +abstract contract AfterScript is Test, TestHelpers, Deploy { + function setUp() public virtual { + run(); + } +} + +contract CounterTest_Initialized is AfterScript { + function test_IsInitialized() public { + assertEq(counter.number(), 10); + } + + function test_RevertsIf_InitializedAgain() public { + vm.expectRevert(Initializable.InvalidInitialization.selector); + counter.initialize(1); + } + + function test_IncrementsNumber() public { + counter.increment(); + assertEq(counter.number(), 11); + } + + function testFuzz_SetsNumber(uint256 x) public { + counter.setNumber(x); + assertEq(counter.number(), x); + } + + function test_ReturnsVersion() public { + assertEq(counter.version(), "1.0.0"); + } +} diff --git a/test/util/TestHelpers.sol b/test/util/TestHelpers.sol new file mode 100644 index 0000000..1d03de4 --- /dev/null +++ b/test/util/TestHelpers.sol @@ -0,0 +1,13 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.22; + +import "forge-std/Test.sol"; + +abstract contract TestHelpers is Test { + Account internal DEPLOYER; + + constructor() { + DEPLOYER = makeAccount("DEPLOYER"); + vm.setEnv("PRIVATE_KEY", vm.toString(DEPLOYER.key)); + } +}
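As a usage note on the deployer generator above: the snippet below is a minimal standalone sketch of what its `processString` helper produces for a constructor-argument string. The function body is copied from `script/util/generateDeployer.js` in this diff; the argument strings in the example calls are made up for illustration and are not values from the repository.

```js
// Copied from script/util/generateDeployer.js: given a comma-separated
// "type name" argument list, keep only the last word of each entry
// (i.e. the argument names, dropping the types).
function processString(inputString) {
  if (inputString.includes(",")) {
    const words = inputString.split(",");
    const lastWords = words.map((word) => word.trim().split(" ").pop());
    return lastWords.join(", ");
  } else {
    return inputString.trim().split(" ").pop();
  }
}

// Illustrative inputs only:
console.log(processString("uint256 initialNumber"));                // "initialNumber"
console.log(processString("uint256 initialNumber, address owner")); // "initialNumber, owner"
```

Judging from how the result replaces the args-names placeholder in the deployer template, this name-only list appears to be what the generated deploy script forwards to the deployed contract, while the full "type name" string is used for the script's own function signature.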