├── .editorconfig ├── .github ├── CODEOWNERS └── workflows │ ├── ci.yml │ ├── codeql-analysis.yml │ └── deploy.yml ├── .gitignore ├── .prettierrc ├── LICENSE ├── README.md ├── action.yml ├── babel.config.js ├── jest.config.js ├── mass_generate_mock_records.sh ├── package.json ├── src ├── __tests__ │ ├── Integration.test.ts │ └── main.test.ts ├── domain │ ├── Constants.ts │ ├── Regex.ts │ ├── Types.ts │ ├── __tests__ │ │ └── constants.test.ts │ ├── exceptions.ts │ ├── index.ts │ └── typeDeclaratives.ts ├── index.ts ├── infra │ ├── github.ts │ └── index.ts ├── main.ts ├── modules │ ├── approvals │ │ ├── index.ts │ │ └── modules │ │ │ ├── get_approvals.ts │ │ │ └── request_reviewers.ts │ ├── assertions │ │ ├── Domain │ │ │ └── types.ts │ │ ├── __tests__ │ │ │ ├── Assertions.test.ts │ │ │ ├── assert_has_authors.test.ts │ │ │ ├── assert_valid_filename.test.ts │ │ │ ├── require_authors.test.ts │ │ │ ├── require_editors.test.ts │ │ │ ├── require_file_preexisting.test.ts │ │ │ └── require_filename_eip_num.test.ts │ │ ├── assert_constant_eip_number.ts │ │ ├── assert_constant_status.ts │ │ ├── assert_eip1_editor_approvals.ts │ │ ├── assert_eip_editor_approval.ts │ │ ├── assert_filename_and_file_numbers_match.ts │ │ ├── assert_has_authors.ts │ │ ├── assert_is_approved_by_authors.ts │ │ ├── assert_valid_filename.ts │ │ ├── assert_valid_status.ts │ │ ├── index.ts │ │ ├── require_authors.ts │ │ ├── require_editors.ts │ │ ├── require_file_preexisting.ts │ │ ├── require_filename_eip_num.ts │ │ ├── require_files.ts │ │ ├── require_max_file_number.ts │ │ ├── require_pr.ts │ │ └── require_pull_number.ts │ ├── file │ │ ├── domain │ │ │ └── types.ts │ │ ├── index.ts │ │ └── modules │ │ │ ├── file_diff_infra.ts │ │ │ └── get_parsed_content.ts │ ├── main │ │ └── modules │ │ │ ├── get_comment_message.ts │ │ │ ├── get_type │ │ │ ├── __tests__ │ │ │ │ └── getType.test.ts │ │ │ ├── index.ts │ │ │ ├── logs.ts │ │ │ ├── new_eip_file.ts │ │ │ ├── status_change.ts │ │ │ └── update_eip.ts │ │ │ ├── purify_test_results.ts │ │ │ └── test_file.ts │ ├── pull_request │ │ ├── domain │ │ │ └── types.ts │ │ ├── infra │ │ │ └── github_api │ │ │ │ ├── __tests__ │ │ │ │ └── updateLabels.test.ts │ │ │ │ ├── github_pull_request.ts │ │ │ │ └── log.ts │ │ └── use_cases │ │ │ ├── index.ts │ │ │ ├── post_comment.ts │ │ │ └── update_labels.ts │ ├── purifiers │ │ ├── __tests__ │ │ │ ├── editor_approval.test.ts │ │ │ ├── eip1.test.ts │ │ │ └── withdrawn_exceptions.test.ts │ │ ├── editor_approval.ts │ │ ├── eip1.ts │ │ ├── index.ts │ │ ├── status_change_allowed.ts │ │ └── withdrawn_exceptions.ts │ └── utils │ │ ├── debug.ts │ │ └── index.ts └── tests │ ├── assets │ ├── mockPR.ts │ └── records │ │ ├── 3654 │ │ ├── 1.json │ │ └── 2.json │ │ ├── 3768 │ │ ├── 1.json │ │ └── 2.json │ │ ├── 3596.json │ │ ├── 3612.json │ │ ├── 3623.json │ │ ├── 3676.json │ │ ├── 3767.json │ │ ├── 4189.json │ │ ├── 4192.json │ │ ├── 4361.json │ │ ├── 4478.json │ │ ├── 4499.json │ │ ├── 4506.json │ │ └── index.ts │ ├── factories │ ├── envFactory.ts │ ├── fileDiffFactory.ts │ ├── fileFactory.ts │ ├── prFactory.ts │ └── testResultsFactory.ts │ └── testutils.ts ├── tsconfig.build.json ├── tsconfig.json └── yarn.lock /.editorconfig: -------------------------------------------------------------------------------- 1 | root = false 2 | 3 | [*.ts] 4 | charset = utf-8 5 | end_of_line = lf 6 | indent_size = 2 7 | indent_style = space 8 | insert_final_newline = true 9 | tab_width = 2 10 | trim_trailing_whitespace = true 11 | 
-------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Default codeowners 2 | * @Pandapip1 @alita-moore 3 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Node.js CI 2 | 3 | on: 4 | push: 5 | branches: [master] 6 | pull_request: 7 | branches: [master] 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | 13 | strategy: 14 | matrix: 15 | node-version: [16.x] 16 | 17 | steps: 18 | - uses: actions/checkout@v2 19 | - name: Use Node.js ${{ matrix.node-version }} 20 | uses: actions/setup-node@v2 21 | with: 22 | node-version: ${{ matrix.node-version }} 23 | - run: yarn install 24 | - run: yarn build 25 | - run: yarn test 26 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ "master", await-ci-pass, delete-failed-on-review, dist, mark-eips-stale ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ "master" ] 20 | schedule: 21 | - cron: '37 15 * * 1' 22 | 23 | jobs: 24 | analyze: 25 | name: Analyze 26 | runs-on: ubuntu-latest 27 | permissions: 28 | actions: read 29 | contents: read 30 | security-events: write 31 | 32 | strategy: 33 | fail-fast: false 34 | matrix: 35 | language: [ 'javascript' ] 36 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] 37 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support 38 | 39 | steps: 40 | - name: Checkout repository 41 | uses: actions/checkout@v3 42 | 43 | # Initializes the CodeQL tools for scanning. 44 | - name: Initialize CodeQL 45 | uses: github/codeql-action/init@v2 46 | with: 47 | languages: ${{ matrix.language }} 48 | # If you wish to specify custom queries, you can do so here or in a config file. 49 | # By default, queries listed here will override any specified in a config file. 50 | # Prefix the list here with "+" to use these queries and those in the config file. 51 | 52 | # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs 53 | # queries: security-extended,security-and-quality 54 | 55 | 56 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 57 | # If this step fails, then you should remove it and run the build manually (see below) 58 | - name: Autobuild 59 | uses: github/codeql-action/autobuild@v2 60 | 61 | # ℹ️ Command-line programs to run using the OS shell. 
62 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun 63 | 64 | # If the Autobuild fails above, remove it and uncomment the following three lines. 65 | # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 66 | 67 | # - run: | 68 | # echo "Run, Build Application using script" 69 | # ./location_of_script_within_repo/buildscript.sh 70 | 71 | - name: Perform CodeQL Analysis 72 | uses: github/codeql-action/analyze@v2 73 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - master 5 | 6 | jobs: 7 | deploy: 8 | name: Build and Deploy 9 | runs-on: ubuntu-latest 10 | 11 | steps: 12 | - name: Checkout 13 | uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b 14 | with: 15 | persist-credentials: false 16 | fetch-depth: 0 17 | 18 | - name: Setup 19 | uses: actions/setup-node@094c36e88e2a3ffdf4d3a5bb935088ac39a46acc 20 | with: 21 | node-version: 16 22 | 23 | - name: Install 24 | run: yarn install --immutable --immutable-cache --check-cache 25 | 26 | - name: Build 27 | run: yarn run build 28 | 29 | - name: Remove Gitignore 30 | run: rm .gitignore 31 | 32 | - name: Commit & Push changes 33 | uses: actions-js/push@5f565701a8b9f9aa6b7efc25f28994eabfcf5312 34 | with: 35 | github_token: ${{ secrets.GITHUB_TOKEN }} 36 | message: Submit Build 37 | branch: dist 38 | force: true 39 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | 9 | # Diagnostic reports (https://nodejs.org/api/report.html) 10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 11 | 12 | # Runtime data 13 | pids 14 | *.pid 15 | *.seed 16 | *.pid.lock 17 | 18 | # Directory for instrumented libs generated by jscoverage/JSCover 19 | lib-cov 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | *.lcov 24 | 25 | # nyc test coverage 26 | .nyc_output 27 | 28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 29 | .grunt 30 | 31 | # Bower dependency directory (https://bower.io/) 32 | bower_components 33 | 34 | # node-waf configuration 35 | .lock-wscript 36 | 37 | # Compiled binary addons (https://nodejs.org/api/addons.html) 38 | build/Release 39 | 40 | # Dependency directories 41 | node_modules/ 42 | jspm_packages/ 43 | 44 | # TypeScript v1 declaration files 45 | typings/ 46 | 47 | # TypeScript cache 48 | *.tsbuildinfo 49 | 50 | # Optional npm cache directory 51 | .npm 52 | 53 | # Optional eslint cache 54 | .eslintcache 55 | 56 | # Optional REPL history 57 | .node_repl_history 58 | 59 | # Output of 'npm pack' 60 | *.tgz 61 | 62 | # Yarn Integrity file 63 | .yarn-integrity 64 | 65 | # dotenv environment variables file 66 | .env 67 | .env.test 68 | 69 | # parcel-bundler cache (https://parceljs.org/) 70 | .cache 71 | 72 | # next.js build output 73 | .next 74 | 75 | # nuxt.js build output 76 | .nuxt 77 | 78 | # vuepress build output 79 | .vuepress/dist 80 | 81 | # Serverless directories 82 | .serverless/ 83 | 84 | # FuseBox cache 85 | .fusebox/ 86 | 87 | # DynamoDB Local files 88 | .dynamodb/ 89 | 90 | # build 91 | build/ 92 | *.DS_Store 93 | package-lock.json 
94 | .idea/ -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "trailingComma": "none", 3 | "tabWidth": 2, 4 | "semi": true, 5 | "singleQuote": false 6 | } 7 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Creative Commons Legal Code 2 | 3 | CC0 1.0 Universal 4 | 5 | CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE 6 | LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN 7 | ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS 8 | INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES 9 | REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS 10 | PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM 11 | THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED 12 | HEREUNDER. 13 | 14 | Statement of Purpose 15 | 16 | The laws of most jurisdictions throughout the world automatically confer 17 | exclusive Copyright and Related Rights (defined below) upon the creator 18 | and subsequent owner(s) (each and all, an "owner") of an original work of 19 | authorship and/or a database (each, a "Work"). 20 | 21 | Certain owners wish to permanently relinquish those rights to a Work for 22 | the purpose of contributing to a commons of creative, cultural and 23 | scientific works ("Commons") that the public can reliably and without fear 24 | of later claims of infringement build upon, modify, incorporate in other 25 | works, reuse and redistribute as freely as possible in any form whatsoever 26 | and for any purposes, including without limitation commercial purposes. 27 | These owners may contribute to the Commons to promote the ideal of a free 28 | culture and the further production of creative, cultural and scientific 29 | works, or to gain reputation or greater distribution for their Work in 30 | part through the use and efforts of others. 31 | 32 | For these and/or other purposes and motivations, and without any 33 | expectation of additional consideration or compensation, the person 34 | associating CC0 with a Work (the "Affirmer"), to the extent that he or she 35 | is an owner of Copyright and Related Rights in the Work, voluntarily 36 | elects to apply CC0 to the Work and publicly distribute the Work under its 37 | terms, with knowledge of his or her Copyright and Related Rights in the 38 | Work and the meaning and intended legal effect of CC0 on those rights. 39 | 40 | 1. Copyright and Related Rights. A Work made available under CC0 may be 41 | protected by copyright and related or neighboring rights ("Copyright and 42 | Related Rights"). Copyright and Related Rights include, but are not 43 | limited to, the following: 44 | 45 | i. the right to reproduce, adapt, distribute, perform, display, 46 | communicate, and translate a Work; 47 | ii. moral rights retained by the original author(s) and/or performer(s); 48 | iii. publicity and privacy rights pertaining to a person's image or 49 | likeness depicted in a Work; 50 | iv. rights protecting against unfair competition in regards to a Work, 51 | subject to the limitations in paragraph 4(a), below; 52 | v. rights protecting the extraction, dissemination, use and reuse of data 53 | in a Work; 54 | vi. 
database rights (such as those arising under Directive 96/9/EC of the 55 | European Parliament and of the Council of 11 March 1996 on the legal 56 | protection of databases, and under any national implementation 57 | thereof, including any amended or successor version of such 58 | directive); and 59 | vii. other similar, equivalent or corresponding rights throughout the 60 | world based on applicable law or treaty, and any national 61 | implementations thereof. 62 | 63 | 2. Waiver. To the greatest extent permitted by, but not in contravention 64 | of, applicable law, Affirmer hereby overtly, fully, permanently, 65 | irrevocably and unconditionally waives, abandons, and surrenders all of 66 | Affirmer's Copyright and Related Rights and associated claims and causes 67 | of action, whether now known or unknown (including existing as well as 68 | future claims and causes of action), in the Work (i) in all territories 69 | worldwide, (ii) for the maximum duration provided by applicable law or 70 | treaty (including future time extensions), (iii) in any current or future 71 | medium and for any number of copies, and (iv) for any purpose whatsoever, 72 | including without limitation commercial, advertising or promotional 73 | purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each 74 | member of the public at large and to the detriment of Affirmer's heirs and 75 | successors, fully intending that such Waiver shall not be subject to 76 | revocation, rescission, cancellation, termination, or any other legal or 77 | equitable action to disrupt the quiet enjoyment of the Work by the public 78 | as contemplated by Affirmer's express Statement of Purpose. 79 | 80 | 3. Public License Fallback. Should any part of the Waiver for any reason 81 | be judged legally invalid or ineffective under applicable law, then the 82 | Waiver shall be preserved to the maximum extent permitted taking into 83 | account Affirmer's express Statement of Purpose. In addition, to the 84 | extent the Waiver is so judged Affirmer hereby grants to each affected 85 | person a royalty-free, non transferable, non sublicensable, non exclusive, 86 | irrevocable and unconditional license to exercise Affirmer's Copyright and 87 | Related Rights in the Work (i) in all territories worldwide, (ii) for the 88 | maximum duration provided by applicable law or treaty (including future 89 | time extensions), (iii) in any current or future medium and for any number 90 | of copies, and (iv) for any purpose whatsoever, including without 91 | limitation commercial, advertising or promotional purposes (the 92 | "License"). The License shall be deemed effective as of the date CC0 was 93 | applied by Affirmer to the Work. Should any part of the License for any 94 | reason be judged legally invalid or ineffective under applicable law, such 95 | partial invalidity or ineffectiveness shall not invalidate the remainder 96 | of the License, and in such case Affirmer hereby affirms that he or she 97 | will not (i) exercise any of his or her remaining Copyright and Related 98 | Rights in the Work or (ii) assert any associated claims and causes of 99 | action with respect to the Work, in either case contrary to Affirmer's 100 | express Statement of Purpose. 101 | 102 | 4. Limitations and Disclaimers. 103 | 104 | a. No trademark or patent rights held by Affirmer are waived, abandoned, 105 | surrendered, licensed or otherwise affected by this document. 106 | b. 
Affirmer offers the Work as-is and makes no representations or 107 | warranties of any kind concerning the Work, express, implied, 108 | statutory or otherwise, including without limitation warranties of 109 | title, merchantability, fitness for a particular purpose, non 110 | infringement, or the absence of latent or other defects, accuracy, or 111 | the present or absence of errors, whether or not discoverable, all to 112 | the greatest extent permissible under applicable law. 113 | c. Affirmer disclaims responsibility for clearing rights of other persons 114 | that may apply to the Work or any use thereof, including without 115 | limitation any person's Copyright and Related Rights in the Work. 116 | Further, Affirmer disclaims responsibility for obtaining any necessary 117 | consents, permissions or other rights required for any use of the 118 | Work. 119 | d. Affirmer understands and acknowledges that Creative Commons is not a 120 | party to this document and has no duty or obligation with respect to 121 | this CC0 or use of the Work. 122 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # EIP Linting Bot 2 | 3 | This Github Actions integrated bot lints EIPs and provides feedback for authors; its goal is to catch simple problems, notify the relevant individuals to review, and merge simple changes automatically. 4 | 5 | # Usage 6 | 7 | ```yml 8 | on: 9 | workflow_run: 10 | workflows: 11 | - Auto Review Bot Trigger 12 | types: 13 | - completed 14 | 15 | name: Auto Review Bot 16 | jobs: 17 | auto-review-bot: 18 | runs-on: ubuntu-latest 19 | name: Run 20 | steps: 21 | - name: Fetch PR Number 22 | uses: dawidd6/action-download-artifact@6765a42d86407a3d532749069ac03705ad82ebc6 23 | with: 24 | name: pr-number 25 | workflow: auto-review-trigger.yml 26 | run_id: ${{ github.event.workflow_run.id }} 27 | 28 | - name: Save PR Number 29 | id: save-pr-number 30 | run: echo "::set-output name=pr::$(cat pr-number.txt)" 31 | 32 | - name: Checkout 33 | uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b 34 | with: 35 | repository: ethereum/EIPs # Default, but best to be explicit here 36 | ref: master 37 | 38 | - name: Setup Node.js Environment 39 | uses: actions/setup-node@2fddd8803e2f5c9604345a0b591c3020ee971a93 40 | with: 41 | node-version: 16 42 | 43 | - name: Auto Review Bot 44 | id: auto-review-bot 45 | uses: ethereum/EIP-Bot@1e1bb6a58e02d28e9afa9462b00a518d9b47860e # dist 46 | with: 47 | GITHUB-TOKEN: ${{ secrets.TOKEN }} 48 | PR_NUMBER: ${{ steps.save-pr-number.outputs.pr }} 49 | CORE_EDITORS: '@MicahZoltu,@lightclient,@axic,@gcolvin,@SamWilsn,@Pandapip1' 50 | ERC_EDITORS: '@lightclient,@axic,@SamWilsn,@Pandapip1' 51 | NETWORKING_EDITORS: '@MicahZoltu,@lightclient,@axic,@SamWilsn' 52 | INTERFACE_EDITORS: '@lightclient,@axic,@SamWilsn,@Pandapip1' 53 | META_EDITORS: '@lightclient,@axic,@gcolvin,@SamWilsn,@Pandapip1' 54 | INFORMATIONAL_EDITORS: '@lightclient,@axic,@gcolvin,@SamWilsn,@Pandapip1' 55 | MAINTAINERS: '@alita-moore,@mryalamanchi' 56 | 57 | - name: Enable Auto-Merge 58 | uses: reitermarkus/automerge@a25ea0de41019ad13380d22e01db8f5638f1bcdc 59 | with: 60 | token: ${{ secrets.TOKEN }} 61 | pull-request: ${{ steps.save-pr-number.outputs.pr }} 62 | 63 | - name: Submit Approval 64 | uses: hmarr/auto-approve-action@24ec4c8cc344fe1cdde70ff37e55ace9e848a1d8 65 | with: 66 | github-token: ${{ secrets.TOKEN }} 67 | pull-request-number: ${{ 
steps.save-pr-number.outputs.pr }} 68 | ``` 69 | 70 | # Contributing 71 | 72 | ## Standard Practices 73 | 74 | ### Function Naming 75 | 76 | This library uses a few naming conventions that may appear strange at first: 77 | 78 | - **require...** : functions that start with `require` guarantee that they return the resource you're looking for; otherwise they throw an error 79 | - **assert...** : functions that start with `assert` test something and, if that test fails, return some kind of error message string. This is where the errors that the bot reports to the author come from. 80 | - **...Purifier** : functions that end in `purifier` are used to _purify_ test results; they help keep the logic of assertions clean and handle cross-error dependencies. For example, if you change the status you need an editor approval, but once you actually get that approval we don't want to show the error for changing the status (i.e. `if (changedStatus && !approvedByEditor) { return error } else if (changedStatus && approvedByEditor) { return }`). 81 | 82 | These practices are applied to make things easier to understand. If you're not careful, the logic can get tangled very quickly, and then it's really hard to read and change things. 83 | 84 | ### Testing 85 | 86 | This bot employs two types of tests: 87 | 88 | - functional 89 | - integration 90 | 91 | A functional test is your standard unit test: take a small function and test its behavior thoroughly. You don't need anything more than jest to do this, and your code should be organized such that the sub-functions are abstracted and tested. The code also uses dependency injection for this reason (it's typically easier to mock that way). Everything should have unit tests. 92 | 93 | An integration test considers the behavior as a whole. In this bot, we mock network responses from the github api using `nock`. When you do this for every network request, you get a snapshot and can test the behavior of the whole. All integration tests were once bugs that were fixed, so if you implement a feature you don't need to add an integration test. It's easier to manage this way, and it serves the purpose of reducing code regressions. Integration tests tend to be brittle because of the number of moving parts, so the code uses several homebrewed tools to maximize reliability. 94 | 95 | Feel free to share ideas on how to improve testing procedures. 96 | 97 | ## Getting Started 98 | 99 | ### Requirements 100 | 101 | 1. node package manager (npm) 102 | 2. Github Token 103 | 3. Forked Repo 104 | 4. nodejs 105 | 106 | ### Quick Start (npm run it) 107 | 108 | `npm run it` runs the bot end to end, which means you can integrate and test with github directly. It uses the compiled TypeScript output, so don't forget to build first with `npm run build` or `npm run watch`. 109 | 110 | 1. Download your forked `EIPS` repo 111 | 2. Create a [Github Token](/creating-a-personal-access-token) 112 | 3. Create a PR in your forked repo doing anything; I recommend just editing a couple of lines in an already existing EIP 113 | 4. Create a .env file in the root dir with the following information defined: 114 | 115 | ``` 116 | GITHUB_TOKEN = 117 | NODE_ENV = development 118 | 119 | PULL_NUMBER = 120 | BASE_SHA = 121 | HEAD_SHA = 122 | REPO_OWNER_NAME = 123 | REPO_NAME = EIPs 124 | GITHUB_REPOSITORY = /EIPs 125 | ``` 126 | 127 | 5. 
`npm run build && npm run it` 128 | 129 | ### Quick Start (npm run mock) 130 | 131 | `npm run mock` is a tool built for writing integration tests, but it can also be used for development. `npm run mock` uses the saved network data of previous pull requests and states of those pull requests. Try this by mocking [pull 3670](https://github.com/ethereum/EIPs/pull/3670). 132 | 133 | 1. Clone this repo 134 | 2. Set up your local environment (requires node > 14.x): `npm install` 135 | 3. Create a .env file in the root dir with the following information: 136 | 137 | ``` 138 | GITHUB_TOKEN = anything 139 | 140 | PULL_NUMBER = 3670 141 | REPO_OWNER_NAME = ethereum 142 | REPO_NAME = EIPs 143 | GITHUB_REPOSITORY = ethereum/EIPs 144 | EVENT_TYPE = pull_request_target 145 | ``` 146 | 147 | 4. Then run the mock: `npm run mock` 148 | 5. You should get a response like the following: 149 | 150 | ```bash 151 | alitamoore@Alitas-MBP EIP-Bot % npm run mock 152 | 153 | > auto-merge-eip@1.0.0 mock /Users/alitamoore/ethereum/EIP-Bot 154 | > NODE_ENV=MOCK node -r dotenv/config build/src/index.js 155 | 156 | failed to pass tests with the following errors: 157 | - File with name EIPS/eip-3670.md is new and new files must be reviewed 158 | - This PR requires review from one of [@micahzoltu, @lightclient, @arachnid, @cdetrio, @souptacular, @vbuterin, @nicksavers, @wanderer, @gcolvin] 159 | ::error::failed to pass tests with the following errors:%0A - File with name EIPS/eip-3670.md is new and new files must be reviewed%0A - This PR requires review from one of [@micahzoltu, @lightclient, @arachnid, @cdetrio, @souptacular, @vbuterin, @nicksavers, @wanderer, @gcolvin] 160 | npm ERR! code ELIFECYCLE 161 | npm ERR! errno 1 162 | npm ERR! auto-merge-eip@1.0.0 mock: `NODE_ENV=MOCK node -r dotenv/config build/src/index.js` 163 | npm ERR! Exit status 1 164 | npm ERR! 165 | npm ERR! Failed at the auto-merge-eip@1.0.0 mock script. 166 | npm ERR! This is probably not a problem with npm. There is likely additional logging output above. 167 | 168 | npm ERR! A complete log of this run can be found in: 169 | npm ERR! /Users/alitamoore/.npm/_logs/2021-07-25T06_43_54_229Z-debug.log 170 | ``` 171 | 172 | In this case, an error was expected because the bug in question was whether the editors were mentioned when a status error occurred (i.e. when the status wasn't one of the allowed types). 173 | 174 | ### Troubleshooting 175 | 176 | - When I run it, I'm getting unexplainable errors with my github requests. 177 | - Github limits the number of requests from a given IP; this may be avoidable if you only use the `octokit`, but a VPN also works just fine 178 | 179 | ## Code Style Guidelines (in no particular order) 180 | 181 | This repo is a living repo, and it will grow with the EIP drafting and editing process. It's important to maintain code quality. 182 | 183 | 1. Define every type (including octokit) 184 | 2. Make clean and clear error messages 185 | 3. Avoid abstraction 186 | 4. Use [enums](https://www.sohamkamani.com/javascript/enums/) as much as possible 187 | 188 | ## Explanations of Style Guidelines 189 | 190 | A couple of things to keep in mind if you end up making changes to this repo: 191 | 192 | #### 1. Define every type 193 | 194 | Define every type, no `any` types. The time it takes to define a type now will save you or someone else a lot of time later. If you make assumptions about types, protect those assumptions (throw an exception if they are false). 
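As a minimal sketch of what protecting an assumption can look like (the repo ships its own helpers such as `assertDefined` in `src/domain/typeDeclaratives.ts`; the helper below is illustrative only and not part of this codebase):

```typescript
// Illustrative only: an assertion function turns a runtime check into a
// compile-time narrowing. If the assumption is false, it throws immediately
// instead of letting an undefined value silently propagate.
function assertIsDefined<T>(
  value: T,
  message = "expected value to be defined"
): asserts value is NonNullable<T> {
  if (value === undefined || value === null) {
    throw new Error(message);
  }
}

// after the assertion, TypeScript narrows `maybePullNumber`
// from `string | undefined` to `string`
const maybePullNumber = process.env.PULL_NUMBER;
assertIsDefined(maybePullNumber, "PULL_NUMBER must be set");
const pullNumber: number = parseInt(maybePullNumber, 10);
```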
195 | 196 | Sometimes [Octokit types](https://www.npmjs.com/package/@octokit/types) can be difficult to index, but it's important that whenever possible the types are defined and assumptions protected. 197 | 198 | #### 2. Make clean and clear error messages 199 | 200 | This bot has a single goal: catch simple mistakes automatically and save the editors' time. So clear error messages that allow the PR author to change it themselves are very important. 201 | 202 | #### 3. Avoid Abstraction 203 | 204 | Only abstract if necessary; keep things in one file where applicable. Other examples of okay abstraction are types, regex, and methods used more than 3 times. Otherwise, it's often cleaner to just re-write things. 205 | 206 | ```javascript 207 | // DON'T DO THIS 208 | ** src/lib.ts ** 209 | export const baz = () => "baz" 210 | 211 | ** src/foo.ts ** 212 | import { baz } from "./lib" 213 | export const foo = () => baz(); 214 | 215 | ** src/bar.ts ** 216 | import { baz } from "./lib" 217 | export const bar = () => baz(); 218 | 219 | // DO THIS 220 | ** src/foo.ts ** 221 | const baz = () => "baz" 222 | export const foo = () => baz(); 223 | 224 | ** src/bar.ts ** 225 | const baz = () => "baz" 226 | export const bar = () => baz(); 227 | ``` 228 | 229 | #### 4. Always use enum when defining restricted string types 230 | 231 | In short, enums make code easier to read, trace, and maintain. 232 | 233 | But here's a brief intro if you haven't worked with them before: 234 | 235 | ```typescript 236 | enum EnumFoo { 237 | bar = "BAR", 238 | baz = "BAZ" 239 | } 240 | type Foo = "BAR" | "BAZ"; 241 | ``` 242 | 243 | Inline declaration is maintained 244 | 245 | ```typescript 246 | const foo: EnumFoo; 247 | const bar: Foo; 248 | // foo and bar both must be either "BAR" or "BAZ" 249 | ``` 250 | 251 | Use case is slightly different 252 | 253 | ```typescript 254 | const foo: EnumFoo = EnumFoo.baz; // you can't directly assign "BAZ" 255 | const bar: Foo = "BAZ"; 256 | ``` 257 | 258 | But comparisons are maintained 259 | 260 | ```typescript 261 | // taking variables from above 262 | ("BAZ" === foo) === ("BAZ" === bar) && 263 | ("BAZ" === EnumFoo.baz) === ("BAZ" === "BAZ"); 264 | ``` 265 | 266 | In addition to the above, using enums eradicates magic strings and centralizes the strings to be matched so they can be easily changed. This makes life much easier if you ever want to change the names of the statuses on an EIP. 267 | -------------------------------------------------------------------------------- /action.yml: -------------------------------------------------------------------------------- 1 | name: "Auto Merge EIP" 2 | description: "A bot that lints EIP edits, finds common errors, and can auto-merge" 3 | inputs: 4 | GITHUB-TOKEN: 5 | description: |- 6 | The Github token to be used. This is optional because if it's not provided the bot will use a token from a brand new account not associated with any repositories and the key is gone 7 | required: false 8 | PR_NUMBER: 9 | description: The Pull Request number 10 | required: false 11 | CORE_EDITORS: 12 | description: a list of editors in the following format "@github_username,@..." to be mentioned for core eip changes 13 | required: true 14 | ERC_EDITORS: 15 | description: a list of editors in the following format "@github_username,@..." to be mentioned for ERC type changes 16 | required: true 17 | NETWORKING_EDITORS: 18 | description: a list of editors in the following format "@github_username,@..." 
to be mentioned for NETWORKING type changes 19 | required: true 20 | INTERFACE_EDITORS: 21 | description: a list of editors in the following format "@github_username,@..." to be mentioned for INTERFACE type changes 22 | required: true 23 | META_EDITORS: 24 | description: a list of editors in the following format "@github_username,@..." to be mentioned for META type changes 25 | required: true 26 | INFORMATIONAL_EDITORS: 27 | description: a list of editors in the following format "@github_username,@..." to be mentioned for INFORMATIONAL type changes 28 | required: true 29 | MAINTAINERS: 30 | required: true 31 | description: a list of users or groups in the following format "@github_username,@..."; these users will be called when the bot experiences an unexpected failure 32 | 33 | runs: 34 | using: composite 35 | steps: 36 | - run: node --trace-warnings ${{github.action_path}}/build/src/index.js 37 | shell: bash 38 | env: 39 | ERC_EDITORS: ${{ inputs.ERC_EDITORS }} 40 | CORE_EDITORS: ${{ inputs.CORE_EDITORS }} 41 | NETWORKING_EDITORS: ${{ inputs.NETWORKING_EDITORS }} 42 | INTERFACE_EDITORS: ${{ inputs.INTERFACE_EDITORS }} 43 | META_EDITORS: ${{ inputs.META_EDITORS }} 44 | INFORMATIONAL_EDITORS: ${{ inputs.INFORMATIONAL_EDITORS }} 45 | GITHUB_TOKEN: ${{ inputs.GITHUB-TOKEN }} 46 | MAINTAINERS: ${{ inputs.MAINTAINERS }} 47 | PR_NUMBER: ${{ inputs.PR_NUMBER }} 48 | NODE_ENV: production 49 | 50 | -------------------------------------------------------------------------------- /babel.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | presets: [ 3 | ["@babel/preset-env", { targets: { node: "current" } }], 4 | "@babel/preset-typescript" 5 | ] 6 | }; 7 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | /* 2 | * For a detailed explanation regarding each configuration property, visit: 3 | * https://jestjs.io/docs/en/configuration.html 4 | */ 5 | const tsconfig = require("./tsconfig.json"); 6 | const moduleNameMapper = require("tsconfig-paths-jest")(tsconfig); 7 | 8 | module.exports = { 9 | clearMocks: true, 10 | moduleDirectories: ["node_modules", "./"], 11 | moduleNameMapper, 12 | testEnvironment: "node", 13 | testMatch: [ 14 | // "**/__tests__/**/*.[jt]s?(x)", 15 | "**/?(*.)+(spec|test).[tj]s?(x)" 16 | ] 17 | }; 18 | -------------------------------------------------------------------------------- /mass_generate_mock_records.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # add GITHUB_TOKEN to your local call (e.g. GITHUB_TOKEN="token" sh ./mass...) 
3 | export NODE_ENV="development" 4 | 5 | export EVENT_TYPE="pull_request_target" 6 | export REPO_OWNER_NAME="ethereum" 7 | export REPO_NAME="EIPs" 8 | export GITHUB_REPOSITORY="ethereum/EIPs" 9 | export CORE_EDITORS="@alita-moore" 10 | export ERC_EDITORS="@lightclient,@axic" 11 | export NETWORKING_EDITORS="@alita-moore" 12 | export INTERFACE_EDITORS="@alita-moore" 13 | export META_EDITORS="@alita-moore" 14 | export INFORMATIONAL_EDITORS="@alita-moore" 15 | export MAINTAINERS="@alita-moore, @fake-alita-moore" 16 | 17 | ALL=("3654_1" "3654_2" "3768_1" "3768_2" "3581" "3596" "3612" "3623" "3670" "3676" "3767" "4189" "4192" "4361" "4393" "4478" "4499" "4506" "4506") 18 | REMAINING=("3768") 19 | 20 | for VARIABLE in "${ALL[@]}"; do 21 | export PULL_NUMBER=$VARIABLE 22 | yarn mock 23 | done 24 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "auto-merge-eip", 3 | "version": "1.0.0", 4 | "description": "auto merge eip", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "NODE_ENV=test jest", 8 | "build": "npx tsc --p ./tsconfig.build.json ", 9 | "watch": "npx tsc --watch --p ./tsconfig.build.json", 10 | "it": "NODE_ENV=development node -r dotenv/config build/src/index.js", 11 | "mock": "NODE_ENV=MOCK node -r dotenv/config build/src/index.js", 12 | "prettier-changed": "prettier --write", 13 | "prettier:all": "prettier --write src assets __tests__" 14 | }, 15 | "author": "", 16 | "license": "CC0-1.0", 17 | "dependencies": { 18 | "@actions/core": "^1.9.1", 19 | "@actions/github": "^5.0.0", 20 | "@octokit/plugin-retry": "^3.0.7", 21 | "@octokit/rest": "^18.10.0", 22 | "@types/faker": "^5.5.7", 23 | "@types/lodash": "^4.14.173", 24 | "@types/nock": "^11.1.0", 25 | "@types/underscore": "^1.11.4", 26 | "depcheck": "^1.4.2", 27 | "dotenv": "^10.0.0", 28 | "email-addresses": "^5.0.0", 29 | "faker": "^5.5.3", 30 | "front-matter": "4.0.2", 31 | "http-status": "^1.5.0", 32 | "lodash": "^4.17.21", 33 | "nock": "^13.1.1", 34 | "prettier": "^2.4.1", 35 | "tsconfig-paths-jest": "^0.0.1", 36 | "type-fest": "^2.5.4", 37 | "underscore": "^1.13.2" 38 | }, 39 | "devDependencies": { 40 | "@babel/preset-env": "^7.15.6", 41 | "@babel/preset-typescript": "^7.13.0", 42 | "@octokit/types": "^6.28.1", 43 | "@types/jest": "^28.1.6", 44 | "@types/node": "^18.7.1", 45 | "@types/node-fetch": "^3.0.3", 46 | "@typescript-eslint/parser": "^4.31.1", 47 | "eslint": "^7.32.0", 48 | "husky": "^7.0.2", 49 | "jest": "^27.2.0", 50 | "mocked-env": "^1.3.5", 51 | "module-alias": "^2.2.2", 52 | "pretty-quick": "^3.1.1", 53 | "typescript": "^4.4.3" 54 | }, 55 | "husky": { 56 | "hooks": { 57 | "pre-commit": "pretty-quick --changed" 58 | } 59 | }, 60 | "_moduleAliases": { 61 | "#": "build/src/modules", 62 | "src": "build/src" 63 | }, 64 | "repository": { 65 | "type": "git", 66 | "url": "git+https://github.com/alita-moore/EIP-Bot.git" 67 | }, 68 | "keywords": [], 69 | "bugs": { 70 | "url": "https://github.com/alita-moore/EIP-Bot/issues" 71 | }, 72 | "homepage": "https://github.com/alita-moore/EIP-Bot#readme" 73 | } 74 | -------------------------------------------------------------------------------- /src/__tests__/Integration.test.ts: -------------------------------------------------------------------------------- 1 | // integration tests in this repo are previously fixed bugs 2 | import { SavedRecord } from "src/tests/assets/records"; 3 | import { envFactory } from 
"src/tests/factories/envFactory"; 4 | import { __MAIN_MOCK__, mockPR } from "src/tests/assets/mockPR"; 5 | import { EIPCategory, EIPTypeOrCategoryToResolver, EIPTypes } from "src/domain"; 6 | import { assertDefined } from "src/domain/typeDeclaratives"; 7 | import { github } from "src/infra"; 8 | import { RequireFilenameEIPNum } from "#/assertions/require_filename_eip_num"; 9 | import { getApprovals } from "#/approvals"; 10 | import { getParsedContent } from "#/file/modules/get_parsed_content"; 11 | import { Exceptions } from "src/domain/exceptions"; 12 | import { getSetFailedMock, initGeneralTestEnv } from "src/tests/testutils"; 13 | 14 | const getPullRequestFiles = github.getPullRequestFiles; 15 | 16 | describe("integration testing edgecases associated with editors", () => { 17 | initGeneralTestEnv(); 18 | const setFailedMock = getSetFailedMock(); 19 | 20 | describe("Pull 3654", () => { 21 | it("should mention editors if there's a valid status error and no editor approval", async () => { 22 | process.env = envFactory({ 23 | PULL_NUMBER: SavedRecord.PR3654_2, 24 | [EIPTypeOrCategoryToResolver[EIPTypes.informational]]: 25 | "@micahzoltu, @lighclient" 26 | }); 27 | 28 | await __MAIN_MOCK__(); 29 | const Domain = await import("src/domain"); 30 | 31 | // collect the call 32 | expect(setFailedMock).toHaveBeenCalledTimes(1); 33 | const call = setFailedMock.mock.calls[0]; 34 | 35 | expect(call).toBeDefined(); 36 | expect(call![0]).toContain(Domain.INFORMATIONAL_EDITORS()[0]); 37 | expect(call![0]).toContain(Domain.INFORMATIONAL_EDITORS()[1]); 38 | }); 39 | 40 | it("should pass with editor approval", async () => { 41 | process.env = envFactory({ 42 | PULL_NUMBER: SavedRecord.PR3654_1, 43 | [EIPTypeOrCategoryToResolver[EIPTypes.informational]]: 44 | "@micahzoltu, @lighclient" 45 | }); 46 | 47 | await __MAIN_MOCK__(); 48 | expect(setFailedMock).not.toBeCalled(); 49 | }); 50 | }); 51 | 52 | describe("Pull 3767", () => { 53 | it("should pass", async () => { 54 | process.env = envFactory({ PULL_NUMBER: SavedRecord.PR3767 }); 55 | 56 | await __MAIN_MOCK__(); 57 | expect(setFailedMock).not.toBeCalled(); 58 | }); 59 | }); 60 | 61 | describe("Pull 3612", () => { 62 | it("should pass", async () => { 63 | process.env = envFactory({ PULL_NUMBER: SavedRecord.PR3612 }); 64 | 65 | await __MAIN_MOCK__(); 66 | expect(setFailedMock).not.toBeCalled(); 67 | }); 68 | }); 69 | 70 | describe("Pull 4192", () => { 71 | it("should not pass either files", async () => { 72 | process.env = envFactory({ PULL_NUMBER: SavedRecord.PR4192 }); 73 | 74 | await __MAIN_MOCK__(); 75 | expect(setFailedMock).toBeCalled(); 76 | const call = setFailedMock.mock.calls[0] as NonNullable< 77 | typeof setFailedMock.mock.calls[0] 78 | >; 79 | expect(call[0]).not.toMatch(/passed/); 80 | }); 81 | 82 | it("should mention multiple expected files", async () => { 83 | process.env = envFactory({ PULL_NUMBER: SavedRecord.PR4192 }); 84 | 85 | await __MAIN_MOCK__(); 86 | expect(setFailedMock).toBeCalled(); 87 | const call = setFailedMock.mock.calls[0] as NonNullable< 88 | typeof setFailedMock.mock.calls[0] 89 | >; 90 | expect(call[0]).toMatch(/eip-1010.md/); 91 | expect(call[0]).toMatch(/eip-1056.md/); 92 | }); 93 | }); 94 | 95 | describe("Pull 3768", () => { 96 | it("(variant 1) should pass", async () => { 97 | process.env = envFactory({ 98 | PULL_NUMBER: SavedRecord.PR3768_1, 99 | [EIPTypeOrCategoryToResolver[EIPCategory.erc]]: "@micahzoltu" 100 | }); 101 | 102 | await __MAIN_MOCK__(); 103 | expect(setFailedMock).not.toBeCalled(); 104 | }); 105 | 106 | 
it("(variant 2) should fail", async () => { 107 | process.env = envFactory({ 108 | PULL_NUMBER: SavedRecord.PR3768_2, 109 | [EIPTypeOrCategoryToResolver[EIPCategory.erc]]: "@micahzoltu" 110 | }); 111 | 112 | await __MAIN_MOCK__(); 113 | expect(setFailedMock).toBeCalled(); 114 | }); 115 | 116 | it("should not mention authors with emails", async () => { 117 | process.env = envFactory({ 118 | PULL_NUMBER: SavedRecord.PR3768_2, 119 | [EIPTypeOrCategoryToResolver[EIPCategory.erc]]: "@micahzoltu" 120 | }); 121 | 122 | await __MAIN_MOCK__(); 123 | const call = setFailedMock.mock.calls[0]; 124 | expect(call).toBeDefined(); 125 | assertDefined(call); 126 | expect(call[0]).not.toMatch(/dete@axiomzen.co/); 127 | }); 128 | }); 129 | 130 | describe("Pull 3623", () => { 131 | it("should pass", async () => { 132 | process.env = envFactory({ 133 | PULL_NUMBER: SavedRecord.PR3623, 134 | [EIPTypeOrCategoryToResolver[EIPCategory.erc]]: "@micahzoltu" 135 | }); 136 | 137 | await __MAIN_MOCK__(); 138 | expect(setFailedMock).not.toBeCalled(); 139 | }); 140 | }); 141 | 142 | describe("Pull 4189", () => { 143 | it("should pass", async () => { 144 | process.env = envFactory({ 145 | PULL_NUMBER: SavedRecord.PR4189 146 | }); 147 | 148 | await __MAIN_MOCK__(); 149 | expect(setFailedMock).not.toBeCalled(); 150 | }); 151 | }); 152 | 153 | describe("Pull 4478", () => { 154 | it("should fail", async () => { 155 | process.env = envFactory({ 156 | PULL_NUMBER: SavedRecord.PR4478 157 | }); 158 | 159 | await __MAIN_MOCK__(); 160 | expect(setFailedMock).toBeCalled(); 161 | }); 162 | }); 163 | 164 | describe("Pull 4506", () => { 165 | it("should fail", async () => { 166 | process.env = envFactory({ 167 | PULL_NUMBER: SavedRecord.PR4506 168 | }); 169 | 170 | await __MAIN_MOCK__(); 171 | expect(setFailedMock).toBeCalled(); 172 | }); 173 | 174 | it("should fail gracefully on assets/eip-3448/MetaProxyFactory.sol", async () => { 175 | process.env = envFactory({ 176 | PULL_NUMBER: SavedRecord.PR4506 177 | }); 178 | 179 | const PR = await mockPR(SavedRecord.PR4506); 180 | const _RequireFilenameEIPNum = new RequireFilenameEIPNum({ 181 | getPullRequestFiles: getPullRequestFiles, 182 | requirePr: jest.fn().mockResolvedValue(PR), 183 | requireEIPEditors: jest.fn().mockReturnValue(["@editor1", "@editor2"]), 184 | getApprovals: getApprovals, 185 | getParsedContent: getParsedContent 186 | }); 187 | 188 | const exceptionType = await _RequireFilenameEIPNum 189 | .requireFilenameEipNum("assets/eip-3448/MetaProxyFactory.sol") 190 | .catch((err) => err.type); 191 | expect(exceptionType).toBe(Exceptions.gracefulTermination); 192 | }); 193 | }); 194 | 195 | describe("Pull 4499", () => { 196 | it("should fail", async () => { 197 | process.env = envFactory({ 198 | PULL_NUMBER: SavedRecord.PR4499 199 | }); 200 | 201 | const Exceptions = await import("src/domain/exceptions"); 202 | const requirementViolationMock = jest.spyOn( 203 | Exceptions, 204 | "RequirementViolation" 205 | ); 206 | 207 | await __MAIN_MOCK__(); 208 | expect(setFailedMock).toBeCalled(); 209 | 210 | expect(requirementViolationMock).not.toBeCalled(); 211 | }); 212 | }); 213 | 214 | describe("Pull 4361", () => { 215 | it("should succeed", async () => { 216 | process.env = envFactory({ 217 | PULL_NUMBER: SavedRecord.PR4361, 218 | ERC_EDITORS: "@lightclient,@axic" 219 | }); 220 | 221 | await __MAIN_MOCK__(); 222 | expect(setFailedMock).not.toBeCalled(); 223 | }) 224 | }) 225 | }); 226 | -------------------------------------------------------------------------------- 
/src/__tests__/main.test.ts: -------------------------------------------------------------------------------- 1 | import { _main } from "src/main"; 2 | import { expectError, initGeneralTestEnv } from "src/tests/testutils"; 3 | import * as core from "@actions/core"; 4 | 5 | describe("main (error handler)", () => { 6 | const _main_ = jest.fn(); 7 | const main = _main(_main_); 8 | 9 | const setFailedMock = jest 10 | .fn() 11 | .mockImplementation(core.setFailed) as jest.MockedFunction< 12 | typeof core.setFailed 13 | >; 14 | 15 | beforeEach(() => { 16 | jest.spyOn(core, "setFailed").mockImplementation(setFailedMock); 17 | setFailedMock.mockClear(); 18 | }); 19 | 20 | initGeneralTestEnv(); 21 | 22 | it("should set failed if exception", async () => { 23 | _main_.mockRejectedValue("error"); 24 | await expectError(() => main()); 25 | expect(setFailedMock).toBeCalledTimes(1); 26 | }); 27 | }); 28 | -------------------------------------------------------------------------------- /src/domain/Constants.ts: -------------------------------------------------------------------------------- 1 | import { Opaque } from "type-fest"; 2 | import { ERRORS, Maybe, NodeEnvs } from "./Types"; 3 | import { AND } from "#/utils"; 4 | import { CriticalError, RequirementViolation, UnexpectedError } from "src/domain/exceptions"; 5 | import { assertGithubHandle, GithubHandle } from "./typeDeclaratives"; 6 | import _ from "lodash"; 7 | 8 | // this is meant to be a public key associated with an orphaned account; 9 | // it is encoded / decoded here because github will invalidate it if it knows 10 | // that it's public (so shhh); also this key will never expire 11 | export const PUBLIC_GITHUB_KEY = Buffer.from( 12 | "Z2hwX1hvVVBlcFpTUkdWWmFVdDRqOW44SHFSUloxNVlIZTFlNW82bw==", 13 | "base64" 14 | ).toString("ascii"); 15 | 16 | export const MERGE_MESSAGE = ` 17 | Hi, I'm a bot! This change was automatically merged because: 18 | - It only modifies existing Draft, Review, or Last Call EIP(s) 19 | - The PR was approved or written by at least one author of each modified EIP 20 | - The build is passing 21 | `; 22 | 23 | export const COMMENT_HEADER = 24 | "Hi! 
I'm a bot, and I wanted to automerge your PR, but couldn't because of the following issue(s):\n\n"; 25 | export const GITHUB_TOKEN = process.env.GITHUB_TOKEN || PUBLIC_GITHUB_KEY; 26 | 27 | const handleStringToArray = (str?: string) => 28 | str && str.split(",").map((str) => str.trim()); 29 | 30 | export function assertEditorsFormat( 31 | maybeEditors: string[] | undefined | "" 32 | ): asserts maybeEditors is [GithubHandle, ...GithubHandle[]] { 33 | if (!maybeEditors || !maybeEditors.length) { 34 | console.log( 35 | [ 36 | `at least one editor must be provided, you provided these environment variables`, 37 | `\tERC_EDITORS: ${process.env.ERC_EDITORS}`, 38 | `\tCORE_EDITORS: ${process.env.CORE_EDITORS}`, 39 | `these were then parsed to become`, 40 | `\tERC_EDITORS: ${JSON.stringify( 41 | handleStringToArray(process.env.ERC_EDITORS) 42 | )}`, 43 | `\tCORE_EDITORS: ${JSON.stringify( 44 | handleStringToArray(process.env.CORE_EDITORS) 45 | )}` 46 | ].join("\n") 47 | ); 48 | throw new CriticalError("at least one editor must be provided"); 49 | } 50 | 51 | for (const maybeEditor of maybeEditors) { 52 | assertGithubHandle(maybeEditor); 53 | } 54 | } 55 | 56 | export function assertMaintainersFormat( 57 | maybeMaintainers: string[] | undefined | "" 58 | ): asserts maybeMaintainers is [GithubHandle, ...GithubHandle[]] { 59 | if (_.isNil(maybeMaintainers) || _.isEmpty(maybeMaintainers)) { 60 | console.log(`MAINTAINERS: ${process.env.MAINTAINERS}`); 61 | throw new CriticalError("at least one maintainer must be provided"); 62 | } 63 | 64 | for (const maybeMaintainer of maybeMaintainers) { 65 | assertGithubHandle(maybeMaintainer); 66 | } 67 | } 68 | 69 | const getEditors = (envEditors?: string) => { 70 | const editors = handleStringToArray(envEditors); 71 | assertEditorsFormat(editors); 72 | return editors; 73 | }; 74 | const getMaintainers = (envMaintainers?: string) => { 75 | const maintainers = handleStringToArray(envMaintainers); 76 | assertMaintainersFormat(maintainers); 77 | return maintainers; 78 | }; 79 | /** don't use this directly, use `requireCoreEditors` instead */ 80 | export const CORE_EDITORS = () => getEditors(process.env.CORE_EDITORS); 81 | /** don't use this directly, use `requireERCEditors` instead */ 82 | export const ERC_EDITORS = () => getEditors(process.env.ERC_EDITORS); 83 | /** don't use this directly, use `requireERCEditors` instead */ 84 | export const NETWORKING_EDITORS = () => 85 | getEditors(process.env.NETWORKING_EDITORS); 86 | /** don't use this directly, use `requireERCEditors` instead */ 87 | export const INTERFACE_EDITORS = () => 88 | getEditors(process.env.INTERFACE_EDITORS); 89 | /** don't use this directly, use `requireERCEditors` instead */ 90 | export const META_EDITORS = () => getEditors(process.env.META_EDITORS); 91 | /** don't use this directly, use `requireERCEditors` instead */ 92 | export const INFORMATIONAL_EDITORS = () => 93 | getEditors(process.env.INFORMATIONAL_EDITORS); 94 | /** 95 | * dont' use this directly, it can explode and break error handling, 96 | * so use `getMaintainersString` instead where relevant 97 | * */ 98 | export const MAINTAINERS = () => { 99 | return getMaintainers(process.env.MAINTAINERS); 100 | }; 101 | 102 | export enum FrontMatterAttributes { 103 | status = "status", 104 | eip = "eip", 105 | author = "author", 106 | category = "category", 107 | type = "type" 108 | } 109 | 110 | export enum EIPCategory { 111 | erc = "erc", 112 | core = "core", 113 | networking = "networking", 114 | interface = "interface" 115 | } 116 | 117 | 
export enum EIPTypes { 118 | informational = "informational", 119 | meta = "meta", 120 | standardsTrack = "standards track" 121 | } 122 | 123 | export const EIPTypeOrCategoryToResolver = { 124 | [EIPCategory.erc]: "ERC_EDITORS", 125 | [EIPCategory.core]: "CORE_EDITORS", 126 | [EIPCategory.interface]: "INTERFACE_EDITORS", 127 | [EIPCategory.networking]: "NETWORKING_EDITORS", 128 | [EIPTypes.meta]: "META_EDITORS", 129 | [EIPTypes.informational]: "INFORMATIONAL_EDITORS" 130 | }; 131 | 132 | /** asserts a string's type is within EIPCategory */ 133 | export function assertIsCategoryEnum( 134 | maybeCategory: string, 135 | fileName: string 136 | ): asserts maybeCategory is EIPCategory { 137 | const categories = Object.values(EIPCategory) as string[]; 138 | if (!categories.includes(maybeCategory)) { 139 | throw new RequirementViolation( 140 | [ 141 | `the provided eip category '${maybeCategory}' of file`, 142 | `'${fileName}' is required to be one of (${categories.join(", ")})` 143 | ].join(" ") 144 | ); 145 | } 146 | } 147 | 148 | export function assertIsTypeEnum( 149 | maybeType: string, 150 | fileName: string 151 | ): asserts maybeType is EIPTypes { 152 | const types = Object.values(EIPTypes) as string[]; 153 | if (!types.includes(maybeType)) { 154 | throw new RequirementViolation( 155 | [ 156 | `the provided eip type is '${maybeType}' of file`, 157 | `'${fileName}' is required to be one of (${types.join(", ")})` 158 | ].join(" ") 159 | ); 160 | } 161 | } 162 | 163 | export const assertCategory = ({ 164 | fileName, 165 | maybeCategory, 166 | maybeType 167 | }: { 168 | fileName: string; 169 | maybeCategory: Maybe; 170 | maybeType: Maybe; 171 | }): { 172 | category: Maybe; 173 | type: EIPTypes; 174 | } => { 175 | if (!maybeType) { 176 | throw new RequirementViolation( 177 | `A 'type' header is required for all EIPs, '${fileName}' does not have a 'type'` 178 | ); 179 | } 180 | const normalizedType = maybeType.toLowerCase(); 181 | assertIsTypeEnum(normalizedType, fileName); 182 | 183 | if (normalizedType === EIPTypes.informational) { 184 | return { 185 | category: null, 186 | type: EIPTypes.informational 187 | }; 188 | } 189 | 190 | if (normalizedType === EIPTypes.meta) { 191 | return { 192 | category: null, 193 | type: EIPTypes.meta 194 | }; 195 | } 196 | 197 | if (normalizedType === EIPTypes.standardsTrack) { 198 | const normalized = maybeCategory?.toLowerCase(); 199 | if (!normalized) { 200 | throw new RequirementViolation( 201 | [ 202 | `'${fileName}' does not have a 'category' property, but it MUST`, 203 | `be set for eips that are type ${EIPTypes.standardsTrack}` 204 | ].join(" ") 205 | ); 206 | } 207 | assertIsCategoryEnum(normalized, fileName); 208 | return { 209 | category: normalized, 210 | type: EIPTypes.standardsTrack 211 | }; 212 | } 213 | 214 | throw new UnexpectedError( 215 | "type was not a known type, this error should never occur" 216 | ); 217 | }; 218 | 219 | export enum EipStatus { 220 | draft = "draft", 221 | withdrawn = "withdrawn", 222 | lastCall = "last call", 223 | review = "review", 224 | final = "final", 225 | living = "living" 226 | } 227 | 228 | export enum FileStatus { 229 | added = "added" 230 | } 231 | 232 | export enum EVENTS { 233 | pullRequest = "pull_request", 234 | pullRequestTarget = "pull_request_target", 235 | pullRequestReview = "pull_request_review" 236 | } 237 | 238 | export enum ChangeTypes { 239 | newEIPFile = "newEIPFile", 240 | statusChange = "statusChange", 241 | updateEIP = "updateEIP", 242 | ambiguous = "ambiguous" 243 | } 244 | 245 | /** 246 | * A 
collection of error strings; although confusing, the error strings are 247 | * defined if an error exists and undefined if not; i.e. 248 | * `ERRORS.approvalErrors.isAuthorApproved` is truthy if the authors have NOT 249 | * approved the PR and falsy if they have, because in the case that they 250 | * have approved the PR no error exists 251 | */ 252 | export const DEFAULT_ERRORS: ERRORS = { 253 | fileErrors: {}, 254 | headerErrors: {}, 255 | authorErrors: {}, 256 | approvalErrors: {} 257 | }; 258 | 259 | export const CHECK_STATUS_INTERVAL = 30000; 260 | 261 | export const EIP1_REQUIRED_EDITOR_APPROVALS = 5; 262 | 263 | export const isTest = () => { 264 | return process.env.NODE_ENV === NodeEnvs.test; 265 | }; 266 | export const isMock = () => { 267 | return process.env.NODE_ENV === NodeEnvs.mock; 268 | }; 269 | 270 | export const isProd = () => { 271 | return process.env.NODE_ENV === NodeEnvs.production; 272 | }; 273 | 274 | export const isDevelopment = () => { 275 | return process.env.NODE_ENV === NodeEnvs.developemnt; 276 | }; 277 | 278 | type NockNoMatchingRequest = Opaque; 279 | export const isNockNoMatchingRequest = ( 280 | err: any 281 | ): err is NockNoMatchingRequest => { 282 | if (isMock()) { 283 | const message = err.message?.toLowerCase(); 284 | if (!message) return false; 285 | return AND( 286 | /nock/.test(message), 287 | /method/.test(message), 288 | /url/.test(message), 289 | /no match/.test(message) 290 | ); 291 | } 292 | return false; 293 | }; 294 | 295 | type NockDisallowedNetConnect = Opaque; 296 | export const isNockDisallowedNetConnect = ( 297 | err: any 298 | ): err is NockDisallowedNetConnect => { 299 | if (isMock()) { 300 | const message = err.message?.toLowerCase(); 301 | if (!message) return false; 302 | return AND( 303 | /nock/.test(message), 304 | /disallowed/.test(message), 305 | /request.*failed/.test(message), 306 | /net connect/.test(message) 307 | ); 308 | } 309 | return false; 310 | }; 311 | 312 | export const ALLOWED_STATUSES = new Set([ 313 | EipStatus.draft, 314 | EipStatus.lastCall, 315 | EipStatus.review 316 | ]); 317 | -------------------------------------------------------------------------------- /src/domain/Regex.ts: -------------------------------------------------------------------------------- 1 | /** matches correctly formatted filenames */ 2 | export const FILE_RE = /^EIPS\/eip-(\d+)\.md$/gm; 3 | /** matches author names formatted like (...) 
*/ 4 | export const AUTHOR_RE = /[(<]([^>)]+)[>)]/gm; 5 | /** to find the EIP number in a file name */ 6 | export const EIP_NUM_RE = /eip-(\d+)\.md/; 7 | /** matches github handles (includes @)*/ 8 | export const GITHUB_HANDLE = /^@[a-z\d](?:[a-z\d]|-(?=[a-z\d])){0,38}$/i; 9 | /** extracts the eip number of the assets folder associated */ 10 | export const ASSETS_EIP_NUM = /(?<=^assets\/eip-)(\d+)(?=\/.*)/; 11 | /** this is used to test if a new file should be considered allowed */ 12 | export const FILE_IN_EIP_FOLDER = /^EIPS\/eip-[a-zA-Z]*_.*\.md/; 13 | 14 | /** 15 | * This functionality is supported in es2020, but for the purposes 16 | * of compatibility (and because it's quite simple) it's built explicitly 17 | */ 18 | export const matchAll = ( 19 | rawString: string, 20 | regex: RegExp, 21 | group: number 22 | ): string[] => { 23 | let match = regex.exec(rawString); 24 | let matches: string[] = []; 25 | while (match != null) { 26 | const matchedGroup = match[group]; 27 | if (matchedGroup === undefined) continue; 28 | matches.push(matchedGroup); 29 | match = regex.exec(rawString); 30 | } 31 | return matches; 32 | }; 33 | -------------------------------------------------------------------------------- /src/domain/Types.ts: -------------------------------------------------------------------------------- 1 | import { getOctokit } from "@actions/github"; 2 | import { ChangeTypes, EIPCategory, EipStatus, EIPTypes } from "./Constants"; 3 | import { FrontMatterResult } from "front-matter"; 4 | import { PromiseValue } from "type-fest"; 5 | import { CriticalError } from "src/domain/exceptions"; 6 | 7 | export type Github = ReturnType["rest"]; 8 | 9 | type UnArrayify = T extends (infer U)[] ? U : T; 10 | 11 | export type CompareCommits = PromiseValue< 12 | ReturnType 13 | >["data"]; 14 | export type PR = PromiseValue>["data"]; 15 | export type Commit = PromiseValue< 16 | ReturnType 17 | >["data"]; 18 | export type Files = PromiseValue< 19 | ReturnType 20 | >["data"]; 21 | export type File = Files[number]; 22 | export type CommitFiles = CompareCommits["base_commit"]["files"]; 23 | export type CommitFile = UnArrayify>; 24 | export type Repo = PromiseValue>["data"]; 25 | export type GithubSelf = PromiseValue< 26 | ReturnType 27 | >["data"]; 28 | export type IssueComments = PromiseValue< 29 | ReturnType 30 | >["data"]; 31 | export type Review = PromiseValue>["data"][number] 32 | 33 | // This was extracted directly from Octokit repo 34 | // node_modules/@octokit/openapi-types/generated/types.ts : 7513 - 7553 35 | export type ContentFile = { 36 | type: string; 37 | encoding: string; 38 | size: number; 39 | name: string; 40 | path: string; 41 | content: string; 42 | sha: string; 43 | url: string; 44 | git_url: string | null; 45 | html_url: string | null; 46 | download_url: string | null; 47 | _links: { 48 | git: string | null; 49 | html: string | null; 50 | self: string; 51 | }; 52 | target?: string; 53 | submodule_git_url?: string; 54 | }; 55 | 56 | export type ContentData = PromiseValue< 57 | ReturnType 58 | >["data"]; 59 | 60 | export type EIP = { 61 | number: string; 62 | status: EipStatus; 63 | authors: Set; 64 | }; 65 | 66 | export type FormattedFile = { 67 | eipNum: number; 68 | status: EipStatus; 69 | authors?: Set; 70 | name: string; 71 | filenameEipNum: number; 72 | category: Maybe; 73 | type: EIPTypes; 74 | }; 75 | 76 | export type Maybe = T | null; 77 | 78 | export type ParsedContent = { 79 | path: string; 80 | name: string; 81 | content: FrontMatterResult; 82 | }; 83 | 84 | export type 
FileDiff = { 85 | head: FormattedFile; 86 | base: FormattedFile; 87 | }; 88 | 89 | export type ERRORS = { 90 | fileErrors: { 91 | filePreexistingError?: string; 92 | validFilenameError?: string; 93 | }; 94 | headerErrors: { 95 | matchingEIPNumError?: string; 96 | constantEIPNumError?: string; 97 | constantStatusError?: string; 98 | validStatusError?: string; 99 | }; 100 | authorErrors: { 101 | hasAuthorsError?: string; 102 | }; 103 | approvalErrors: { 104 | isAuthorApprovedError?: string; 105 | isEditorApprovedError?: string; 106 | enoughEditorApprovalsForEIP1Error?: string; 107 | }; 108 | }; 109 | 110 | type LeafsToBoolean = { 111 | [K in keyof O]: O[K] extends Record 112 | ? Required> 113 | : boolean | null; 114 | }; 115 | 116 | /** 117 | * this type is used to define filter definitions for different change types; the 118 | * type of a change should be distinguishable by the errors alone. Each leaf 119 | * can be either true, false, or null where 120 | * - true: an error exists for this leaf 121 | * - false: an error does not exist for this leaf 122 | * - null: either 123 | * */ 124 | export type ERRORS_TYPE_FILTER = LeafsToBoolean; 125 | 126 | export const encodings = [ 127 | "ascii", 128 | "utf8", 129 | "utf-8", 130 | "utf16le", 131 | "ucs2", 132 | "ucs-2", 133 | "base64", 134 | "latin1", 135 | "binary", 136 | "hex" 137 | ] as const; 138 | export type Encodings = typeof encodings[number]; 139 | 140 | export type TestResults = { errors: ERRORS } & { 141 | fileDiff: FileDiff; 142 | authors?: string[]; 143 | }; 144 | 145 | export enum MockMethods { 146 | get = "GET", 147 | post = "POST", 148 | patch = "PATCH", 149 | put = "PUT" 150 | } 151 | 152 | export type MockRecord = { 153 | req: { 154 | method: string; // ValueOf<{ [k in keyof typeof MockMethods]: `${typeof MockMethods[k]}` }>; 155 | url: string; 156 | }; 157 | res: { 158 | status: number; 159 | data: any; 160 | }; 161 | }; 162 | 163 | export enum NodeEnvs { 164 | test = "test", 165 | mock = "MOCK", 166 | developemnt = "development", 167 | production = "production" 168 | } 169 | 170 | export function requireMockMethod(method): asserts method is MockMethods { 171 | if (!Object.values(MockMethods).includes(method)) { 172 | throw new CriticalError(`method ${method} is not a supported mock method`); 173 | } else { 174 | return method; 175 | } 176 | } 177 | 178 | export type Result = { 179 | filename: string; 180 | successMessage?: string; 181 | errors?: string[]; 182 | mentions?: string[]; 183 | type: ChangeTypes; 184 | }; 185 | 186 | export type Results = Result[]; 187 | 188 | export type PropsValue any> = T extends ( 189 | ...args: infer Props 190 | ) => any 191 | ? 
Props 192 | : never; 193 | 194 | export type MockedFunctionObject< 195 | Obj extends Record any> 196 | > = { [key in keyof Obj]?: jest.MockedFunction }; 197 | -------------------------------------------------------------------------------- /src/domain/__tests__/constants.test.ts: -------------------------------------------------------------------------------- 1 | import MockedEnv from "mocked-env"; 2 | import { envFactory } from "src/tests/factories/envFactory"; 3 | import { 4 | CORE_EDITORS, 5 | EIPCategory, 6 | EIPTypeOrCategoryToResolver, 7 | EIPTypes, 8 | ERC_EDITORS, 9 | INFORMATIONAL_EDITORS, 10 | INTERFACE_EDITORS, 11 | META_EDITORS, 12 | NETWORKING_EDITORS 13 | } from "src/domain"; 14 | import { expectError } from "src/tests/testutils"; 15 | 16 | describe("custom editor resolvers (constants)", () => { 17 | const restore = MockedEnv(process.env); 18 | 19 | afterEach(() => { 20 | restore(); 21 | }); 22 | 23 | afterAll(() => { 24 | jest.restoreAllMocks(); 25 | }); 26 | 27 | const Getters = { 28 | [EIPCategory.erc]: ERC_EDITORS, 29 | [EIPCategory.core]: CORE_EDITORS, 30 | [EIPCategory.interface]: INTERFACE_EDITORS, 31 | [EIPCategory.networking]: NETWORKING_EDITORS, 32 | [EIPTypes.meta]: META_EDITORS, 33 | [EIPTypes.informational]: INFORMATIONAL_EDITORS 34 | }; 35 | 36 | for (const type of Object.keys(Getters)) { 37 | it(`should parse ${type} eip editors in format @author, @author`, () => { 38 | process.env = envFactory({ 39 | [EIPTypeOrCategoryToResolver[type]]: "@author1, @author2" 40 | }); 41 | const res = Getters[type](); 42 | expect(res).toEqual(["@author1", "@author2"]); 43 | }); 44 | 45 | it(`should throw error if ${type} eip editors are missing @`, async () => { 46 | process.env = envFactory({ 47 | [EIPTypeOrCategoryToResolver[type]]: "author1, @author2" 48 | }); 49 | await expectError(() => Getters[type](), `type ${type}`); 50 | }); 51 | 52 | it(`should throw error if ${type} eip editors are missing comma`, async () => { 53 | process.env = envFactory({ 54 | [EIPTypeOrCategoryToResolver[type]]: "@author1 @author2" 55 | }); 56 | await expectError(() => Getters[type](), `type ${type}`); 57 | }); 58 | 59 | it(`should throw error if ${type} eip editors are undefined`, async () => { 60 | process.env = envFactory({ 61 | [EIPTypeOrCategoryToResolver[type]]: undefined 62 | }); 63 | await expectError(() => Getters[type](), `type ${type}`); 64 | }); 65 | 66 | it(`should parse ${type} eip editors in format @author, @author and any number of spaces`, async () => { 67 | process.env = envFactory({ 68 | [EIPTypeOrCategoryToResolver[type]]: 69 | " @author1, @author2 " 70 | }); 71 | const res = Getters[type](); 72 | expect(res).toEqual(["@author1", "@author2"]); 73 | }); 74 | } 75 | }); 76 | -------------------------------------------------------------------------------- /src/domain/exceptions.ts: -------------------------------------------------------------------------------- 1 | import { Maybe } from "./Types"; 2 | import { multiLineString, MultiLineString, OR } from "#/utils"; 3 | import _ from "lodash"; 4 | import { MAINTAINERS } from "src/domain/Constants"; 5 | import { declareType, GithubHandle } from "src/domain/typeDeclaratives"; 6 | 7 | export enum Exceptions { 8 | unexpectedError = "Unexpected Error", 9 | requirementViolation = "Requirement Violation", 10 | gracefulTermination = "Graceful Termination", 11 | critical = "Critical Error", 12 | unhandled = "Unhandled Exception" 13 | } 14 | 15 | export class UnexpectedError { 16 | public readonly type = "Unexpected Error"; 17 | 18 | 
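/*
 * Illustrative sketch (editorial note, not part of this file): these exception
 * classes are plain tagged unions dispatched on `type`. The expected flow is to
 * throw one of them and route it through `processError` (defined further down in
 * this module), roughly:
 *
 *   try {
 *     throw new RequirementViolation("eip file is missing an authors header");
 *   } catch (err) {
 *     processError(err, {
 *       requirementViolation: (message) => console.log(`violation: ${message}`),
 *       unhandled: (error) => { throw error; }
 *     });
 *   }
 *
 * Handler keys mirror the `Exceptions` enum; an error whose type has no matching
 * handler falls through to `unhandled`, and is re-thrown if that is absent too.
 */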
constructor( 19 | public error: Maybe = null, 20 | public data: Maybe = null 21 | ) {} 22 | } 23 | 24 | export class RequirementViolation { 25 | public readonly type = "Requirement Violation"; 26 | 27 | constructor( 28 | public error: Maybe = null, 29 | public data: Maybe = null 30 | ) {} 31 | } 32 | 33 | /** 34 | * this terminates the program gracefully, meaning that it will not be treated 35 | * as an error. This is useful in cases where an invariant violation does not 36 | * necessarily mean that the test fails. 37 | * */ 38 | export class GracefulTermination { 39 | public readonly type = "Graceful Termination"; 40 | 41 | constructor( 42 | public error: Maybe = null, 43 | public data: Maybe = null 44 | ) {} 45 | } 46 | 47 | /** 48 | * this is used when something happens and the whole program needs to be stopped 49 | * immediately, it's generally relevant for things like no PR or failed configs 50 | * */ 51 | export class CriticalError { 52 | public readonly type = Exceptions.critical; 53 | 54 | constructor( 55 | public error: Maybe = null, 56 | public data: Maybe = null 57 | ) {} 58 | } 59 | 60 | type Handlers = { 61 | [key in keyof Omit]: ( 62 | message: string, 63 | data?: any 64 | ) => any; 65 | } & { unhandled: (error: any) => any }; 66 | 67 | /** 68 | * this is written out on purpose to allow for easier changes where necessary 69 | * it will throw an exception for anything that's not handled 70 | * */ 71 | export const processError = ( 72 | err: any, 73 | { 74 | gracefulTermination, 75 | unexpectedError, 76 | requirementViolation, 77 | critical, 78 | unhandled 79 | }: Partial 80 | ) => { 81 | if (err?.type === Exceptions.gracefulTermination) { 82 | if (gracefulTermination) return gracefulTermination(err.error, err.data); 83 | } 84 | 85 | if (err?.type === Exceptions.requirementViolation) { 86 | if (requirementViolation) return requirementViolation(err.error, err.data); 87 | } 88 | 89 | if (err?.type === Exceptions.unexpectedError) { 90 | if (unexpectedError) return unexpectedError(err.error, err.data); 91 | } 92 | 93 | if (err?.type === Exceptions.critical) { 94 | if (critical) return critical(err.error, err.data); 95 | } 96 | 97 | if (unhandled) return unhandled(err); 98 | 99 | throw err; 100 | }; 101 | 102 | export type Exception = 103 | | RequirementViolation 104 | | UnexpectedError 105 | | CriticalError 106 | | GracefulTermination; 107 | 108 | export function isException(maybeException): maybeException is Exception { 109 | if (!Object.values(Exceptions).includes(maybeException?.type)) { 110 | // recycles the exception 111 | return false; 112 | } 113 | return true; 114 | } 115 | 116 | /** 117 | * calls function provided and either returns output directly or it 118 | * processes the error via the standard exception handler. 
119 | * */ 120 | export const tryCatch = any>( 121 | func: Func, 122 | handlers: Partial 123 | ) => { 124 | try { 125 | return func(); 126 | } catch (err) { 127 | return processError(err, handlers); 128 | } 129 | }; 130 | 131 | /** accepts any error object and then builds a meaningful message */ 132 | export const getUnhandledErrorMessage = (error: any) => { 133 | if (!error) { 134 | return "a critical and unhandled exception occurred but there was no error"; 135 | } 136 | 137 | let message = new MultiLineString( 138 | `A critical and unhandled exception has occurred:` 139 | ); 140 | 141 | const hasErrMessage = OR(!!error.error, !!error.message?.toLowerCase()); 142 | if (hasErrMessage) { 143 | error.error && message.addLine(`\tMessage: ${error.error}`); 144 | error.message && 145 | message.addLine(`\tMessage: ${error.message?.toLowerCase()}`); 146 | } else { 147 | message.addLine(`\tMessage: (no error message was provided)`); 148 | } 149 | 150 | const hasErrData = !OR(_.isNil(error.data), _.isEmpty(error.data)); 151 | if (hasErrData) { 152 | message.addLine(`\tData:`); 153 | message.addLine(`${JSON.stringify(error.data, null, 2)}`); 154 | } else { 155 | message.addLine(`\tData: (there is no data for this error)`); 156 | } 157 | 158 | const hasNeitherDataNorMessage = !OR(hasErrMessage, hasErrData); 159 | if (hasNeitherDataNorMessage) { 160 | message.addLine( 161 | `Raw Stringified Error: (doing this because no message or data)` 162 | ); 163 | message.addLine(JSON.stringify(error, null, 2)); 164 | } 165 | 166 | return message.message; 167 | }; 168 | 169 | export const getMaintainersString = () => { 170 | const maintainers: { 171 | success: boolean; 172 | value: string | [GithubHandle, ...GithubHandle[]]; 173 | } = tryCatch( 174 | () => { 175 | return { 176 | success: true, 177 | value: MAINTAINERS() 178 | }; 179 | }, 180 | { 181 | critical: (message) => { 182 | return { 183 | success: false, 184 | value: message 185 | }; 186 | }, 187 | // this will run for any error type other than critical 188 | unhandled: (error) => { 189 | return { 190 | success: false, 191 | value: getUnhandledErrorMessage(error) 192 | }; 193 | } 194 | } 195 | ); 196 | 197 | if (maintainers.success) { 198 | declareType<[GithubHandle, ...GithubHandle[]]>(maintainers.value); 199 | return `(cc ${maintainers.value.join(", ")})`; 200 | } 201 | 202 | declareType(maintainers.value); 203 | return multiLineString("\n")( 204 | `An error occurred while mentioning maintainers:`, 205 | `\t` + maintainers.value 206 | ); 207 | }; 208 | -------------------------------------------------------------------------------- /src/domain/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./Constants"; 2 | export * from "./Types"; 3 | export * from "./Regex"; 4 | export * from "./typeDeclaratives"; 5 | -------------------------------------------------------------------------------- /src/domain/typeDeclaratives.ts: -------------------------------------------------------------------------------- 1 | import _ from "lodash"; 2 | import { AND, OR } from "#/utils"; 3 | import { ChangeTypes, Encodings, encodings, GITHUB_HANDLE } from "src/domain"; 4 | import { CriticalError, RequirementViolation, UnexpectedError } from "src/domain/exceptions"; 5 | import { Opaque } from "type-fest"; 6 | 7 | /** includes a check for NaN and general falsey */ 8 | export const isDefined = ( 9 | maybeDefined: T | null | undefined | typeof NaN | [] | {} | "" 10 | ): maybeDefined is T => { 11 | return !OR( 12 | 
_.isUndefined(maybeDefined), 13 | _.isNull(maybeDefined), 14 | _.isNaN(maybeDefined), 15 | maybeDefined === "", 16 | AND( 17 | OR(_.isObject(maybeDefined), _.isArray(maybeDefined)), 18 | _.isEmpty(maybeDefined) 19 | ) 20 | ); 21 | }; 22 | 23 | export function assertDefined( 24 | maybeDefined: T | null | undefined 25 | ): asserts maybeDefined is T { 26 | if (OR(_.isUndefined(maybeDefined), _.isNull(maybeDefined))) { 27 | throw new RequirementViolation("A defined assertion was violated"); 28 | } 29 | } 30 | 31 | /** Ensures that encodings are as expected by octokit */ 32 | export function requireEncoding( 33 | maybeEncoding: string, 34 | context: string 35 | ): asserts maybeEncoding is Encodings { 36 | // any here because of https://github.com/microsoft/TypeScript/issues/26255 37 | if (!encodings.includes(maybeEncoding as any)) 38 | throw new UnexpectedError( 39 | `Unknown encoding of ${context}: ${maybeEncoding}` 40 | ); 41 | } 42 | 43 | export function castTo(value: any): CastToThisType { 44 | return value; 45 | } 46 | 47 | type FileNotFound = Opaque; 48 | export const isFileNotFound = (err: any): err is FileNotFound => { 49 | return AND( 50 | err.response?.status === 404, 51 | err.response?.data?.message === "Not Found" 52 | ); 53 | }; 54 | 55 | export const isChangeType = (str: string): str is ChangeTypes => { 56 | return Object.values(ChangeTypes).includes(str as any); 57 | }; 58 | 59 | export type GithubHandle = Opaque; 60 | 61 | export function assertGithubHandle( 62 | maybeHandle: string 63 | ): asserts maybeHandle is GithubHandle { 64 | if (!GITHUB_HANDLE.test(maybeHandle)) { 65 | throw new CriticalError( 66 | `${maybeHandle} is not a correctly formatted github handle` 67 | ); 68 | } 69 | } 70 | 71 | export function declareType(input: any): asserts input is T {} 72 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | require("module-alias/register"); 2 | import { main } from "./main"; 3 | import { NodeEnvs } from "src/domain"; 4 | import { __MAIN__ } from "#/utils/debug"; 5 | import { __MAIN_MOCK__ } from "src/tests/assets/mockPR"; 6 | 7 | const isDebug = 8 | process.env.NODE_ENV === NodeEnvs.developemnt || 9 | process.env.NODE_ENV === NodeEnvs.test; 10 | const isMock = process.env.NODE_ENV === NodeEnvs.mock; 11 | 12 | // allows for easy mocking / testing 13 | if (isMock) __MAIN_MOCK__(); 14 | else if (isDebug) __MAIN__(); 15 | else main(); 16 | -------------------------------------------------------------------------------- /src/infra/github.ts: -------------------------------------------------------------------------------- 1 | import { context, getOctokit } from "@actions/github"; 2 | import { 3 | ChangeTypes, 4 | ContentData, 5 | GITHUB_TOKEN, 6 | GithubSelf, 7 | isChangeType, 8 | isDefined, 9 | isMock, 10 | IssueComments, 11 | isTest, 12 | PR, 13 | Review 14 | } from "src/domain"; 15 | import _ from "lodash"; 16 | import { RequestError } from "@octokit/request-error"; 17 | import * as path from "path"; 18 | 19 | const getEventName = () => { 20 | return context.eventName; 21 | }; 22 | 23 | const getPullNumber = () => { 24 | return context.payload?.pull_request?.number || parseInt(process.env.PR_NUMBER as string, 10); 25 | }; 26 | 27 | const getPullRequestFromNumber = (pullNumber: number) => { 28 | const github = getOctokit(GITHUB_TOKEN).rest; 29 | 30 | return github.pulls 31 | .get({ 32 | repo: context.repo.repo, 33 | owner: context.repo.owner, 34 | 
pull_number: pullNumber 35 | }) 36 | .then((res) => { 37 | return res.data; 38 | }); 39 | }; 40 | 41 | /** 42 | * this recurses through github pages of reviews until none are left; it is 43 | * meant to avoid losing data if there's more data than can be retrieved in one 44 | * request 45 | * */ 46 | const getPullRequestReviews = async ( 47 | pullNumber: number, 48 | page = 1 49 | ): Promise => { 50 | const Github = getOctokit(GITHUB_TOKEN).rest; 51 | const { data: reviews }: { data: Review[] } = await Github.pulls.listReviews({ 52 | owner: context.repo.owner, 53 | repo: context.repo.repo, 54 | pull_number: pullNumber, 55 | per_page: 100, 56 | page 57 | }); 58 | if (_.isEmpty(reviews)) { 59 | return reviews; 60 | } 61 | return getPullRequestReviews(pullNumber, page + 1).then((res) => 62 | reviews.concat(res) 63 | ); 64 | }; 65 | 66 | const getPullRequestFiles = (pullNumber: number) => { 67 | const Github = getOctokit(GITHUB_TOKEN).rest; 68 | return Github.pulls 69 | .listFiles({ 70 | pull_number: pullNumber, 71 | repo: context.repo.repo, 72 | owner: context.repo.owner 73 | }) 74 | .then((res) => res.data); 75 | }; 76 | 77 | const getRepoFilenameContent = ( 78 | filename: string, 79 | sha: string 80 | ): Promise => { 81 | const Github = getOctokit(GITHUB_TOKEN).rest; 82 | try { 83 | return Github.repos 84 | .getContent({ 85 | owner: context.repo.owner, 86 | repo: context.repo.repo, 87 | path: filename, 88 | ref: sha 89 | }) 90 | .then((res) => res.data); 91 | } catch (err) { 92 | if (err instanceof RequestError) { 93 | if (err.status == 404) { 94 | return new Promise((resolve) => resolve({ 95 | type: "file", 96 | size: 0, 97 | name: path.basename(filename), 98 | path: filename, 99 | content: "", 100 | encoding: "utf-8", 101 | sha: sha, 102 | url: "", 103 | git_url: null, 104 | html_url: null, 105 | download_url: null, 106 | _links: { 107 | git: null, 108 | html: null, 109 | self: "" 110 | } 111 | }));; 112 | } 113 | } 114 | throw err; 115 | } 116 | }; 117 | 118 | const requestReview = (pr: PR, reviewer: string) => { 119 | const Github = getOctokit(GITHUB_TOKEN).rest; 120 | return ( 121 | Github.pulls 122 | .requestReviewers({ 123 | owner: context.repo.owner, 124 | repo: context.repo.repo, 125 | pull_number: pr.number, 126 | reviewers: [reviewer] 127 | }) 128 | // if an error occurs return undefined 129 | .catch((err) => {}) 130 | ); 131 | }; 132 | 133 | const resolveUserByEmail = async (email: string) => { 134 | const Github = getOctokit(GITHUB_TOKEN).rest; 135 | 136 | // @ts-ignore 137 | const { data: rawEmailSearch } = await Github.search.users({ 138 | q: email 139 | }); 140 | 141 | if (rawEmailSearch.total_count > 0 && rawEmailSearch.items[0] !== undefined) { 142 | return "@" + rawEmailSearch.items[0].login; 143 | } 144 | 145 | const { data: emailSearch } = await Github.search.users({ 146 | q: `${email} in:email` 147 | }); 148 | 149 | if (emailSearch.total_count === 1 && isDefined(emailSearch.items[0])) { 150 | return "@" + emailSearch.items[0].login; 151 | } 152 | 153 | const local = email.split("@")[0]; 154 | if (!local) return; 155 | const firstName = local.split(".")[0]; 156 | const lastName = local.split(".")[1]; 157 | if (!firstName || !lastName) return; 158 | 159 | const { data: nameSearch } = await Github.search.users({ 160 | q: `fullname:${firstName} ${lastName} type:users` 161 | }); 162 | 163 | if (nameSearch.total_count === 1 && isDefined(nameSearch.items[0])) { 164 | return "@" + nameSearch.items[0].login; 165 | } 166 | 167 | return; 168 | }; 169 | 170 | const getSelf = (): 
Promise => { 171 | const Github = getOctokit(GITHUB_TOKEN).rest; 172 | return Github.users.getAuthenticated().then((res) => { 173 | return res.data; 174 | }); 175 | }; 176 | 177 | const getContextIssueComments = (): Promise => { 178 | const Github = getOctokit(GITHUB_TOKEN).rest; 179 | return Github.issues 180 | .listComments({ 181 | owner: context.repo.owner, 182 | repo: context.repo.repo, 183 | issue_number: getPullNumber() 184 | }) 185 | .then((res) => res.data); 186 | }; 187 | 188 | const updateComment = (commentId: number, message: string): Promise => { 189 | const Github = getOctokit(GITHUB_TOKEN).rest; 190 | return Github.issues 191 | .updateComment({ 192 | owner: context.repo.owner, 193 | repo: context.repo.repo, 194 | comment_id: commentId, 195 | body: message 196 | }) 197 | .catch((err) => { 198 | if (err?.request?.body) { 199 | err.request.body = JSON.parse(err.request.body).body; 200 | } 201 | throw err; 202 | }); 203 | }; 204 | 205 | const createCommentOnContext = (message: string): Promise => { 206 | const Github = getOctokit(GITHUB_TOKEN).rest; 207 | return Github.issues.createComment({ 208 | owner: context.repo.owner, 209 | repo: context.repo.repo, 210 | issue_number: getPullNumber(), 211 | body: message 212 | }); 213 | }; 214 | 215 | const getContextLabels = async (): Promise => { 216 | const Github = getOctokit(GITHUB_TOKEN).rest; 217 | const { data: issue } = await Github.issues.get({ 218 | owner: context.repo.owner, 219 | repo: context.repo.repo, 220 | issue_number: getPullNumber() 221 | }); 222 | 223 | const labels = issue.labels; 224 | 225 | return labels 226 | .map((label) => { 227 | if (typeof label === "string") { 228 | return label; 229 | } 230 | return label.name; 231 | // this will make it so that the only labels considered are ChangeTypes 232 | }) 233 | .filter(isDefined) 234 | .filter(isChangeType); 235 | }; 236 | 237 | const setLabels = async (labels: string[]): Promise => { 238 | const Github = getOctokit(GITHUB_TOKEN).rest; 239 | await Github.issues 240 | .setLabels({ 241 | owner: context.repo.owner, 242 | repo: context.repo.repo, 243 | issue_number: getPullNumber(), 244 | // @ts-expect-error the expected type is (string[] & {name: string}[]) | undefined 245 | // but string[] and {name: string}[] cannot simultaneously coincide 246 | labels 247 | }) 248 | .then((res) => res); 249 | }; 250 | 251 | const addLabels = async (labels: string[]): Promise => { 252 | const Github = getOctokit(GITHUB_TOKEN).rest; 253 | 254 | // makes it easy to maintain the integration tests and the 255 | // responses from this are not used 256 | if (isMock() || isTest()) return; 257 | 258 | // because of a weird type issue 259 | const { addLabels: _addLabels } = Github.issues; 260 | 261 | await _addLabels({ 262 | owner: context.repo.owner, 263 | repo: context.repo.repo, 264 | issue_number: getPullNumber(), 265 | labels 266 | }); 267 | }; 268 | 269 | const removeLabels = async (labels: string[]) => { 270 | const Github = getOctokit(GITHUB_TOKEN).rest; 271 | 272 | // makes it easy to maintain the integration tests and the 273 | // responses from this are not used 274 | if (isMock() || isTest()) return; 275 | 276 | await Promise.all( 277 | // this will submit a max of three requests which is not enough to 278 | // rate limit 279 | labels.map((label) => 280 | Github.issues.removeLabel({ 281 | owner: context.repo.owner, 282 | repo: context.repo.repo, 283 | issue_number: getPullNumber(), 284 | name: label 285 | }) 286 | ) 287 | ); 288 | }; 289 | 290 | export const github = { 291 | getSelf, 
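/*
 * Illustrative sketch (editorial note, not part of this file): consumers use this
 * object as a thin facade over Octokit, e.g.
 *
 *   import { github } from "src/infra";
 *
 *   const pullNumber = github.getPullNumber();
 *   const reviews = await github.getPullRequestReviews(pullNumber);
 *   const approvers = reviews
 *     .filter((review) => review.state === "APPROVED")
 *     .map((review) => review.user?.login);
 *
 * The `GithubInfra` type exported at the bottom of this file presumably exists so
 * the whole facade can be injected into modules and stubbed out in tests.
 */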
292 | resolveUserByEmail, 293 | requestReview, 294 | getRepoFilenameContent, 295 | getPullRequestFiles, 296 | getPullRequestReviews, 297 | getPullRequestFromNumber, 298 | getPullNumber, 299 | getEventName, 300 | getContextIssueComments, 301 | updateComment, 302 | createCommentOnContext, 303 | getContextLabels, 304 | setLabels, 305 | addLabels, 306 | removeLabels 307 | }; 308 | 309 | export type GithubInfra = typeof github; 310 | -------------------------------------------------------------------------------- /src/infra/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./github"; 2 | -------------------------------------------------------------------------------- /src/main.ts: -------------------------------------------------------------------------------- 1 | import { setFailed } from "@actions/core"; 2 | import { 3 | requireFiles, 4 | requirePr, 5 | requireMaxFileNumber 6 | } from "#/assertions"; 7 | import { PullRequestUseCases } from "#/pull_request/use_cases"; 8 | import { 9 | ChangeTypes, 10 | isNockDisallowedNetConnect, 11 | isNockNoMatchingRequest, 12 | isProd, 13 | MAINTAINERS, 14 | Results 15 | } from "src/domain"; 16 | import _, { uniq } from "lodash"; 17 | import { requestReviewers } from "#/approvals"; 18 | import { 19 | getMaintainersString, 20 | getUnhandledErrorMessage, 21 | processError 22 | } from "src/domain/exceptions"; 23 | import { multiLineString } from "#/utils"; 24 | import { testFile } from "#/main/modules/test_file"; 25 | import { purifyTestResults } from "#/main/modules/purify_test_results"; 26 | import { getCommentMessage } from "#/main/modules/get_comment_message"; 27 | 28 | export const _main_ = async () => { 29 | const pr = await requirePr(); 30 | 31 | requireMaxFileNumber(pr); 32 | 33 | // Collect the changes made in the given PR from base <-> head for eip files 34 | const files = await requireFiles(pr); 35 | let results: Results = []; 36 | for await (const file of files) { 37 | try { 38 | const dirtyTestResults = await testFile(file); 39 | const testResults = await purifyTestResults(dirtyTestResults); 40 | results.push(testResults); 41 | } catch (err: any) { 42 | processError(err, { 43 | gracefulTermination: (message) => { 44 | results.push({ 45 | filename: file.filename, 46 | successMessage: message, 47 | type: ChangeTypes.ambiguous 48 | }); 49 | }, 50 | requirementViolation: (message) => { 51 | results.push({ 52 | filename: file.filename, 53 | errors: [message], 54 | type: ChangeTypes.ambiguous 55 | }); 56 | }, 57 | unexpectedError: (message, data) => { 58 | console.log(JSON.stringify(data, null, 2)); 59 | message = `An unexpected error occurred (cc ${MAINTAINERS().join( 60 | ", " 61 | )}): ${message}`; 62 | results.push({ 63 | filename: file.filename, 64 | errors: [message], 65 | type: ChangeTypes.ambiguous 66 | }); 67 | } 68 | }); 69 | } 70 | } 71 | 72 | if (!results.filter((res) => res.errors).length) { 73 | const commentMessage = getCommentMessage( 74 | results, 75 | "All tests passed; auto-merging..." 
76 | ); 77 | await PullRequestUseCases.postComment(commentMessage); 78 | console.log(commentMessage); 79 | return; 80 | } 81 | 82 | const commentMessage = getCommentMessage(results); 83 | 84 | // to avoid annoying people, it's best to only do this while running prod 85 | if (isProd()) { 86 | await PullRequestUseCases.postComment(commentMessage); 87 | await requestReviewers( 88 | uniq(results.flatMap((res) => res.mentions).filter(Boolean) as string[]) 89 | ); 90 | } 91 | 92 | console.log(commentMessage); 93 | return setFailed(commentMessage); 94 | }; 95 | 96 | export const _main = (_main_: () => Promise) => async () => { 97 | try { 98 | return await _main_(); 99 | } catch (error: any) { 100 | await processError(error, { 101 | critical: async (errMessage, data) => { 102 | const message = multiLineString("\n")( 103 | `A critical exception has occurred:`, 104 | `\tMessage: ${errMessage.toLowerCase()}`, 105 | data && `\tData:\n${JSON.stringify(data, null, 2)}`, 106 | getMaintainersString() 107 | ); 108 | 109 | console.log(message); 110 | if (isProd()) { 111 | await PullRequestUseCases.postComment(message); 112 | } 113 | 114 | setFailed(message); 115 | throw message; 116 | }, 117 | unhandled: async (error: any) => { 118 | // useful for making sure that auto-mocking can function (dev tool) 119 | if ( 120 | isNockDisallowedNetConnect(error) || 121 | isNockNoMatchingRequest(error) 122 | ) { 123 | throw error; 124 | } 125 | 126 | const message = 127 | getUnhandledErrorMessage(error) + getMaintainersString(); 128 | 129 | console.log(message); 130 | if (isProd()) { 131 | await PullRequestUseCases.postComment(message); 132 | } 133 | 134 | setFailed(message); 135 | throw message; 136 | } 137 | }); 138 | } 139 | }; 140 | 141 | export const main = _main(_main_); 142 | -------------------------------------------------------------------------------- /src/modules/approvals/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./modules/get_approvals"; 2 | export * from "./modules/request_reviewers"; 3 | -------------------------------------------------------------------------------- /src/modules/approvals/modules/get_approvals.ts: -------------------------------------------------------------------------------- 1 | import { requirePr } from "#/assertions"; 2 | import { github } from "src/infra"; 3 | 4 | /** 5 | * @returns the approvals of the pull request in context 6 | */ 7 | export const getApprovals = async () => { 8 | const pr = await requirePr(); 9 | const reviews = await github.getPullRequestReviews(pr.number); 10 | 11 | // Starting with set to prevent repeats 12 | const approvals: Set = new Set(); 13 | 14 | // Add PR author to approver list 15 | if (pr.user?.login) { 16 | approvals.add("@" + pr.user.login.toLowerCase()); 17 | } 18 | 19 | // Only add approvals if the approver has a username 20 | for (const review of reviews) { 21 | const isApproval = review.state == "APPROVED"; 22 | const reviewer = review.user?.login; 23 | if (isApproval && reviewer) { 24 | approvals.add("@" + reviewer.toLowerCase()); 25 | } 26 | } 27 | 28 | return [...approvals]; 29 | }; 30 | -------------------------------------------------------------------------------- /src/modules/approvals/modules/request_reviewers.ts: -------------------------------------------------------------------------------- 1 | import { isDefined } from "src/domain"; 2 | import { requirePr } from "#/assertions"; 3 | import { github } from "src/infra"; 4 | 5 | /** 6 | * Attempts to request a review 
and returns a list of unchanged users 7 | * that were failed to request 8 | * 9 | * @param reviewers list of github handles or emails to request 10 | * @returns list of github handles or emails that failed to be requested 11 | * likely because they are not contributors in the EIPs repo 12 | */ 13 | export const requestReviewers = async (reviewers: string[]) => { 14 | const pr = await requirePr(); 15 | const requestReviewer = async (reviewer: string) => { 16 | const res = await github.requestReview(pr, reviewer); 17 | return !res && reviewer; 18 | }; 19 | 20 | const requested = await Promise.all(reviewers.map(requestReviewer)); 21 | return requested.filter(isDefined); 22 | }; 23 | -------------------------------------------------------------------------------- /src/modules/assertions/Domain/types.ts: -------------------------------------------------------------------------------- 1 | import { File, FileDiff } from "src/domain"; 2 | import { Opaque } from "type-fest"; 3 | 4 | export interface IRequireEditors { 5 | _requireEIPEditors: (EDITORS: string[], fileDiff?: FileDiff) => string[]; 6 | requireEIPEditors: (fileDiff?: FileDiff) => string[]; 7 | } 8 | 9 | export type PreexistingFile = Opaque; 10 | 11 | export interface IRequireFilePreexisting { 12 | requireFilePreexisting: (fileDiff: File) => Promise; 13 | } 14 | 15 | export interface IAssertValidFilename { 16 | assertValidFilename: (file: NonNullable) => Promise; 17 | } 18 | 19 | export interface IRequireFilenameEIPNum { 20 | requireFilenameEipNum: (filename: string, path: string) => Promise; 21 | attemptAssetGracefulTermination: (filename: string) => Promise; 22 | attemptEditorApprovalGracefulTermination: (filename: string) => Promise; 23 | attemptNewFileNoEIPNumber: (filename: string, path: string) => Promise; 24 | } 25 | 26 | export interface IAssertHasAuthors { 27 | /** 28 | * assert that authors exist for the EIP at the PRs base commit 29 | * 30 | * @param file file diff of a given file 31 | * @returns list of raw author data 32 | */ 33 | assertHasAuthors: (file: FileDiff) => string | undefined; 34 | } 35 | 36 | export interface IRequireAuthors { 37 | /** 38 | * requires that authors exist and returns them else throw error 39 | * 40 | * @param file file diff of a given file 41 | * @returns list of raw author data 42 | */ 43 | requireAuthors: (fileDiff: FileDiff) => string[]; 44 | } 45 | 46 | export interface IAssertEIP1EditorApprovals { 47 | /** 48 | * requires a set number of editor approvals 49 | */ 50 | assertEIP1EditorApprovals: ( 51 | fileDiff: FileDiff 52 | ) => Promise; 53 | } 54 | -------------------------------------------------------------------------------- /src/modules/assertions/__tests__/Assertions.test.ts: -------------------------------------------------------------------------------- 1 | import "jest"; 2 | import actions from "@actions/github"; 3 | // import { Context } from "@actions/github/lib/context"; 4 | import { ALLOWED_STATUSES, EipStatus, EVENTS, PR } from "src/domain"; 5 | import { 6 | assertConstantEipNumber, 7 | assertConstantStatus, 8 | assertFilenameAndFileNumbersMatch, 9 | assertValidStatus, 10 | requireFiles, 11 | requirePr, 12 | requirePullNumber 13 | } from "#/assertions"; 14 | import { clearContext, expectError } from "src/tests/testutils"; 15 | import { FileDiffFactory } from "src/tests/factories/fileDiffFactory"; 16 | import { FileFactory } from "src/tests/factories/fileFactory"; 17 | 18 | jest.mock("@actions/github"); 19 | 20 | const { context, getOctokit } = require("@actions/github") as 
jest.Mocked< 21 | typeof actions 22 | >; 23 | 24 | describe("Requires", () => { 25 | beforeEach(() => { 26 | context.payload = { pull_request: { number: 1 } }; 27 | // @ts-expect-error overload read-only for testing purposes 28 | context.repo = { repo: "repo", owner: "owner" }; 29 | context.eventName = EVENTS.pullRequestTarget; 30 | }); 31 | 32 | afterEach(() => { 33 | clearContext(context); 34 | getOctokit.mockClear(); 35 | }); 36 | 37 | describe("requirePullNumber", () => { 38 | it("should return pull request number", () => { 39 | const prNum = requirePullNumber(); 40 | expect(prNum).toBe(1); 41 | }); 42 | 43 | it("should error if there is no pull number", async () => { 44 | context.payload = {}; 45 | await expectError(requirePullNumber); 46 | }); 47 | }); 48 | 49 | describe("requirePr", () => { 50 | const _pr = { 51 | merged: false 52 | }; 53 | const get = jest.fn().mockResolvedValue({ data: _pr }); 54 | beforeEach(() => { 55 | _pr.merged = false; 56 | getOctokit.mockReturnValueOnce({ 57 | rest: { 58 | pulls: { 59 | // @ts-expect-error get is mocked and doesn't align 60 | get 61 | } 62 | } 63 | }); 64 | }); 65 | 66 | afterEach(() => { 67 | get.mockClear(); 68 | }); 69 | it("should return pull request", async () => { 70 | const pr = await requirePr(); 71 | expect(pr).toBe(pr); 72 | }); 73 | it("should call pulls.get with expected info", async () => { 74 | // @ts-expect-error intentionally not used 75 | const _ = await requirePr(); 76 | expect(get.mock.calls[0][0]).toEqual({ 77 | repo: context.repo.repo, 78 | owner: context.repo.owner, 79 | pull_number: context.payload.pull_request?.number 80 | }); 81 | }); 82 | it("should explode if the pr is merged", async () => { 83 | _pr.merged = true; 84 | await expectError(requirePr); 85 | }); 86 | it("should not explode if merged and node_env development", async () => { 87 | _pr.merged = true; 88 | process.env.NODE_ENV = "development"; 89 | 90 | const pr = await requirePr(); 91 | expect(pr).toBeDefined(); 92 | }); 93 | }); 94 | 95 | describe("requireFiles", () => { 96 | const mockFiles = [FileFactory()]; 97 | const listFiles = jest 98 | .fn() 99 | .mockReturnValue(Promise.resolve({ data: mockFiles })); 100 | beforeEach(() => { 101 | getOctokit.mockReturnValue({ 102 | rest: { 103 | pulls: { 104 | // @ts-expect-error listFiles is mocked so meant to be improper 105 | listFiles 106 | } 107 | } 108 | }); 109 | }); 110 | 111 | it("should call github and return files", async () => { 112 | const files = await requireFiles({ number: 1 } as PR); 113 | expect(files).toBe(mockFiles); 114 | }); 115 | 116 | it("should explode if no files exist", async () => { 117 | listFiles.mockReturnValueOnce(Promise.resolve({ data: [] })); 118 | await expectError(() => requireFiles({ number: 1 } as PR)); 119 | }); 120 | }); 121 | }); 122 | 123 | describe("Asserts", () => { 124 | beforeEach(() => { 125 | context.payload = { pull_request: { number: 1 } }; 126 | // @ts-expect-error overload read-only for testing purposes 127 | context.repo = { repo: "repo", owner: "owner" }; 128 | context.eventName = EVENTS.pullRequestTarget; 129 | }); 130 | 131 | afterEach(() => { 132 | clearContext(context); 133 | getOctokit.mockClear(); 134 | }); 135 | 136 | describe("assertFilenameAndFileNumbersMatch", () => { 137 | it("should return undefined if the file names and headers match", () => { 138 | const fileDiff = FileDiffFactory(); 139 | const res = assertFilenameAndFileNumbersMatch(fileDiff); 140 | expect(res).toBeUndefined(); 141 | }); 142 | it("returns an error if the numbers don't match in 
head", () => { 143 | const fileDiff = FileDiffFactory({ 144 | head: { filenameEipNum: 1, eipNum: 2 } 145 | }); 146 | const res = assertFilenameAndFileNumbersMatch(fileDiff); 147 | expect(res).toBeDefined(); 148 | }); 149 | 150 | it("does not return error if numbers don't match in base (base is assumed accurate)", () => { 151 | const fileDiff = FileDiffFactory({ 152 | base: { filenameEipNum: 1, eipNum: 2 } 153 | }); 154 | const res = assertFilenameAndFileNumbersMatch(fileDiff); 155 | expect(res).toBeUndefined(); 156 | }); 157 | }); 158 | 159 | describe("assertConstantEipNumber", () => { 160 | it("should return nothing if the eip numbers haven't changed", () => { 161 | const fileDiff = FileDiffFactory(); 162 | const res = assertConstantEipNumber(fileDiff); 163 | expect(res).toBeUndefined(); 164 | }); 165 | 166 | it("should return error message if only filename eip number changes", () => { 167 | const fileDiff = FileDiffFactory({ 168 | base: { filenameEipNum: 1 }, 169 | head: { filenameEipNum: 2 } 170 | }); 171 | const res = assertConstantEipNumber(fileDiff); 172 | expect(res).toBeDefined(); 173 | }); 174 | 175 | it("should return error message if only header eip number changes", () => { 176 | const fileDiff = FileDiffFactory({ 177 | base: { eipNum: 1 }, 178 | head: { eipNum: 2 } 179 | }); 180 | const res = assertConstantEipNumber(fileDiff); 181 | expect(res).toBeDefined(); 182 | }); 183 | 184 | it("should return error message if both header and filename eip number changes", () => { 185 | const fileDiff = FileDiffFactory({ 186 | base: { eipNum: 1, filenameEipNum: 1 }, 187 | head: { eipNum: 2, filenameEipNum: 2 } 188 | }); 189 | const res = assertConstantEipNumber(fileDiff); 190 | expect(res).toBeDefined(); 191 | }); 192 | }); 193 | 194 | describe("assertConstantStatus", () => { 195 | it("should return undefined if the status is constant", () => { 196 | const fileDiff = FileDiffFactory(); 197 | const res = assertConstantStatus(fileDiff); 198 | expect(res).toBeUndefined(); 199 | }); 200 | 201 | it("should return error message if status is not constant", () => { 202 | const fileDiff = FileDiffFactory({ 203 | head: { status: EipStatus.draft }, 204 | base: { status: EipStatus.review } 205 | }); 206 | const res = assertConstantStatus(fileDiff); 207 | expect(res).toBeDefined(); 208 | }); 209 | }); 210 | 211 | describe("assertValidStatus", () => { 212 | const allStatuses = Object.values(EipStatus); 213 | const validStatuses = [...ALLOWED_STATUSES] as EipStatus[]; 214 | const invalidStatuses = allStatuses.filter( 215 | (status) => !validStatuses.includes(status) 216 | ); 217 | 218 | for (const status of validStatuses) { 219 | it(`should NOT return error if status is ${status} in the head commit`, () => { 220 | const fileDiff = FileDiffFactory({ head: { status } }); 221 | const res = assertValidStatus(fileDiff); 222 | expect(res).toBeUndefined(); 223 | }); 224 | 225 | it(`should NOT return error if status is ${status} in the base commit`, () => { 226 | const fileDiff = FileDiffFactory({ base: { status } }); 227 | const res = assertValidStatus(fileDiff); 228 | expect(res).toBeUndefined(); 229 | }); 230 | } 231 | 232 | for (const status of invalidStatuses) { 233 | it(`should return error if status is ${status} in the head commit`, () => { 234 | const fileDiff = FileDiffFactory({ head: { status } }); 235 | const res = assertValidStatus(fileDiff); 236 | expect(res).toBeDefined(); 237 | }); 238 | 239 | it(`should return error if status is ${status} in the base commit`, () => { 240 | const fileDiff = 
FileDiffFactory({ base: { status } }); 241 | const res = assertValidStatus(fileDiff); 242 | expect(res).toBeDefined(); 243 | }); 244 | } 245 | }); 246 | }); 247 | -------------------------------------------------------------------------------- /src/modules/assertions/__tests__/assert_has_authors.test.ts: -------------------------------------------------------------------------------- 1 | import { FileDiffFactory } from "src/tests/factories/fileDiffFactory"; 2 | import { assertHasAuthors } from "#/assertions"; 3 | import { initGeneralTestEnv, mockGithubContext } from "src/tests/testutils"; 4 | import { EVENTS } from "src/domain"; 5 | 6 | describe("assertHasAuthors", () => { 7 | initGeneralTestEnv(); 8 | mockGithubContext({ 9 | payload: { 10 | pull_request: { 11 | number: 1 12 | } 13 | }, 14 | repo: { repo: "repo", owner: "owner" }, 15 | eventName: EVENTS.pullRequestTarget 16 | }); 17 | 18 | it("should return undefined when assert succeeds", () => { 19 | const fileDiff = FileDiffFactory(); 20 | const res = assertHasAuthors(fileDiff); 21 | // expect that no error occurs 22 | expect(res).toBeUndefined(); 23 | }); 24 | it("should return error if no authors", () => { 25 | const fileDiff = FileDiffFactory({ 26 | head: { authors: new Set() }, 27 | base: { authors: new Set() } 28 | }); 29 | const res = assertHasAuthors(fileDiff); 30 | // expect that an error occurs 31 | expect(res).toBeDefined(); 32 | }); 33 | it("should only consider the authors at the base commit", () => { 34 | const fileDiff = FileDiffFactory({ head: { authors: new Set() } }); 35 | const res = assertHasAuthors(fileDiff); 36 | // expect that no error occurs 37 | expect(res).toBeUndefined(); 38 | }); 39 | }); 40 | -------------------------------------------------------------------------------- /src/modules/assertions/__tests__/assert_valid_filename.test.ts: -------------------------------------------------------------------------------- 1 | import { initGeneralTestEnv, mockGithubContext } from "src/tests/testutils"; 2 | import { EVENTS, File } from "src/domain"; 3 | import { FileFactory } from "src/tests/factories/fileFactory"; 4 | import { AssertValidFilename } from "#/assertions/assert_valid_filename"; 5 | 6 | describe("require_file_preexisting", () => { 7 | mockGithubContext({ 8 | payload: { pull_request: { number: 1 } }, 9 | repo: { repo: "repo", owner: "owner" }, 10 | eventName: EVENTS.pullRequestTarget 11 | }); 12 | 13 | initGeneralTestEnv(); 14 | 15 | const requireFilenameEipNum = jest.fn(); 16 | const _AssertValidFilename = new AssertValidFilename({ 17 | requireFilenameEipNum 18 | }); 19 | 20 | beforeEach(async () => { 21 | requireFilenameEipNum.mockReturnValue(Promise.resolve(1)); 22 | }); 23 | 24 | it("should return undefined if filename is valid", async () => { 25 | const file = FileFactory(); 26 | const res = await _AssertValidFilename.assertValidFilename(file); 27 | expect(res).toBeUndefined(); 28 | }); 29 | 30 | it("should return defined if filename is not valid", async () => { 31 | const files = [ 32 | FileFactory({ filename: "eip-123" }), 33 | FileFactory({ filename: "ep-123.md" }), 34 | FileFactory({ filename: "eip-a.md" }), 35 | FileFactory({ filename: "eip-123.js" }) 36 | ]; 37 | expect( 38 | await _AssertValidFilename.assertValidFilename(files[0] as File) 39 | ).toBeDefined(); 40 | expect( 41 | await _AssertValidFilename.assertValidFilename(files[1] as File) 42 | ).toBeDefined(); 43 | expect( 44 | await _AssertValidFilename.assertValidFilename(files[2] as File) 45 | ).toBeDefined(); 46 | expect( 47 | await 
_AssertValidFilename.assertValidFilename(files[3] as File) 48 | ).toBeDefined(); 49 | }); 50 | }); 51 | -------------------------------------------------------------------------------- /src/modules/assertions/__tests__/require_authors.test.ts: -------------------------------------------------------------------------------- 1 | import { FileDiffFactory } from "src/tests/factories/fileDiffFactory"; 2 | import { requireAuthors } from "#/assertions"; 3 | import { 4 | expectError, 5 | initGeneralTestEnv, 6 | mockGithubContext 7 | } from "src/tests/testutils"; 8 | import { EVENTS } from "src/domain"; 9 | 10 | describe("requireAuthors", () => { 11 | initGeneralTestEnv(); 12 | mockGithubContext({ 13 | payload: { 14 | pull_request: { 15 | number: 1 16 | } 17 | }, 18 | repo: { repo: "repo", owner: "owner" }, 19 | eventName: EVENTS.pullRequestTarget 20 | }); 21 | 22 | it("returns authors", () => { 23 | const fileDiff = FileDiffFactory(); 24 | const authors = requireAuthors(fileDiff); 25 | // @ts-expect-error errors because authors can be undefined but it's not 26 | expect(authors).toEqual(Array.from(fileDiff.base.authors)); 27 | }); 28 | 29 | it("does not return head authors (only from base)", () => { 30 | const fileDiff = FileDiffFactory({ 31 | head: { authors: new Set(["fake"]) } 32 | }); 33 | const authors = requireAuthors(fileDiff); 34 | // @ts-expect-error errors because authors can be undefined but it's not 35 | expect(authors).toEqual(Array.from(fileDiff.base.authors)); 36 | }); 37 | 38 | it("explodes if no authors", () => { 39 | const fileDiff = FileDiffFactory({ 40 | head: { authors: new Set() }, 41 | base: { authors: new Set() } 42 | }); 43 | expectError(() => requireAuthors(fileDiff)); 44 | }); 45 | }); 46 | -------------------------------------------------------------------------------- /src/modules/assertions/__tests__/require_editors.test.ts: -------------------------------------------------------------------------------- 1 | import { 2 | EIPCategory, 3 | EIPTypeOrCategoryToResolver, 4 | EIPTypes, 5 | EVENTS, 6 | INFORMATIONAL_EDITORS, 7 | INTERFACE_EDITORS, 8 | META_EDITORS, 9 | NETWORKING_EDITORS 10 | } from "src/domain/Constants"; 11 | import { expectError, mockGithubContext } from "src/tests/testutils"; 12 | import { RequireEditors as _RequireEditors } from "#/assertions/require_editors"; 13 | import { CORE_EDITORS, ERC_EDITORS, FileDiff, EipStatus } from "src/domain"; 14 | import { FileDiffFactory } from "src/tests/factories/fileDiffFactory"; 15 | 16 | describe("_requireEIPEditors", () => { 17 | mockGithubContext({ 18 | payload: { pull_request: { number: 1 } }, 19 | repo: { repo: "repo", owner: "owner" }, 20 | eventName: EVENTS.pullRequestTarget 21 | }); 22 | 23 | const editors: [string, string, string] = ["editor1", "editor2", "editor3"]; 24 | 25 | const RequireEIPEditors = new _RequireEditors({ 26 | requireAuthors: jest.fn(), 27 | ERC_EDITORS, 28 | CORE_EDITORS, 29 | INFORMATIONAL_EDITORS, 30 | INTERFACE_EDITORS, 31 | META_EDITORS, 32 | NETWORKING_EDITORS 33 | }); 34 | const requireAuthorsSpy = jest.spyOn(RequireEIPEditors, "requireAuthors"); 35 | const consoleSpy = jest.spyOn(console, "warn"); 36 | 37 | beforeEach(async () => { 38 | requireAuthorsSpy.mockClear(); 39 | consoleSpy.mockClear(); 40 | }); 41 | 42 | afterAll(() => { 43 | consoleSpy.mockReset(); 44 | }); 45 | 46 | it("should emit a console warning if no file diff is provided", () => { 47 | const res = RequireEIPEditors._requireEIPEditors(editors); 48 | expect(res).toEqual(editors); 49 | 
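// Editorial sketch (not part of this test file): taken together, the cases in
// this describe block pin down roughly the following behavior for
// `_requireEIPEditors` (implemented in #/assertions/require_editors, not shown
// here): lowercase the configured editor handles; if no fileDiff is provided,
// warn and return all of them; if a fileDiff is provided, drop any editor who is
// also an author of that file (via `requireAuthors`).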
expect(consoleSpy).toHaveBeenCalledTimes(1); 50 | }); 51 | 52 | it("should return only editors that are not authors", () => { 53 | requireAuthorsSpy.mockReturnValueOnce([editors[0]]); 54 | const res = RequireEIPEditors._requireEIPEditors(editors, {} as FileDiff); 55 | expect(res).toEqual([editors[1], editors[2]]); 56 | expect(consoleSpy).not.toHaveBeenCalled(); 57 | }); 58 | 59 | it("should return all editors if none are authors", () => { 60 | requireAuthorsSpy.mockReturnValueOnce(["not an author"]); 61 | const res = RequireEIPEditors._requireEIPEditors(editors, {} as FileDiff); 62 | expect(res).toEqual(editors); 63 | expect(consoleSpy).not.toHaveBeenCalled(); 64 | }); 65 | 66 | it("should normalize editors to lowercase and no file diff provided", () => { 67 | const res = RequireEIPEditors._requireEIPEditors( 68 | editors.map((editor) => editor.toUpperCase()) 69 | ); 70 | expect(res).toEqual(editors); 71 | }); 72 | 73 | it("should normalize editors to lowercase and file diff provided", () => { 74 | requireAuthorsSpy.mockReturnValueOnce([editors[0]]); 75 | const res = RequireEIPEditors._requireEIPEditors( 76 | editors.map((i) => i.toUpperCase()), 77 | {} as FileDiff 78 | ); 79 | expect(res).toEqual([editors[1], editors[2]]); 80 | }); 81 | }); 82 | 83 | describe("requireEditors", () => { 84 | mockGithubContext({ 85 | payload: { pull_request: { number: 1 } }, 86 | repo: { repo: "repo", owner: "owner" }, 87 | eventName: EVENTS.pullRequestTarget 88 | }); 89 | 90 | const editors: [string, string, string] = ["editor1", "editor2", "editor3"]; 91 | 92 | const requireAuthors = jest.fn(); 93 | const RequireEditors = new _RequireEditors({ 94 | requireAuthors, 95 | ERC_EDITORS, 96 | CORE_EDITORS, 97 | INFORMATIONAL_EDITORS, 98 | INTERFACE_EDITORS, 99 | META_EDITORS, 100 | NETWORKING_EDITORS 101 | }); 102 | const _requireEIPEditorsMock = jest.fn(); 103 | RequireEditors._requireEIPEditors = _requireEIPEditorsMock; 104 | 105 | const requireAuthorsSpy = jest.spyOn(RequireEditors, "requireAuthors"); 106 | const consoleSpy = jest.spyOn(console, "warn"); 107 | 108 | const types = [EIPTypes.meta, EIPTypes.informational]; 109 | 110 | const categories = [ 111 | EIPCategory.erc, 112 | EIPCategory.core, 113 | EIPCategory.networking, 114 | EIPCategory.interface 115 | ]; 116 | 117 | beforeEach(async () => { 118 | requireAuthorsSpy.mockReset(); 119 | consoleSpy.mockClear(); 120 | 121 | for (const method of Object.values(EIPTypeOrCategoryToResolver)) { 122 | RequireEditors[method] = jest.fn(); 123 | } 124 | }); 125 | 126 | for (const category of categories) { 127 | it(`should call ${category} editor getter if fileDiff is of category ${category}`, () => { 128 | RequireEditors[EIPTypeOrCategoryToResolver[category]].mockReturnValue( 129 | editors 130 | ); 131 | RequireEditors.requireEIPEditors({ 132 | base: { category } 133 | } as FileDiff); 134 | expect( 135 | RequireEditors[EIPTypeOrCategoryToResolver[category]] 136 | ).toBeCalled(); 137 | }); 138 | } 139 | 140 | for (const type of types) { 141 | it(`should call ${type} editor getter if fileDiff is of category ${type}`, () => { 142 | RequireEditors[EIPTypeOrCategoryToResolver[type]].mockReturnValue( 143 | editors 144 | ); 145 | RequireEditors.requireEIPEditors({ 146 | base: { type } 147 | } as FileDiff); 148 | expect(RequireEditors[EIPTypeOrCategoryToResolver[type]]).toBeCalled(); 149 | }); 150 | } 151 | 152 | it("should explode if no valid category is given", async () => { 153 | await expectError(() => { 154 | // @ts-expect-error this is on purpose 155 | 
RequireEditors.requireEIPEditors({ 156 | base: { category: "fake category" } 157 | } as FileDiff); 158 | }); 159 | }); 160 | 161 | it("should ignore category and return all editors if status is living", async () => { 162 | const fileDiff = FileDiffFactory({ 163 | base: { 164 | // this should be ignored for eip 1 165 | category: EIPCategory.erc, 166 | status: EipStatus.living, 167 | filenameEipNum: 1234 168 | }, 169 | head: { 170 | // only the status should be considered, 171 | status: EipStatus.draft, 172 | filenameEipNum: 1234 173 | } 174 | }); 175 | 176 | _requireEIPEditorsMock.mockImplementation((input) => input); 177 | RequireEditors[ 178 | EIPTypeOrCategoryToResolver[EIPCategory.erc] 179 | ].mockReturnValue(["@1"]); 180 | RequireEditors[ 181 | EIPTypeOrCategoryToResolver[EIPCategory.core] 182 | ].mockReturnValue(["@2"]); 183 | const res = RequireEditors.requireEIPEditors(fileDiff); 184 | expect(res).toContainEqual("@1"); 185 | expect(res).toContainEqual("@2"); 186 | }); 187 | }); 188 | -------------------------------------------------------------------------------- /src/modules/assertions/__tests__/require_file_preexisting.test.ts: -------------------------------------------------------------------------------- 1 | import { 2 | expectError, 3 | initGeneralTestEnv, 4 | mockGithubContext 5 | } from "src/tests/testutils"; 6 | import { EVENTS } from "src/domain"; 7 | import { PRFactory } from "src/tests/factories/prFactory"; 8 | import { FileFactory } from "src/tests/factories/fileFactory"; 9 | import { RequireFilePreexisting } from "#/assertions/require_file_preexisting"; 10 | 11 | describe("require_file_preexisting", () => { 12 | mockGithubContext({ 13 | payload: { pull_request: { number: 1 } }, 14 | repo: { repo: "repo", owner: "owner" }, 15 | eventName: EVENTS.pullRequestTarget 16 | }); 17 | 18 | initGeneralTestEnv(); 19 | 20 | const getContentMock = jest.fn(); 21 | const requirePrMock = jest.fn(); 22 | const RequireFilePreexistingInstance = new RequireFilePreexisting( 23 | requirePrMock, 24 | getContentMock 25 | ); 26 | 27 | beforeEach(async () => { 28 | requirePrMock.mockReturnValue(Promise.resolve(await PRFactory())); 29 | getContentMock.mockReturnValue(Promise.resolve()); 30 | }); 31 | 32 | it("should return undefined if a file exists and is retrievable", async () => { 33 | const file = FileFactory(); 34 | const res = await RequireFilePreexistingInstance.requireFilePreexisting( 35 | file 36 | ); 37 | expect(res).toBe(file); 38 | }); 39 | 40 | it("should throw error if github request returns 404", async () => { 41 | const file = FileFactory(); 42 | getContentMock.mockReturnValueOnce(Promise.reject({ status: 404 })); 43 | await expectError(() => 44 | RequireFilePreexistingInstance.requireFilePreexisting(file) 45 | ); 46 | }); 47 | 48 | it("should not throw error if github request does NOT return 404 (but still an error)", async () => { 49 | const file = FileFactory(); 50 | getContentMock.mockReturnValueOnce(Promise.reject({ status: 403 })); 51 | const res = await RequireFilePreexistingInstance.requireFilePreexisting( 52 | file 53 | ); 54 | expect(res).toBe(file); 55 | }); 56 | 57 | it("should consider previous_filename", async () => { 58 | const file = FileFactory(); 59 | file.previous_filename = "previous"; 60 | file.filename = "now"; 61 | 62 | await RequireFilePreexistingInstance.requireFilePreexisting(file); 63 | expect(getContentMock.mock.calls[0][0]).toEqual("previous"); 64 | }); 65 | 66 | it("should consider filename if previous_filename is undefined", async () => { 67 | const 
file = FileFactory(); 68 | file.previous_filename = ""; 69 | file.filename = "now"; 70 | 71 | await RequireFilePreexistingInstance.requireFilePreexisting(file); 72 | expect(getContentMock.mock.calls[0][0]).toEqual("now"); 73 | }); 74 | 75 | it("should throw error if file status is `added`", async () => { 76 | const file = FileFactory(); 77 | file.status = "added"; 78 | await expectError(() => 79 | RequireFilePreexistingInstance.requireFilePreexisting(file) 80 | ); 81 | }); 82 | }); 83 | -------------------------------------------------------------------------------- /src/modules/assertions/__tests__/require_filename_eip_num.test.ts: -------------------------------------------------------------------------------- 1 | import { 2 | expectError, 3 | expectErrorWithHandler, 4 | initGeneralTestEnv, 5 | mockGithubContext 6 | } from "src/tests/testutils"; 7 | import { EVENTS } from "src/domain"; 8 | import { RequireFilenameEIPNum } from "#/assertions/require_filename_eip_num"; 9 | import { FileFactory } from "src/tests/factories/fileFactory"; 10 | import { PRFactory } from "src/tests/factories/prFactory"; 11 | import { Exceptions } from "src/domain/exceptions"; 12 | 13 | describe("requireFilenameEipNum", () => { 14 | mockGithubContext({ 15 | payload: { pull_request: { number: 1 } }, 16 | repo: { repo: "repo", owner: "owner" }, 17 | eventName: EVENTS.pullRequestTarget 18 | }); 19 | 20 | initGeneralTestEnv(); 21 | 22 | const requireEIPEditors = jest.fn(); 23 | const getPullRequestFiles = jest.fn(); 24 | const requirePr = jest.fn(); 25 | const getApprovals = jest.fn(); 26 | const getParsedContent = jest.fn(); 27 | const _RequireFilenameEIPNum = new RequireFilenameEIPNum({ 28 | requireEIPEditors, 29 | getPullRequestFiles, 30 | requirePr, 31 | getApprovals, 32 | getParsedContent 33 | }); 34 | 35 | const attemptEditorApproval = jest 36 | .fn() 37 | .mockImplementation( 38 | _RequireFilenameEIPNum.attemptEditorApprovalGracefulTermination 39 | ); 40 | const attemptAsset = jest 41 | .fn() 42 | .mockImplementation(_RequireFilenameEIPNum.attemptAssetGracefulTermination); 43 | 44 | _RequireFilenameEIPNum.attemptEditorApprovalGracefulTermination = 45 | attemptEditorApproval; 46 | _RequireFilenameEIPNum.attemptAssetGracefulTermination = attemptAsset; 47 | 48 | beforeEach(async () => { 49 | requireEIPEditors.mockReturnValue(["@test","@test3"]); 50 | getPullRequestFiles.mockResolvedValue(FileFactory()); 51 | requirePr.mockResolvedValue(await PRFactory()); 52 | // no approvals 53 | getApprovals.mockResolvedValue([]); 54 | }); 55 | 56 | it("should not error if filename matches regex", async () => { 57 | const eipNum = await _RequireFilenameEIPNum.requireFilenameEipNum( 58 | "eip-123.md" 59 | ); 60 | expect(eipNum).toBe(123); 61 | }); 62 | 63 | it("should explode if filename doesn't match", async () => { 64 | await expectError(() => 65 | _RequireFilenameEIPNum.requireFilenameEipNum("eip-123") 66 | ); 67 | await expectError(() => 68 | _RequireFilenameEIPNum.requireFilenameEipNum("ep-123.md") 69 | ); 70 | await expectError(() => 71 | _RequireFilenameEIPNum.requireFilenameEipNum("eip-a.md") 72 | ); 73 | await expectError(() => 74 | _RequireFilenameEIPNum.requireFilenameEipNum("eip-123.js") 75 | ); 76 | }); 77 | 78 | it("should attempt graceful termination if files don't match pattern", async () => { 79 | await _RequireFilenameEIPNum 80 | .requireFilenameEipNum("eip-dsd") 81 | .catch((_) => {}); 82 | expect(attemptEditorApproval).toBeCalledTimes(1); 83 | expect(attemptEditorApproval).toBeCalledTimes(1); 84 | 
expect(attemptEditorApproval).toBeCalledTimes(1); 85 | }); 86 | 87 | describe("graceful termination routes", () => { 88 | describe("editor approval", () => { 89 | it("should explode with graceful termination with editor approval", async () => { 90 | // this should return the names of two of the editors 91 | getApprovals.mockResolvedValue(["@test","@test3"]); 92 | await expectErrorWithHandler( 93 | () => 94 | _RequireFilenameEIPNum.attemptEditorApprovalGracefulTermination( 95 | "test" 96 | ), 97 | (err) => { 98 | expect(err.type).toBe(Exceptions.gracefulTermination); 99 | }, 100 | "should explode with graceful termination" 101 | ); 102 | }); 103 | 104 | it("should not explode with graceful if there's only one editor approval", async () => { 105 | // this should return the name of one editor 106 | getApprovals.mockResolvedValue(["@test3"]); 107 | await _RequireFilenameEIPNum.attemptEditorApprovalGracefulTermination( 108 | "test" 109 | ); 110 | }); 111 | 112 | it("should not explode with graceful if there's no editor approval", async () => { 113 | // this should return the name of one non-editor 114 | getApprovals.mockResolvedValue(["@test2"]); 115 | await _RequireFilenameEIPNum.attemptEditorApprovalGracefulTermination( 116 | "test" 117 | ); 118 | }); 119 | }); 120 | 121 | describe("related asset changes", () => { 122 | const expectGraceful = (filename: string) => { 123 | return expectErrorWithHandler( 124 | () => 125 | _RequireFilenameEIPNum.attemptAssetGracefulTermination(filename), 126 | (err) => { 127 | expect(err.type).toBe(Exceptions.gracefulTermination); 128 | }, 129 | "related asset changes" 130 | ); 131 | }; 132 | beforeEach(() => { 133 | getPullRequestFiles.mockResolvedValue([ 134 | FileFactory({ filename: "EIPs/eip-2.md" }) 135 | ]); 136 | }); 137 | it("should explode with graceful termination if the assets are allowed", async () => { 138 | await expectGraceful("assets/eip-2/test.md"); 139 | }); 140 | it("should fail if the file is not in assets folder", async () => { 141 | await expectError( 142 | () => expectGraceful("eip-1/test.md"), 143 | "should fail if the file is not in assets folder" 144 | ); 145 | }); 146 | it("should fail if the change is made to a file in a different eip folder", async () => { 147 | await expectError( 148 | () => expectGraceful("assets/eip-1/test.md"), 149 | "should fail if the change is made to a file in a different eip folder" 150 | ); 151 | }); 152 | }); 153 | }); 154 | 155 | describe("new file submission", () => { 156 | const attemptNewFile = (path: string = "path") => { 157 | return _RequireFilenameEIPNum.attemptNewFileNoEIPNumber(path); 158 | }; 159 | 160 | it("should rethrow error if not of known type", async () => { 161 | getParsedContent.mockRejectedValueOnce("erroorr"); 162 | await expectError( 163 | () => attemptNewFile(), 164 | "should rethrow error if not of known type" 165 | ); 166 | }); 167 | 168 | const notFoundError = { 169 | response: { 170 | status: 404, 171 | data: { 172 | message: "Not Found" 173 | } 174 | } 175 | }; 176 | 177 | it("should not throw if error is known file not found type", async () => { 178 | getParsedContent.mockRejectedValueOnce(notFoundError); 179 | expect(await attemptNewFile()).toBeUndefined(); 180 | }); 181 | 182 | it("should not throw exception if eip has a eip number", async () => { 183 | getParsedContent.mockRejectedValueOnce(notFoundError); 184 | expect(await attemptNewFile("EIPS/eip-4444.md")).toBeUndefined(); 185 | }); 186 | 187 | it("should not throw exception unless file is in EIPS folder and follows 
format", async () => { 188 | getParsedContent.mockRejectedValueOnce(notFoundError); 189 | expect(await attemptNewFile("assets/eip-draft_test.md")).toBeUndefined(); 190 | }); 191 | 192 | it("should throw requirement violation error", async () => { 193 | getParsedContent.mockRejectedValueOnce(notFoundError); 194 | const type = await attemptNewFile("EIPS/eip-draft_test.md").catch( 195 | (err) => err.type 196 | ); 197 | expect(type).toBe(Exceptions.requirementViolation); 198 | }); 199 | }); 200 | }); 201 | -------------------------------------------------------------------------------- /src/modules/assertions/assert_constant_eip_number.ts: -------------------------------------------------------------------------------- 1 | import { FileDiff } from "src/domain"; 2 | 3 | /** 4 | * asserts that eip number in both filename and header has not changed 5 | * 6 | * @returns error or undefined 7 | */ 8 | export const assertConstantEipNumber = ({ head, base }: FileDiff) => { 9 | const filenameNumMatches = base.filenameEipNum === head.filenameEipNum; 10 | const fileNumMatches = base.eipNum === head.eipNum; 11 | 12 | if (!(filenameNumMatches && fileNumMatches)) { 13 | return [ 14 | `Base EIP has number ${base.eipNum} which was changed`, 15 | `to head ${head.eipNum}; EIP number changing is not allowed` 16 | ].join(" "); 17 | } else return; 18 | }; 19 | -------------------------------------------------------------------------------- /src/modules/assertions/assert_constant_status.ts: -------------------------------------------------------------------------------- 1 | import { FileDiff } from "src/domain"; 2 | 3 | /** 4 | * assert that the status hasn't changed, if it hasn't changed then also 5 | * assert that the given status is one of the auto-mergable statuses 6 | * 7 | * @returns error or undefined 8 | */ 9 | export const assertConstantStatus = ({ head, base }: FileDiff) => { 10 | if (head.status !== base.status) { 11 | return [ 12 | `eip-${base.eipNum} state was changed from ${base.status}`, 13 | `to ${head.status}` 14 | ].join(" "); 15 | } else return; 16 | }; 17 | -------------------------------------------------------------------------------- /src/modules/assertions/assert_eip1_editor_approvals.ts: -------------------------------------------------------------------------------- 1 | import { requireEIPEditors } from "#/assertions"; 2 | import { EIP1_REQUIRED_EDITOR_APPROVALS, FileDiff } from "src/domain"; 3 | import { multiLineString } from "#/utils"; 4 | import { IAssertEIP1EditorApprovals } from "#/assertions/Domain/types"; 5 | 6 | export class AssertEIP1EditorApprovals implements IAssertEIP1EditorApprovals { 7 | constructor(public getApprovals: () => Promise) {} 8 | 9 | assertEIP1EditorApprovals = async (fileDiff: FileDiff) => { 10 | const approvals = await this.getApprovals(); 11 | 12 | const editors = requireEIPEditors(fileDiff); 13 | const editorApprovals = approvals.filter((approver) => 14 | editors.includes(approver) 15 | ); 16 | if (editorApprovals.length < EIP1_REQUIRED_EDITOR_APPROVALS) { 17 | return multiLineString(" ")( 18 | `Changes to EIP 1 require at least ${EIP1_REQUIRED_EDITOR_APPROVALS}`, 19 | `unique approvals from editors; there's currently ${editorApprovals.length} approvals;`, 20 | `the remaining editors are ${editors 21 | .filter((editor) => !editorApprovals.includes(editor)) 22 | .join(", ")}` 23 | ); 24 | } else return; 25 | }; 26 | } 27 | -------------------------------------------------------------------------------- /src/modules/assertions/assert_eip_editor_approval.ts: 
-------------------------------------------------------------------------------- 1 | import { FileDiff } from "src/domain"; 2 | import { getApprovals } from "#/approvals"; 3 | import { requireEIPEditors } from "#/assertions"; 4 | 5 | /** returns an error string if the PR does NOT have editor approval */ 6 | export const assertEIPEditorApproval = async (fileDiff: FileDiff) => { 7 | const approvals = await getApprovals(); 8 | const editors = requireEIPEditors(fileDiff); 9 | 10 | const isApproved = approvals.find((approver) => editors.includes(approver)); 11 | if (!isApproved) { 12 | return `This PR requires review from one of [${editors.join(", ")}]`; 13 | } else return; 14 | }; 15 | -------------------------------------------------------------------------------- /src/modules/assertions/assert_filename_and_file_numbers_match.ts: -------------------------------------------------------------------------------- 1 | import { FileDiff } from "src/domain"; 2 | 3 | /** 4 | * asserts that the eip number in the filename and in the header are the same 5 | * 6 | * @returns error or undefined 7 | */ 8 | export const assertFilenameAndFileNumbersMatch = ({ head, base }: FileDiff) => { 9 | const headMatchesSelf = head.filenameEipNum === head.eipNum; 10 | 11 | if (!headMatchesSelf) { 12 | return `EIP header in file ${head.name} does not match: ${base.name}`; 13 | } else return; 14 | }; 15 | -------------------------------------------------------------------------------- /src/modules/assertions/assert_has_authors.ts: -------------------------------------------------------------------------------- 1 | import { FileDiff } from "src/domain"; 2 | import { IAssertHasAuthors } from "#/assertions/Domain/types"; 3 | import { multiLineString } from "#/utils"; 4 | 5 | export class AssertHasAuthors implements IAssertHasAuthors { 6 | constructor() {} 7 | 8 | assertHasAuthors = (file: FileDiff) => { 9 | // take from base to avoid people adding themselves and being able to approve 10 | const authors = file.base.authors && [...file.base.authors]; 11 | 12 | // Make sure there are authors 13 | if (!authors || authors.length === 0) { 14 | return multiLineString(" ")( 15 | `${file.head.name} has no identifiable authors who`, 16 | `can approve the PR (only considering the base version)` 17 | ); 18 | } else return; 19 | }; 20 | } 21 | -------------------------------------------------------------------------------- /src/modules/assertions/assert_is_approved_by_authors.ts: -------------------------------------------------------------------------------- 1 | import { FileDiff } from "src/domain"; 2 | import { requireAuthors } from "#/assertions"; 3 | import { getApprovals } from "#/approvals"; 4 | 5 | export const assertIsApprovedByAuthors = async (fileDiff: FileDiff) => { 6 | const approvals = await getApprovals(); 7 | const authors = requireAuthors(fileDiff); 8 | 9 | // there exists an approver who is also an author 10 | const hasAuthorApproval = !!approvals.find((approver) => 11 | authors.includes(approver) 12 | ); 13 | 14 | if (!hasAuthorApproval) { 15 | return [ 16 | `${fileDiff.head.name} requires approval from one of`, 17 | `(${authors.join(", ")})` 18 | ].join(" "); 19 | } else return; 20 | }; 21 | -------------------------------------------------------------------------------- /src/modules/assertions/assert_valid_filename.ts: -------------------------------------------------------------------------------- 1 | import { File, FILE_RE } from "src/domain"; 2 | import { IAssertValidFilename } from "#/assertions/Domain/types"; 3 
| import { multiLineString } from "#/utils"; 4 | 5 | export class AssertValidFilename implements IAssertValidFilename { 6 | requireFilenameEipNum: (path: string) => Promise<number>; 7 | 8 | constructor({ 9 | requireFilenameEipNum 10 | }: { 11 | requireFilenameEipNum: (path: string) => Promise<number>; 12 | }) { 13 | this.requireFilenameEipNum = requireFilenameEipNum; 14 | } 15 | 16 | /** 17 | * Accepts a file and checks that its filename is valid 18 | * 19 | * @param file the file whose filename should be validated 20 | * @returns an error message if the filename is invalid, otherwise undefined 21 | */ 22 | assertValidFilename = async (file: NonNullable<File>) => { 23 | const filename = file.filename; 24 | 25 | // File name is formatted correctly and is in the EIPS folder 26 | const match = filename.search(FILE_RE); 27 | if (match === -1) { 28 | return multiLineString(" ")( 29 | `Filename ${filename} is not in EIP format 'EIPS/eip-####.md';`, 30 | `if this is a new submission (and prior to eip # being given) then`, 31 | `format your file like so 'eip-draft_{summary of eip}.md' (don't`, 32 | `include the braces)` 33 | ); 34 | } 35 | 36 | // EIP number is defined within the filename and can be parsed 37 | // filename is actually path when fetching directly 38 | const filenameEipNum = await this.requireFilenameEipNum(filename); 39 | if (!filenameEipNum) { 40 | return `No EIP number was found to be associated with filename ${filename}`; 41 | } 42 | 43 | return; 44 | }; 45 | } 46 | -------------------------------------------------------------------------------- /src/modules/assertions/assert_valid_status.ts: -------------------------------------------------------------------------------- 1 | import { ALLOWED_STATUSES, FileDiff } from "src/domain"; 2 | 3 | /** 4 | * determines if the status of either the base or the head is 5 | * not auto mergeable.
A non-auto mergeable status requires editor 6 | * approval 7 | * 8 | * @returns error or undefined 9 | */ 10 | export const assertValidStatus = ({ head, base }: FileDiff) => { 11 | const allowedStatus = [...ALLOWED_STATUSES].join(" or "); 12 | if (!ALLOWED_STATUSES.has(head.status)) { 13 | return [ 14 | `${head.name} is in state ${head.status} at the head commit,`, 15 | `not ${allowedStatus}; an EIP editor needs to approve this change` 16 | ].join(" "); 17 | } else if (!ALLOWED_STATUSES.has(base.status)) { 18 | const allowedStatus = [...ALLOWED_STATUSES].join(" or "); 19 | return [ 20 | `${base.name} is in state ${base.status} at the base commit,`, 21 | `not ${allowedStatus}; an EIP editor needs to approve this change` 22 | ].join(" "); 23 | } else return; 24 | }; 25 | -------------------------------------------------------------------------------- /src/modules/assertions/index.ts: -------------------------------------------------------------------------------- 1 | import { RequireAuthors } from "./require_authors"; 2 | import { RequireEditors } from "./require_editors"; 3 | import { RequireFilePreexisting } from "./require_file_preexisting"; 4 | import { 5 | castTo, 6 | CORE_EDITORS, 7 | ERC_EDITORS, 8 | FileDiff, 9 | INFORMATIONAL_EDITORS, 10 | INTERFACE_EDITORS, 11 | META_EDITORS, 12 | NETWORKING_EDITORS 13 | } from "src/domain"; 14 | import { requirePr } from "#/assertions/require_pr"; 15 | import { AssertValidFilename } from "#/assertions/assert_valid_filename"; 16 | import { RequireFilenameEIPNum } from "./require_filename_eip_num"; 17 | import { getApprovals } from "../approvals"; 18 | import { getParsedContent } from "../file/modules/get_parsed_content"; 19 | import { AssertHasAuthors } from "#/assertions/assert_has_authors"; 20 | import { AssertEIP1EditorApprovals } from "#/assertions/assert_eip1_editor_approvals"; 21 | import { github } from "src/infra"; 22 | 23 | export * from "./require_pull_number"; 24 | export * from "./require_pr"; 25 | export * from "./assert_is_approved_by_authors"; 26 | export * from "./require_files"; 27 | export * from "./assert_filename_and_file_numbers_match"; 28 | export * from "./assert_constant_eip_number"; 29 | export * from "./assert_valid_status"; 30 | export * from "./assert_eip_editor_approval"; 31 | export * from "./assert_constant_status"; 32 | export * from "./require_max_file_number"; 33 | 34 | const _RequireAuthors = new RequireAuthors(); 35 | export const requireAuthors = castTo( 36 | (...args) => { 37 | // @ts-ignore 38 | return _RequireAuthors.requireAuthors(...args); 39 | } 40 | ); 41 | 42 | const _RequireEIPEditors = new RequireEditors({ 43 | requireAuthors, 44 | ERC_EDITORS, 45 | CORE_EDITORS, 46 | INFORMATIONAL_EDITORS, 47 | INTERFACE_EDITORS, 48 | META_EDITORS, 49 | NETWORKING_EDITORS 50 | }); 51 | export const requireEIPEditors = (fileDiff?: FileDiff) => 52 | _RequireEIPEditors.requireEIPEditors(fileDiff); 53 | 54 | const _RequireFilePreexisting = new RequireFilePreexisting( 55 | requirePr, 56 | github.getRepoFilenameContent 57 | ); 58 | export const requireFilePreexisting = castTo< 59 | typeof _RequireFilePreexisting.requireFilePreexisting 60 | >((...args) => { 61 | // @ts-ignore 62 | return _RequireFilePreexisting.requireFilePreexisting(...args); 63 | }); 64 | 65 | const _RequireFilenameEIPNum = new RequireFilenameEIPNum({ 66 | getPullRequestFiles: github.getPullRequestFiles, 67 | requirePr, 68 | requireEIPEditors, 69 | getApprovals, 70 | getParsedContent 71 | }); 72 | export const requireFilenameEipNum = castTo< 73 | typeof 
_RequireFilenameEIPNum.requireFilenameEipNum 74 | >((...args) => { 75 | // @ts-ignore 76 | return _RequireFilenameEIPNum.requireFilenameEipNum(...args); 77 | }); 78 | 79 | const _AssertValidFilename = new AssertValidFilename({ 80 | requireFilenameEipNum 81 | }); 82 | export const assertValidFilename = castTo< 83 | typeof _AssertValidFilename.assertValidFilename 84 | >((...args) => { 85 | // @ts-ignore 86 | return _AssertValidFilename.assertValidFilename(...args); 87 | }); 88 | 89 | const _AssertHasAuthors = new AssertHasAuthors(); 90 | export const assertHasAuthors = castTo< 91 | typeof _AssertHasAuthors.assertHasAuthors 92 | >((...args) => { 93 | // @ts-ignore 94 | return _AssertHasAuthors.assertHasAuthors(...args); 95 | }); 96 | 97 | const _AssertEIP1EditorApprovals = new AssertEIP1EditorApprovals(getApprovals); 98 | export const assertEIP1EditorApprovals = castTo< 99 | typeof _AssertEIP1EditorApprovals.assertEIP1EditorApprovals 100 | >((...args) => { 101 | // @ts-ignore 102 | return _AssertEIP1EditorApprovals.assertEIP1EditorApprovals(...args); 103 | }); 104 | -------------------------------------------------------------------------------- /src/modules/assertions/require_authors.ts: -------------------------------------------------------------------------------- 1 | import { FileDiff } from "src/domain"; 2 | import { RequirementViolation } from "src/domain/exceptions"; 3 | import { IRequireAuthors } from "#/assertions/Domain/types"; 4 | 5 | export class RequireAuthors implements IRequireAuthors { 6 | constructor() {} 7 | 8 | requireAuthors = (fileDiff: FileDiff): string[] => { 9 | // take from base to avoid people adding themselves and being able to approve 10 | const authors = fileDiff.base.authors && [...fileDiff.base.authors]; 11 | 12 | // Make sure there are authors 13 | if (!authors || authors.length === 0) { 14 | throw new RequirementViolation( 15 | `${fileDiff.head.name} has no identifiable authors who can approve the PR (only considering the base version)` 16 | ); 17 | } 18 | 19 | return authors; 20 | }; 21 | } 22 | -------------------------------------------------------------------------------- /src/modules/assertions/require_editors.ts: -------------------------------------------------------------------------------- 1 | import { EIPCategory, EIPTypes, FileDiff, EipStatus } from "src/domain"; 2 | import { IRequireEditors } from "#/assertions/Domain/types"; 3 | import _ from "lodash"; 4 | import { RequirementViolation } from "src/domain/exceptions"; 5 | 6 | export class RequireEditors implements IRequireEditors { 7 | public requireAuthors: (fileDiff: FileDiff) => string[]; 8 | public ERC_EDITORS: () => string[]; 9 | public CORE_EDITORS: () => string[]; 10 | public INFORMATIONAL_EDITORS: () => string[]; 11 | public INTERFACE_EDITORS: () => string[]; 12 | public META_EDITORS: () => string[]; 13 | public NETWORKING_EDITORS: () => string[]; 14 | 15 | constructor({ 16 | requireAuthors, 17 | ERC_EDITORS, 18 | CORE_EDITORS, 19 | INFORMATIONAL_EDITORS, 20 | INTERFACE_EDITORS, 21 | META_EDITORS, 22 | NETWORKING_EDITORS 23 | }) { 24 | this.requireAuthors = requireAuthors; 25 | this.ERC_EDITORS = ERC_EDITORS; 26 | this.CORE_EDITORS = CORE_EDITORS; 27 | this.INFORMATIONAL_EDITORS = INFORMATIONAL_EDITORS; 28 | this.INTERFACE_EDITORS = INTERFACE_EDITORS; 29 | this.META_EDITORS = META_EDITORS; 30 | this.NETWORKING_EDITORS = NETWORKING_EDITORS; 31 | } 32 | 33 | // injected to make testing easier 34 | _requireEIPEditors(EDITORS: string[], fileDiff?: FileDiff) { 35 | EDITORS = 
_.uniq(EDITORS.map((i) => i.toLowerCase())); 36 | if (fileDiff) { 37 | const authors = this.requireAuthors(fileDiff); 38 | return EDITORS.filter((editor) => !authors.includes(editor)); 39 | } else { 40 | console.warn( 41 | [ 42 | "You are requesting all of the EIP_EDITORS, but an edgecase may exist where", 43 | "an editor is also an author; it's recommended that you instead request the", 44 | "editors with respect to a fileDiff" 45 | ].join(" ") 46 | ); 47 | return EDITORS; 48 | } 49 | } 50 | 51 | requireEIPEditors(fileDiff?: FileDiff) { 52 | const { 53 | ERC_EDITORS, 54 | CORE_EDITORS, 55 | INFORMATIONAL_EDITORS, 56 | INTERFACE_EDITORS, 57 | META_EDITORS, 58 | NETWORKING_EDITORS 59 | } = this; 60 | 61 | if (!fileDiff || fileDiff.base.status === EipStatus.living) { 62 | // if no fileDiff is provided (meaning it's a new file) then return all editors 63 | return this._requireEIPEditors( 64 | _.concat( 65 | ERC_EDITORS(), 66 | CORE_EDITORS(), 67 | NETWORKING_EDITORS(), 68 | INTERFACE_EDITORS(), 69 | META_EDITORS(), 70 | INFORMATIONAL_EDITORS() 71 | ) 72 | ); 73 | } 74 | 75 | const isERC = fileDiff.base.category === EIPCategory.erc; 76 | const isCore = fileDiff.base.category === EIPCategory.core; 77 | const isNetworking = fileDiff.base.category === EIPCategory.networking; 78 | const isInterface = fileDiff.base.category === EIPCategory.interface; 79 | const isMeta = fileDiff.base.type === EIPTypes.meta; 80 | const isInformational = fileDiff.base.type === EIPTypes.informational; 81 | 82 | if (isERC) { 83 | return this._requireEIPEditors(ERC_EDITORS(), fileDiff); 84 | } 85 | 86 | if (isCore) { 87 | return this._requireEIPEditors(CORE_EDITORS(), fileDiff); 88 | } 89 | 90 | if (isNetworking) { 91 | return this._requireEIPEditors(NETWORKING_EDITORS(), fileDiff); 92 | } 93 | 94 | if (isInterface) { 95 | return this._requireEIPEditors(INTERFACE_EDITORS(), fileDiff); 96 | } 97 | 98 | // these types need to be below category to prevent mismatching categories 99 | if (isMeta) { 100 | return this._requireEIPEditors(META_EDITORS(), fileDiff); 101 | } 102 | 103 | if (isInformational) { 104 | return this._requireEIPEditors(INFORMATIONAL_EDITORS(), fileDiff); 105 | } 106 | 107 | throw new RequirementViolation( 108 | [ 109 | `the fileDiff for '${fileDiff?.base.name}' with category '${fileDiff?.base.category}'`, 110 | `was neither seen to be a core or erc eip while fetching the editors. This should`, 111 | `never happen` 112 | ].join(" ") 113 | ); 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /src/modules/assertions/require_file_preexisting.ts: -------------------------------------------------------------------------------- 1 | import { ContentData, File, FileStatus, isDefined, PR } from "src/domain"; 2 | import { 3 | IRequireFilePreexisting, 4 | PreexistingFile 5 | } from "#/assertions/Domain/types"; 6 | import { RequirementViolation, UnexpectedError } from "src/domain/exceptions"; 7 | import { multiLineString } from "#/utils"; 8 | 9 | export class RequireFilePreexisting implements IRequireFilePreexisting { 10 | constructor( 11 | public requirePr: () => Promise, 12 | public getRepoFilenameContent: ( 13 | filename: string, 14 | sha: string 15 | ) => Promise 16 | ) {} 17 | 18 | /** 19 | * accepts a standard File object and throws an error if the status is new or 20 | * it does not exist at the base commit; uses the file's previous_filename if 21 | * it exists. 
22 | */ 23 | async requireFilePreexisting(file: File): Promise { 24 | const pr = await this.requirePr(); 25 | const filename = file.previous_filename || file.filename; 26 | 27 | if (!isDefined(filename)) { 28 | throw new UnexpectedError( 29 | multiLineString(" ")( 30 | `the file did not have a previous or current`, 31 | `filename associated with it` 32 | ), 33 | { 34 | pr, 35 | file 36 | } 37 | ); 38 | } 39 | 40 | const error = await this.getRepoFilenameContent( 41 | filename, 42 | pr.base.sha 43 | ).catch((err) => err); 44 | 45 | if ( 46 | (isDefined(error) && error.status === 404) || 47 | file.status === FileStatus.added 48 | ) { 49 | throw new RequirementViolation( 50 | multiLineString(" ")( 51 | `File with name ${filename} is new and new files must be reviewed` 52 | ) 53 | ); 54 | } 55 | 56 | return file as PreexistingFile; 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /src/modules/assertions/require_filename_eip_num.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ASSETS_EIP_NUM, 3 | EIP_NUM_RE, 4 | File, 5 | FILE_IN_EIP_FOLDER, 6 | FileDiff, 7 | isFileNotFound, 8 | ParsedContent, 9 | PR 10 | } from "src/domain"; 11 | import { 12 | GracefulTermination, 13 | RequirementViolation, 14 | UnexpectedError 15 | } from "src/domain/exceptions"; 16 | import { IRequireFilenameEIPNum } from "#/assertions/Domain/types"; 17 | import { multiLineString } from "../utils"; 18 | import _ from "lodash"; 19 | 20 | export class RequireFilenameEIPNum implements IRequireFilenameEIPNum { 21 | public getPullRequestFiles: (pullNumber: number) => Promise; 22 | public requirePr: () => Promise; 23 | public requireEIPEditors: (fileDiff?: FileDiff | undefined) => string[]; 24 | public getApprovals: () => Promise; 25 | public getParsedContent: ( 26 | filename: string, 27 | sha: string 28 | ) => Promise; 29 | 30 | constructor({ 31 | getPullRequestFiles, 32 | requirePr, 33 | requireEIPEditors, 34 | getApprovals, 35 | getParsedContent 36 | }: { 37 | getPullRequestFiles: (pullNumber: number) => Promise; 38 | requirePr: () => Promise; 39 | requireEIPEditors: (fileDiff?: FileDiff | undefined) => string[]; 40 | getApprovals: () => Promise; 41 | getParsedContent: (filename: string, sha: string) => Promise; 42 | }) { 43 | this.getPullRequestFiles = getPullRequestFiles; 44 | this.requirePr = requirePr; 45 | this.requireEIPEditors = requireEIPEditors; 46 | this.getApprovals = getApprovals; 47 | this.getParsedContent = getParsedContent; 48 | } 49 | 50 | public attemptAssetGracefulTermination = async (path: string) => { 51 | if (!ASSETS_EIP_NUM.test(path)) { 52 | return; 53 | } 54 | 55 | const assetEipNumMatch = path.match(ASSETS_EIP_NUM); 56 | if (!assetEipNumMatch || assetEipNumMatch[1] === undefined) { 57 | throw new UnexpectedError( 58 | multiLineString(" ")( 59 | `The filename '${path}' is seen to match an asset file but`, 60 | `the extracted eip number is undefined` 61 | ) 62 | ); 63 | } 64 | const assetEipNum = parseInt(assetEipNumMatch[1]); 65 | const pr = await this.requirePr(); 66 | const files = await this.getPullRequestFiles(pr.number); 67 | 68 | const filenames = files.map((file) => file.filename); 69 | 70 | for (const otherFilename of filenames) { 71 | // if other filename is same as current one then skip 72 | if (otherFilename === path) { 73 | continue; 74 | } 75 | 76 | // if the filename doesn't match to an eip number skip 77 | const eipNumMatch = otherFilename.match(EIP_NUM_RE); 78 | if (!eipNumMatch || 
eipNumMatch[1] === undefined) { 79 | continue; 80 | } 81 | 82 | const eipNum = parseInt(eipNumMatch[1]); 83 | if (eipNum === assetEipNum) { 84 | throw new GracefulTermination( 85 | multiLineString(" ")( 86 | `file ${path} is associated with EIP ${assetEipNum}; because`, 87 | `there are also changes being made to ${otherFilename} all changes`, 88 | `to corresponding assets are also allowed` 89 | ) 90 | ); 91 | } 92 | } 93 | throw new RequirementViolation( 94 | multiLineString(" ")( 95 | `file ${path} is associated with EIP ${assetEipNum} but there`, 96 | `are no changes being made to corresponding EIP itself. To assure`, 97 | `that the change is authorized by the relevant stake-holders, you must`, 98 | `also make changes to the EIP file itself for the asset changes to`, 99 | `be eligible for auto-merge` 100 | ) 101 | ); 102 | }; 103 | 104 | public attemptEditorApprovalGracefulTermination = async ( 105 | filename: string 106 | ) => { 107 | const editorApprovals = this.requireEIPEditors(); 108 | const approvals = await this.getApprovals(); 109 | 110 | const isEditorApproved = 111 | _.intersection(editorApprovals, approvals).length >= 2; 112 | if (isEditorApproved) { 113 | throw new GracefulTermination( 114 | multiLineString(" ")( 115 | `file ${filename} is not a valid filename, but this error has been`, 116 | `ignored due to editor approvals` 117 | ) 118 | ); 119 | } 120 | }; 121 | 122 | attemptNewFileNoEIPNumber = async (path?: string) => { 123 | if (!path) return; 124 | const PR = await this.requirePr(); 125 | 126 | let isNewFile = await this.getParsedContent(path, PR.base.sha) 127 | .then((res) => false) 128 | .catch((err) => { 129 | if (isFileNotFound(err)) { 130 | return true; 131 | } 132 | throw err; 133 | }); 134 | 135 | // if it's not a new file then the edgecase doesn't apply 136 | if (!isNewFile) { 137 | return; 138 | } 139 | 140 | const hasEIPNumber = EIP_NUM_RE.test(path); 141 | // this edgecase is only relevant if the filename is not in expected format 142 | if (hasEIPNumber) { 143 | return; 144 | } 145 | 146 | // this only applies to files in the eips folder 147 | const isInEIPSFolder = FILE_IN_EIP_FOLDER.test(path); 148 | if (!isInEIPSFolder) { 149 | return; 150 | } 151 | 152 | const editors = this.requireEIPEditors(); 153 | 154 | throw new RequirementViolation( 155 | multiLineString(" ")( 156 | `file '${path}' is not a valid eip file name;`, 157 | `all eip files need to be in eip-####.md format. It's assumed`, 158 | `however that this has been included because an eip number`, 159 | `has not been provided for this eip yet. 
cc ${editors.join(",")}` 160 | ) 161 | ); 162 | }; 163 | 164 | /** 165 | * Extracts the EIP number from a given file path (throws if none can be resolved) 166 | * @param path path of the EIP file 167 | */ 168 | requireFilenameEipNum = async (path: string) => { 169 | const eipNumMatch = path.match(EIP_NUM_RE); 170 | if (!eipNumMatch || eipNumMatch[1] === undefined) { 171 | await this.attemptAssetGracefulTermination(path); 172 | await this.attemptEditorApprovalGracefulTermination(path); 173 | await this.attemptNewFileNoEIPNumber(path); 174 | throw new RequirementViolation( 175 | `'${path}' must be in eip-####.md format; this error will be overwritten upon relevant editor approval` 176 | ); 177 | } 178 | return eipNumMatch && parseInt(eipNumMatch[1]); 179 | }; 180 | } 181 | -------------------------------------------------------------------------------- /src/modules/assertions/require_files.ts: -------------------------------------------------------------------------------- 1 | import { Files, PR } from "src/domain"; 2 | import { RequirementViolation } from "src/domain/exceptions"; 3 | import { github } from "src/infra/github"; 4 | 5 | /** 6 | * compares the diff between the base commit of the PR and 7 | * the head commit; if no files were found then it will explode 8 | * 9 | * @returns {Files} 10 | */ 11 | export const requireFiles = async (pr: PR): Promise<Files> => { 12 | const files = await github.getPullRequestFiles(pr.number); 13 | 14 | if (!files?.length) { 15 | throw new RequirementViolation( 16 | [ 17 | "There were no files found to be associated", 18 | "with the PR within context" 19 | ].join(" ") 20 | ); 21 | } 22 | 23 | return files; 24 | }; 25 | -------------------------------------------------------------------------------- /src/modules/assertions/require_max_file_number.ts: -------------------------------------------------------------------------------- 1 | import { PR } from "src/domain"; 2 | import { github } from "src/infra/github"; 3 | 4 | class TooManyFilesError extends Error { 5 | constructor(message: string) { 6 | super(message); 7 | this.name = "TooManyFilesError"; 8 | } 9 | } 10 | 11 | export const requireMaxFileNumber = async (pr: PR) => { 12 | const max_files_allowed = 25; 13 | const files = await github.getPullRequestFiles(pr.number); 14 | if ((files?.length ?? 0) > max_files_allowed) { 15 | throw new TooManyFilesError(`Critical error: Number of PR Files > ${max_files_allowed}`); 16 | } 17 | }; 18 | -------------------------------------------------------------------------------- /src/modules/assertions/require_pr.ts: -------------------------------------------------------------------------------- 1 | import { requirePullNumber } from "#/assertions"; 2 | import { PR } from "src/domain"; 3 | import { CriticalError } from "src/domain/exceptions"; 4 | import { github } from "src/infra"; 5 | 6 | export const requirePr = async (): Promise<PR> => { 7 | const prNum = requirePullNumber(); 8 | const pr = await github.getPullRequestFromNumber(prNum); 9 | 10 | if (pr.merged && process.env.NODE_ENV !== "development") { 11 | throw new CriticalError(`PR ${prNum} is already merged; quitting`); 12 | } 13 | 14 | return pr; 15 | }; 16 | -------------------------------------------------------------------------------- /src/modules/assertions/require_pull_number.ts: -------------------------------------------------------------------------------- 1 | import { CriticalError } from "src/domain/exceptions"; 2 | import { github } from "src/infra"; 3 | 4 | export const requirePullNumber = () => { 5 | const pullNumber =
github.getPullNumber(); 6 | 7 | if (!pullNumber) { 8 | throw new CriticalError( 9 | "Build does not have a PR number associated with it; quitting..." 10 | ); 11 | } 12 | 13 | return pullNumber; 14 | }; 15 | -------------------------------------------------------------------------------- /src/modules/file/domain/types.ts: -------------------------------------------------------------------------------- 1 | import { File, FileDiff, FormattedFile, ParsedContent } from "src/domain"; 2 | 3 | export interface IFileDiff { 4 | getFileDiff: (file: NonNullable) => Promise; 5 | 6 | formatFile: (file: ParsedContent) => Promise; 7 | 8 | getParsedContent: (filename: string, sha: string) => Promise; 9 | 10 | getAuthors: (rawAuthorList?: string) => Promise>; 11 | } 12 | -------------------------------------------------------------------------------- /src/modules/file/index.ts: -------------------------------------------------------------------------------- 1 | import { FileDiffInfra } from "#/file/modules/file_diff_infra"; 2 | import { requireFilenameEipNum, requirePr } from "#/assertions"; 3 | import { PropsValue } from "src/domain"; 4 | import { getParsedContent } from "#/file/modules/get_parsed_content"; 5 | 6 | const _FileDiffInfra_ = new FileDiffInfra( 7 | requireFilenameEipNum, 8 | requirePr, 9 | getParsedContent 10 | ); 11 | 12 | export const getFileDiff = ( 13 | ...args: PropsValue 14 | ) => { 15 | return _FileDiffInfra_.getFileDiff(...args); 16 | }; 17 | -------------------------------------------------------------------------------- /src/modules/file/modules/file_diff_infra.ts: -------------------------------------------------------------------------------- 1 | import { 2 | assertCategory, 3 | AUTHOR_RE, 4 | File, 5 | FileDiff, 6 | FormattedFile, 7 | FrontMatterAttributes, 8 | isDefined, 9 | isNockNoMatchingRequest, 10 | matchAll, 11 | ParsedContent, 12 | PR 13 | } from "src/domain"; 14 | import { IFileDiff } from "#/file/domain/types"; 15 | import { github } from "src/infra"; 16 | 17 | export class FileDiffInfra implements IFileDiff { 18 | constructor( 19 | public requireFilenameEipNum: (path: string) => Promise, 20 | public requirePr: () => Promise, 21 | public getParsedContent: ( 22 | filename: string, 23 | sha: string 24 | ) => Promise 25 | ) {} 26 | 27 | /** 28 | * Accepts a file and returns the information of that file at the beginning 29 | * and current state of the PR; can be used to verify changes 30 | * 31 | * @param file given file name + diff to be done 32 | * @returns the formatted file content at the head and base of the PR 33 | */ 34 | getFileDiff = async (file: NonNullable): Promise => { 35 | const pr = await this.requirePr(); 36 | const filename = file.filename; 37 | 38 | // Get and parse head and base file 39 | const head = await this.getParsedContent(filename, pr.head.sha); 40 | // if the base file is new this will error, so use head instead 41 | const base = await this.getParsedContent(filename, pr.base.sha).catch( 42 | (err) => { 43 | const shouldAddToRecords = isNockNoMatchingRequest(err); 44 | if (shouldAddToRecords) { 45 | throw err; 46 | } 47 | return head; 48 | } 49 | ); 50 | 51 | // Organize information cleanly 52 | return { 53 | head: await this.formatFile(head), 54 | base: await this.formatFile(base) 55 | }; 56 | }; 57 | 58 | formatFile = async (file: ParsedContent): Promise => { 59 | const filenameEipNum = await this.requireFilenameEipNum(file.path); 60 | 61 | return { 62 | eipNum: file.content.attributes[FrontMatterAttributes.eip], 63 | status: 64 | 
file.content.attributes[FrontMatterAttributes.status]?.toLowerCase(), 65 | authors: await this.getAuthors( 66 | file.content.attributes[FrontMatterAttributes.author] 67 | ), 68 | name: file.name, 69 | filenameEipNum, 70 | category: assertCategory({ 71 | maybeCategory: file.content.attributes[FrontMatterAttributes.category], 72 | fileName: file.name, 73 | maybeType: file.content.attributes[FrontMatterAttributes.type] 74 | }).category, 75 | type: assertCategory({ 76 | maybeCategory: file.content.attributes[FrontMatterAttributes.category], 77 | fileName: file.name, 78 | maybeType: file.content.attributes[FrontMatterAttributes.type] 79 | }).type 80 | }; 81 | }; 82 | 83 | getAuthors = async (rawAuthorList?: string) => { 84 | if (!rawAuthorList) return; 85 | 86 | const resolveAuthor = async (author: string) => { 87 | if (author[0] === "@") { 88 | return author.toLowerCase(); 89 | } else { 90 | // Email address 91 | const queriedUser = await github.resolveUserByEmail(author); 92 | if (!queriedUser) return; 93 | return queriedUser.toLowerCase(); 94 | } 95 | }; 96 | 97 | const authors = matchAll(rawAuthorList, AUTHOR_RE, 1); 98 | const resolved = await Promise.all(authors.map(resolveAuthor)).then((res) => 99 | res.filter(isDefined) 100 | ); 101 | return new Set(resolved); 102 | }; 103 | } 104 | -------------------------------------------------------------------------------- /src/modules/file/modules/get_parsed_content.ts: -------------------------------------------------------------------------------- 1 | import { ContentFile, ParsedContent, requireEncoding } from "src/domain"; 2 | import { UnexpectedError } from "src/domain/exceptions"; 3 | import frontmatter from "front-matter"; 4 | import { github } from "src/infra/github"; 5 | 6 | export const getParsedContent = async ( 7 | filename: string, 8 | sha: string 9 | ): Promise => { 10 | const decodeData = (data: ContentFile) => { 11 | const encoding = data.encoding; 12 | requireEncoding(encoding, filename); 13 | return Buffer.from(data.content, encoding).toString(); 14 | }; 15 | 16 | // Collect the file contents at the given sha reference frame 17 | const data = await github 18 | .getRepoFilenameContent(filename, sha) 19 | .then((res) => res as ContentFile); 20 | 21 | // Assert type assumptions 22 | if (!data?.path) { 23 | throw new UnexpectedError( 24 | `requested file ${filename} at ref sha ${sha} has no path` 25 | ); 26 | } 27 | if (!data?.name) { 28 | throw new UnexpectedError( 29 | `requested file ${filename} at ref sha ${sha} has no name` 30 | ); 31 | } 32 | if (!data?.content) { 33 | console.warn(`requested file ${filename} at ref sha ${sha} contains no content`); 34 | return { 35 | path: data.path, 36 | name: data.name, 37 | content: frontmatter('') 38 | }; 39 | } 40 | 41 | // Return parsed information 42 | return { 43 | path: data.path, 44 | name: data.name, 45 | content: frontmatter(decodeData(data)) 46 | }; 47 | }; 48 | -------------------------------------------------------------------------------- /src/modules/main/modules/get_comment_message.ts: -------------------------------------------------------------------------------- 1 | import { COMMENT_HEADER, Results } from "src/domain"; 2 | 3 | export const getCommentMessage = (results: Results, header?: string) => { 4 | if (!results.length) return "There were no results cc @alita-moore"; 5 | const comment: string[] = []; 6 | 7 | comment.push(header || COMMENT_HEADER); 8 | comment.push("---"); 9 | for (const { filename, errors, successMessage, type } of results) { 10 | const classification 
= () => { 11 | comment.push(`| classification |`); 12 | comment.push(`| ------------- |`); 13 | comment.push(`| \`${type}\` |`); 14 | }; 15 | 16 | if (!errors) { 17 | comment.push(`## (pass) ${filename}`); 18 | classification(); 19 | const message = `- ` + (successMessage || "passed!"); 20 | comment.push(message); 21 | continue; 22 | } 23 | 24 | comment.push(`## (fail) ${filename}`); 25 | classification(); 26 | for (const error of errors) { 27 | comment.push(`- ${error}`); 28 | } 29 | } 30 | return comment.join("\n"); 31 | }; 32 | -------------------------------------------------------------------------------- /src/modules/main/modules/get_type/__tests__/getType.test.ts: -------------------------------------------------------------------------------- 1 | import { __Filters__, getType } from "#/main/modules/get_type"; 2 | import _ from "lodash"; 3 | import { testResultsFactory } from "src/tests/factories/testResultsFactory"; 4 | import { convertTrueToStringOnLeafs } from "src/tests/testutils"; 5 | import { ChangeTypes } from "src/domain"; 6 | import { getAllTruthyObjectPaths } from "#/utils"; 7 | 8 | const Filters = _.mapValues(__Filters__, (val) => { 9 | return convertTrueToStringOnLeafs(val); 10 | }); 11 | 12 | describe("getType", () => { 13 | it("should return status change if matching errors", () => { 14 | const results = testResultsFactory({ 15 | errors: Filters.statusChange 16 | }); 17 | 18 | const res = getType(results); 19 | expect(res).toBe(ChangeTypes.statusChange); 20 | }); 21 | 22 | it("should return new eip file if matching errors", () => { 23 | const results = testResultsFactory({ 24 | errors: Filters.newEIPFile 25 | }); 26 | 27 | const res = getType(results); 28 | expect(res).toBe(ChangeTypes.newEIPFile); 29 | }); 30 | 31 | it("should return update eip if matching errors", () => { 32 | const results = testResultsFactory({ 33 | errors: Filters.updateEIP 34 | }); 35 | 36 | const res = getType(results); 37 | expect(res).toBe(ChangeTypes.updateEIP); 38 | }); 39 | 40 | it("should return ambiguous if none match", () => { 41 | const results = testResultsFactory({ 42 | errors: _.merge(Filters.newEIPFile, Filters.statusChange) 43 | }); 44 | 45 | const res = getType(results); 46 | expect(res).toBe(ChangeTypes.ambiguous); 47 | }); 48 | }); 49 | 50 | describe("type definitions", () => { 51 | // A and B are valid if at least one must have matches with a must not 52 | const combinations = Object.keys(__Filters__).flatMap((first, i, arr) => { 53 | return arr.slice(i + 1).flatMap((second) => { 54 | return [ 55 | [first, second], 56 | [second, first] 57 | ] as [ChangeTypes, ChangeTypes][]; 58 | }); 59 | }); 60 | 61 | for (const combo of combinations) { 62 | test(`combo ${combo.join(" => ")} should be valid`, () => { 63 | const A = __Filters__[combo[0]]; 64 | const B = __Filters__[combo[1]]; 65 | const paths = getAllTruthyObjectPaths(A); 66 | for (const path of paths) { 67 | // if one is true and other false, they cannot coincide 68 | expect(_.get(B, path)).toBe(false); 69 | } 70 | }); 71 | } 72 | }); 73 | -------------------------------------------------------------------------------- /src/modules/main/modules/get_type/index.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ChangeTypes, 3 | ERRORS_TYPE_FILTER, 4 | isChangeType, 5 | isDefined, 6 | TestResults 7 | } from "src/domain"; 8 | import { newEIPFile } from "#/main/modules/get_type/new_eip_file"; 9 | import { statusChange } from "#/main/modules/get_type/status_change"; 10 | import { 
updateEIP } from "#/main/modules/get_type/update_eip"; 11 | import _ from "lodash"; 12 | import { 13 | getAllFalseObjectPaths, 14 | getAllTruthyObjectPaths, 15 | multiLineString 16 | } from "#/utils"; 17 | import { getLogs } from "./logs"; 18 | import { UnexpectedError } from "src/domain/exceptions"; 19 | 20 | const Logs = getLogs(); 21 | 22 | const Filters = { 23 | [ChangeTypes.newEIPFile]: newEIPFile, 24 | [ChangeTypes.statusChange]: statusChange, 25 | [ChangeTypes.updateEIP]: updateEIP 26 | }; 27 | 28 | // for tests 29 | export const __Filters__ = Filters; 30 | 31 | export const getType = (result: TestResults): ChangeTypes => { 32 | const results = _.reduce( 33 | Filters, 34 | function (arr, val, key) { 35 | Logs.typeCheckingHeader(isChangeType(key) ? key : ChangeTypes.ambiguous); 36 | const res = testFilter(val, result); 37 | if (res) { 38 | return [...arr, key]; 39 | } 40 | return arr; 41 | }, 42 | [] as string[] 43 | ); 44 | 45 | if (results.length === 1) { 46 | const type = results[0]!; 47 | if (isChangeType(type)) { 48 | Logs.isType(type); 49 | return type; 50 | } 51 | } 52 | Logs.noMatchingTypes(); 53 | 54 | if (results.length > 2) { 55 | throw new UnexpectedError( 56 | multiLineString(" ")( 57 | "this change meets the criteria for more than one type, which", 58 | `should never happen || [${results.join(", ")}]` 59 | ) 60 | ); 61 | } 62 | 63 | // this captures all edgecases 64 | return ChangeTypes.ambiguous; 65 | }; 66 | 67 | const testFilter = ( 68 | filter: ERRORS_TYPE_FILTER, 69 | result: TestResults 70 | ): boolean => { 71 | const paths = { 72 | mustNotHave: getAllFalseObjectPaths(filter), 73 | mustHave: getAllTruthyObjectPaths(filter) 74 | }; 75 | 76 | let violations = { 77 | mustNotHave: [] as string[], 78 | mustHave: [] as string[] 79 | }; 80 | 81 | Logs.mustHaveHeader(); 82 | for (const path of paths.mustHave) { 83 | const value = _.get(result.errors, path); 84 | 85 | if (!isDefined(value)) { 86 | violations.mustHave.push(path); 87 | Logs.pathViolation(path); 88 | } 89 | } 90 | 91 | Logs.mustNotHaveHeader(); 92 | for (const path of paths.mustNotHave) { 93 | const value = _.get(result.errors, path); 94 | 95 | if (isDefined(value)) { 96 | violations.mustNotHave.push(path); 97 | Logs.pathViolation(path); 98 | } 99 | } 100 | 101 | return _.every(_.map(violations, (err) => _.isEmpty(err))); 102 | }; 103 | -------------------------------------------------------------------------------- /src/modules/main/modules/get_type/logs.ts: -------------------------------------------------------------------------------- 1 | import { ChangeTypes } from "src/domain"; 2 | 3 | export const getLogs = () => { 4 | return { 5 | typeCheckingHeader: (type: ChangeTypes) => { 6 | console.log(`#### Testing For Type ${type} ####`); 7 | }, 8 | noMatchingTypes: () => { 9 | console.log("There were no matching types"); 10 | }, 11 | mustHaveHeader: () => { 12 | console.log(`-- Testing Must Have --`); 13 | }, 14 | pathViolation: (path: string) => { 15 | console.log(`\t violation: ${path}`); 16 | }, 17 | mustNotHaveHeader: () => { 18 | console.log(`-- Testing Must Not Have --`); 19 | }, 20 | isType: (type: ChangeTypes) => { 21 | console.log(`!! 
is type ${type}`); 22 | } 23 | }; 24 | }; 25 | -------------------------------------------------------------------------------- /src/modules/main/modules/get_type/new_eip_file.ts: -------------------------------------------------------------------------------- 1 | import { ERRORS_TYPE_FILTER } from "src/domain"; 2 | 3 | export const newEIPFile: ERRORS_TYPE_FILTER = { 4 | fileErrors: { 5 | validFilenameError: null, 6 | filePreexistingError: true 7 | }, 8 | headerErrors: { 9 | matchingEIPNumError: null, 10 | constantEIPNumError: null, 11 | constantStatusError: false, 12 | validStatusError: null 13 | }, 14 | authorErrors: { 15 | hasAuthorsError: null 16 | }, 17 | approvalErrors: { 18 | isAuthorApprovedError: null, 19 | isEditorApprovedError: null, 20 | enoughEditorApprovalsForEIP1Error: null 21 | } 22 | }; 23 | -------------------------------------------------------------------------------- /src/modules/main/modules/get_type/status_change.ts: -------------------------------------------------------------------------------- 1 | import { ERRORS_TYPE_FILTER } from "src/domain"; 2 | 3 | export const statusChange: ERRORS_TYPE_FILTER = { 4 | fileErrors: { 5 | validFilenameError: false, 6 | filePreexistingError: false 7 | }, 8 | headerErrors: { 9 | matchingEIPNumError: false, 10 | constantEIPNumError: false, 11 | constantStatusError: true, 12 | validStatusError: null 13 | }, 14 | authorErrors: { 15 | hasAuthorsError: null 16 | }, 17 | approvalErrors: { 18 | isAuthorApprovedError: null, 19 | isEditorApprovedError: null, 20 | enoughEditorApprovalsForEIP1Error: null 21 | } 22 | }; 23 | -------------------------------------------------------------------------------- /src/modules/main/modules/get_type/update_eip.ts: -------------------------------------------------------------------------------- 1 | import { ERRORS_TYPE_FILTER } from "src/domain"; 2 | 3 | export const updateEIP: ERRORS_TYPE_FILTER = { 4 | fileErrors: { 5 | validFilenameError: null, 6 | filePreexistingError: false 7 | }, 8 | headerErrors: { 9 | matchingEIPNumError: null, 10 | constantEIPNumError: null, 11 | constantStatusError: false, 12 | validStatusError: null 13 | }, 14 | authorErrors: { 15 | hasAuthorsError: null 16 | }, 17 | approvalErrors: { 18 | isAuthorApprovedError: null, 19 | isEditorApprovedError: null, 20 | enoughEditorApprovalsForEIP1Error: null 21 | } 22 | }; 23 | -------------------------------------------------------------------------------- /src/modules/main/modules/purify_test_results.ts: -------------------------------------------------------------------------------- 1 | import { Result, TestResults } from "src/domain"; 2 | import { 3 | editorApprovalPurifier, 4 | EIP1Purifier, 5 | statusChangeAllowedPurifier, 6 | withdrawnExceptionPurifier 7 | } from "#/purifiers"; 8 | import { getAllTruthyObjectPaths, innerJoinAncestors } from "#/utils"; 9 | import { get } from "lodash"; 10 | import { getType } from "./get_type"; 11 | 12 | export const purifyTestResults = async ( 13 | dirtyTestResults: TestResults 14 | ): Promise => { 15 | // Apply independent purifiers 16 | const primedPurifiers = [ 17 | statusChangeAllowedPurifier(dirtyTestResults), 18 | editorApprovalPurifier(dirtyTestResults), 19 | EIP1Purifier(dirtyTestResults), 20 | withdrawnExceptionPurifier(dirtyTestResults) 21 | ]; 22 | 23 | // Purify the dirty results 24 | const testResults = innerJoinAncestors(dirtyTestResults, primedPurifiers); 25 | const errors: string[] = getAllTruthyObjectPaths(testResults.errors).map( 26 | (path) => get(testResults.errors, path) 27 | ); 
28 | 29 | const type = getType(testResults); 30 | if (errors.length === 0) { 31 | console.log(`${testResults.fileDiff.base.name} passed!`); 32 | return { 33 | filename: testResults.fileDiff.base.name, 34 | type 35 | }; 36 | } 37 | 38 | return { 39 | filename: testResults.fileDiff.base.name, 40 | errors, 41 | type 42 | }; 43 | }; 44 | -------------------------------------------------------------------------------- /src/modules/main/modules/test_file.ts: -------------------------------------------------------------------------------- 1 | import { DEFAULT_ERRORS, File, TestResults } from "src/domain"; 2 | import { getFileDiff } from "#/file"; 3 | import { 4 | assertConstantEipNumber, 5 | assertConstantStatus, 6 | assertEIP1EditorApprovals, 7 | assertEIPEditorApproval, 8 | assertFilenameAndFileNumbersMatch, 9 | assertHasAuthors, 10 | assertIsApprovedByAuthors, 11 | assertValidFilename, 12 | assertValidStatus, 13 | requireAuthors, 14 | requireFilePreexisting 15 | } from "#/assertions"; 16 | import { processError } from "src/domain/exceptions"; 17 | 18 | export const testFile = async (file: File): Promise => { 19 | // we need to define this here because the below logic can get very complicated otherwise 20 | const errors = DEFAULT_ERRORS; 21 | 22 | // file testing is not compatible (yet) with an initialy undefined file 23 | // so instead it's required here. It throws an exception for consistency 24 | const fileDiff = await getFileDiff(file); 25 | try { 26 | file = await requireFilePreexisting(file); 27 | } catch (err: any) { 28 | processError(err, { 29 | requirementViolation: (message) => { 30 | errors.fileErrors.filePreexistingError = message; 31 | } 32 | // all other types will throw the exception 33 | }); 34 | 35 | errors.approvalErrors.isEditorApprovedError = await assertEIPEditorApproval( 36 | fileDiff 37 | ); 38 | // new files are acceptable if an editor has approved 39 | if (errors.approvalErrors.isEditorApprovedError) { 40 | return { 41 | errors, 42 | fileDiff 43 | }; 44 | } 45 | } 46 | 47 | errors.approvalErrors.isEditorApprovedError = await assertEIPEditorApproval( 48 | fileDiff 49 | ); 50 | errors.approvalErrors.enoughEditorApprovalsForEIP1Error = 51 | await assertEIP1EditorApprovals(fileDiff); 52 | errors.fileErrors.validFilenameError = await assertValidFilename(file); 53 | errors.headerErrors.matchingEIPNumError = 54 | assertFilenameAndFileNumbersMatch(fileDiff); 55 | errors.headerErrors.constantEIPNumError = assertConstantEipNumber(fileDiff); 56 | errors.headerErrors.constantStatusError = assertConstantStatus(fileDiff); 57 | errors.headerErrors.validStatusError = assertValidStatus(fileDiff); 58 | errors.authorErrors.hasAuthorsError = assertHasAuthors(fileDiff); 59 | 60 | // if no authors then remaining items aren't relevant to check 61 | if (errors.authorErrors.hasAuthorsError) { 62 | return { 63 | errors, 64 | fileDiff 65 | }; 66 | } 67 | 68 | errors.approvalErrors.isAuthorApprovedError = await assertIsApprovedByAuthors( 69 | fileDiff 70 | ); 71 | return { 72 | errors, 73 | fileDiff, 74 | authors: requireAuthors(fileDiff) 75 | }; 76 | }; 77 | -------------------------------------------------------------------------------- /src/modules/pull_request/domain/types.ts: -------------------------------------------------------------------------------- 1 | import { ChangeTypes } from "src/domain"; 2 | 3 | export interface IGithubPullRequest { 4 | postComment: (string) => Promise; 5 | updateLabels: (labels: ChangeTypes[]) => Promise; 6 | } 7 | 
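
The assertion, file, purifier, and pull-request modules listed in this section are composed into a single review pipeline by src/main.ts, which is not reproduced in this excerpt. The sketch below is not a file from this repository; it only illustrates, under that assumption, how the exported pieces shown here (requirePr, requireFiles, requireMaxFileNumber, testFile, purifyTestResults, getCommentMessage, and the pull-request use cases) could be wired together. The import paths for the main and pull_request modules and the run wrapper are illustrative, not the project's actual entry point.

// hypothetical wiring sketch -- not part of the repository
import { requirePr, requireFiles, requireMaxFileNumber } from "#/assertions";
import { testFile } from "#/main/modules/test_file";
import { purifyTestResults } from "#/main/modules/purify_test_results";
import { getCommentMessage } from "#/main/modules/get_comment_message";
import { PullRequestUseCases } from "#/pull_request/use_cases";

const run = async () => {
  const pr = await requirePr(); // throws if there is no PR number or the PR is already merged
  await requireMaxFileNumber(pr); // hard cap of 25 changed files per PR
  const files = await requireFiles(pr); // diff between the base and head commits

  // test every changed file, then soften the raw errors with the purifiers
  const dirtyResults = await Promise.all(files.map(testFile));
  const results = await Promise.all(dirtyResults.map(purifyTestResults));

  // report back to the PR: one (updated-in-place) comment plus change-type labels
  await PullRequestUseCases.postComment(getCommentMessage(results));
  await PullRequestUseCases.updateLabels([
    ...new Set(results.map((result) => result.type))
  ]);
};

run().catch((err) => {
  console.error(err);
  process.exit(1);
});
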
-------------------------------------------------------------------------------- /src/modules/pull_request/infra/github_api/__tests__/updateLabels.test.ts: -------------------------------------------------------------------------------- 1 | import { initGeneralTestEnv } from "src/tests/testutils"; 2 | import { GithubInfra } from "src/infra"; 3 | import { PullRequestGithubApiLogs } from "../log"; 4 | import { GithubPullRequest } from "../github_pull_request"; 5 | import { ChangeTypes, MockedFunctionObject } from "src/domain"; 6 | 7 | describe("update labels", () => { 8 | initGeneralTestEnv(); 9 | const githubRepoMock: MockedFunctionObject = { 10 | getContextLabels: jest.fn(), 11 | addLabels: jest.fn(), 12 | removeLabels: jest.fn() 13 | }; 14 | const logsMock: MockedFunctionObject = { 15 | labelsMatch: jest.fn(), 16 | labelsToBeChanged: jest.fn() 17 | }; 18 | const PullRequest = new GithubPullRequest( 19 | githubRepoMock as any, 20 | logsMock as any 21 | ); 22 | 23 | it("should find matching", async () => { 24 | githubRepoMock.getContextLabels?.mockResolvedValueOnce([ 25 | ChangeTypes.newEIPFile, 26 | ChangeTypes.statusChange 27 | ]); 28 | 29 | await PullRequest.updateLabels([ 30 | ChangeTypes.statusChange, 31 | ChangeTypes.newEIPFile 32 | ]); 33 | 34 | expect(logsMock.labelsMatch).toBeCalled(); 35 | expect(githubRepoMock.addLabels).not.toBeCalled(); 36 | expect(githubRepoMock.removeLabels).not.toBeCalled(); 37 | }); 38 | 39 | it("should find difference", async () => { 40 | githubRepoMock.getContextLabels?.mockResolvedValueOnce([ 41 | ChangeTypes.newEIPFile, 42 | ChangeTypes.ambiguous 43 | ]); 44 | 45 | await PullRequest.updateLabels([ 46 | ChangeTypes.statusChange, 47 | ChangeTypes.newEIPFile 48 | ]); 49 | 50 | expect(logsMock.labelsMatch).not.toBeCalled(); 51 | expect(githubRepoMock.addLabels).toBeCalled(); 52 | expect(githubRepoMock.removeLabels).toBeCalled(); 53 | expect(logsMock.labelsToBeChanged).toBeCalled(); 54 | 55 | const call = logsMock.labelsToBeChanged?.mock.calls[0]!; 56 | expect(call[2]).toEqual([ChangeTypes.statusChange]); 57 | expect(call[3]).toEqual([ChangeTypes.ambiguous]); 58 | }); 59 | 60 | const genAddRemoveTests = (otherLabels: string[]) => { 61 | it("adds and removes when appropriate", async () => { 62 | githubRepoMock.getContextLabels?.mockResolvedValueOnce([ 63 | ChangeTypes.newEIPFile, 64 | ChangeTypes.ambiguous, 65 | ...(otherLabels as any) 66 | ]); 67 | 68 | // i.e. adds statusChange and removes ambiguous 69 | await PullRequest.updateLabels([ 70 | ChangeTypes.statusChange, 71 | ChangeTypes.newEIPFile 72 | ]); 73 | 74 | expect(githubRepoMock.addLabels).toBeCalled(); 75 | expect(githubRepoMock.removeLabels).toBeCalled(); 76 | 77 | expect(githubRepoMock.addLabels?.mock.calls[0]![0].length).toEqual(1); 78 | expect(githubRepoMock.removeLabels?.mock.calls[0]![0].length).toEqual(1); 79 | }); 80 | 81 | it("adds and does not remove when appropriate", async () => { 82 | githubRepoMock.getContextLabels?.mockResolvedValueOnce([ 83 | ChangeTypes.newEIPFile, 84 | ChangeTypes.ambiguous, 85 | ...(otherLabels as any) 86 | ]); 87 | 88 | // i.e. 
adds statusChange 89 | await PullRequest.updateLabels([ 90 | ChangeTypes.statusChange, 91 | ChangeTypes.ambiguous, 92 | ChangeTypes.newEIPFile 93 | ]); 94 | 95 | expect(githubRepoMock.addLabels).toBeCalled(); 96 | expect(githubRepoMock.removeLabels).not.toBeCalled(); 97 | 98 | expect(githubRepoMock.addLabels?.mock.calls[0]![0].length).toEqual(1); 99 | }); 100 | 101 | it("removes and does not add when appropriate", async () => { 102 | githubRepoMock.getContextLabels?.mockResolvedValueOnce([ 103 | ChangeTypes.newEIPFile, 104 | ChangeTypes.ambiguous, 105 | ...(otherLabels as any) 106 | ]); 107 | 108 | // i.e. removes ambiguous 109 | await PullRequest.updateLabels([ChangeTypes.newEIPFile]); 110 | 111 | expect(githubRepoMock.addLabels).not.toBeCalled(); 112 | expect(githubRepoMock.removeLabels).toBeCalled(); 113 | 114 | expect(githubRepoMock.removeLabels?.mock.calls[0]![0].length).toEqual(1); 115 | }); 116 | 117 | it("does not add or remove when appropriate", async () => { 118 | githubRepoMock.getContextLabels?.mockResolvedValueOnce([ 119 | ChangeTypes.newEIPFile, 120 | ChangeTypes.ambiguous, 121 | ...(otherLabels as any) 122 | ]); 123 | 124 | // i.e. changes nothing 125 | await PullRequest.updateLabels([ 126 | ChangeTypes.ambiguous, 127 | ChangeTypes.newEIPFile 128 | ]); 129 | 130 | expect(githubRepoMock.addLabels).not.toBeCalled(); 131 | expect(githubRepoMock.removeLabels).not.toBeCalled(); 132 | }); 133 | }; 134 | 135 | describe("add and remove testing no added fields", () => { 136 | genAddRemoveTests([]); 137 | }); 138 | 139 | describe("add and remove testing with added non-standard labels", () => { 140 | genAddRemoveTests(["field1,", "field4", "field5"]); 141 | }); 142 | }); 143 | -------------------------------------------------------------------------------- /src/modules/pull_request/infra/github_api/github_pull_request.ts: -------------------------------------------------------------------------------- 1 | import { IGithubPullRequest } from "#/pull_request/domain/types"; 2 | import { GithubInfra } from "src/infra"; 3 | import { ChangeTypes, isDefined } from "src/domain"; 4 | import _ from "lodash"; 5 | import { PullRequestGithubApiLogs } from "#/pull_request/infra/github_api/log"; 6 | 7 | export class GithubPullRequest implements IGithubPullRequest { 8 | constructor( 9 | public github: GithubInfra, 10 | public logs: PullRequestGithubApiLogs 11 | ) {} 12 | 13 | async postComment(message: string) { 14 | const me = await this.github.getSelf(); 15 | const comments = await this.github.getContextIssueComments(); 16 | 17 | // If comment already exists, update it 18 | for (const comment of comments) { 19 | if (comment.user?.login == me.login) { 20 | if (comment.body != message) { 21 | await this.github.updateComment(comment.id, message); 22 | } 23 | return; 24 | } 25 | } 26 | 27 | await this.github.createCommentOnContext(message); 28 | } 29 | 30 | async updateLabels(labels: ChangeTypes[]) { 31 | const currentRaw = await this.github.getContextLabels(); 32 | // filters out unrelated tags so it doesn't change those 33 | const current = _.intersection(currentRaw, Object.values(ChangeTypes)); 34 | const diff = _.xor(labels, currentRaw); 35 | 36 | if (_.isEmpty(diff)) { 37 | return this.logs.labelsMatch(current, labels); 38 | } 39 | 40 | const toRemove = _.intersection(current, diff); 41 | const toAdd = _.intersection(labels, diff); 42 | 43 | this.logs.labelsToBeChanged(current, labels, toAdd, toRemove); 44 | 45 | if (isDefined(toRemove)) { 46 | await this.github.removeLabels(toRemove); 47 | } 48 | 
49 | if (isDefined(toAdd)) { 50 | await this.github.addLabels(toAdd); 51 | } 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /src/modules/pull_request/infra/github_api/log.ts: -------------------------------------------------------------------------------- 1 | import { multiLineString } from "#/utils"; 2 | 3 | export const Logs = { 4 | labelsMatch: (current: string[], expected: string[]) => { 5 | console.log( 6 | multiLineString("\n")( 7 | `The current labels match their expected values`, 8 | `\t current: [${current.join(", ")}]`, 9 | `\t expected: [${expected.join(", ")}]` 10 | ) 11 | ); 12 | }, 13 | labelsToBeChanged: ( 14 | current: string[], 15 | expected: string[], 16 | toAdd: string[], 17 | toRemove: string[] 18 | ) => { 19 | console.log( 20 | multiLineString("\n")( 21 | `The current labels do not match their expected values, changing...`, 22 | `\t current: [${current.join(", ")}]`, 23 | `\t expected: [${expected.join(", ")}]`, 24 | `\t to be added: [${toAdd.join(", ")}]`, 25 | `\t to be removed: [${toRemove.join(", ")}]` 26 | ) 27 | ); 28 | } 29 | }; 30 | 31 | export type PullRequestGithubApiLogs = typeof Logs; 32 | -------------------------------------------------------------------------------- /src/modules/pull_request/use_cases/index.ts: -------------------------------------------------------------------------------- 1 | import { postComment } from "./post_comment"; 2 | import { updateLabels } from "./update_labels"; 3 | 4 | export const PullRequestUseCases = { 5 | postComment, 6 | updateLabels 7 | }; 8 | -------------------------------------------------------------------------------- /src/modules/pull_request/use_cases/post_comment.ts: -------------------------------------------------------------------------------- 1 | import { GithubPullRequest } from "#/pull_request/infra/github_api/github_pull_request"; 2 | import { castTo } from "src/domain"; 3 | import { github } from "src/infra"; 4 | import { Logs } from "../infra/github_api/log"; 5 | 6 | const PullRequest = new GithubPullRequest(github, Logs); 7 | 8 | export const postComment = castTo<(message: string) => Promise<void>>( 9 | (message: string) => { 10 | return PullRequest.postComment(message); 11 | } 12 | ); 13 | -------------------------------------------------------------------------------- /src/modules/pull_request/use_cases/update_labels.ts: -------------------------------------------------------------------------------- 1 | import { GithubPullRequest } from "#/pull_request/infra/github_api/github_pull_request"; 2 | import { castTo, ChangeTypes } from "src/domain"; 3 | import { github } from "src/infra"; 4 | import { Logs } from "../infra/github_api/log"; 5 | 6 | const PullRequest = new GithubPullRequest(github, Logs); 7 | 8 | export const updateLabels = castTo< 9 | (expectedLabels: ChangeTypes[]) => Promise<void> 10 | >((args) => { 11 | return PullRequest.updateLabels(args); 12 | }); 13 | -------------------------------------------------------------------------------- /src/modules/purifiers/__tests__/editor_approval.test.ts: -------------------------------------------------------------------------------- 1 | import { testResultsFactory } from "src/tests/factories/testResultsFactory"; 2 | import { editorApprovalPurifier } from "#/purifiers"; 3 | import { EipStatus } from "src/domain"; 4 | 5 | describe("editor approval purifier", () => { 6 | it("editor approval should purify validStatusError & filePreexistingError", () => { 7 | const testResults = testResultsFactory({ 8 | errors: { 9 | headerErrors: { 10 |
validStatusError: "error" 11 | }, 12 | fileErrors: { 13 | filePreexistingError: "error" 14 | }, 15 | approvalErrors: { 16 | isEditorApprovedError: undefined // this infers that it's approved 17 | } 18 | } 19 | }); 20 | const res = editorApprovalPurifier(testResults); 21 | expect(res.errors.headerErrors.validStatusError).toBeUndefined(); 22 | expect(res.errors.fileErrors.filePreexistingError).toBeUndefined(); 23 | }); 24 | it("should mention editors if it's a new file", () => { 25 | const testResults = testResultsFactory({ 26 | errors: { 27 | fileErrors: { 28 | filePreexistingError: "error" 29 | }, 30 | approvalErrors: { 31 | isEditorApprovedError: "no editor approval" 32 | } 33 | } 34 | }); 35 | const purified = editorApprovalPurifier(testResults); 36 | expect(purified.errors.approvalErrors.isEditorApprovedError).toBeDefined(); 37 | }); 38 | it("should mention editors if invalid status", () => { 39 | const testResults = testResultsFactory({ 40 | errors: { 41 | headerErrors: { 42 | validStatusError: "invalid status" 43 | }, 44 | approvalErrors: { 45 | isEditorApprovedError: "no editor approval" 46 | } 47 | } 48 | }); 49 | const purified = editorApprovalPurifier(testResults); 50 | expect(purified.errors.approvalErrors.isEditorApprovedError).toBeDefined(); 51 | }); 52 | it("should not show editors if not author approved on final", () => { 53 | const testResults = testResultsFactory({ 54 | errors: { 55 | approvalErrors: { 56 | isAuthorApprovedError: "no author approval", 57 | isEditorApprovedError: "no editor approval" 58 | } 59 | }, 60 | fileDiff: { 61 | head: { 62 | status: EipStatus.final 63 | }, 64 | base: { 65 | status: EipStatus.final 66 | } 67 | } 68 | }); 69 | const purified = editorApprovalPurifier(testResults); 70 | expect( 71 | purified.errors.approvalErrors.isEditorApprovedError 72 | ).toBeUndefined(); 73 | }); 74 | it("should show editors if author approved on final", () => { 75 | const testResults = testResultsFactory({ 76 | errors: { 77 | approvalErrors: { 78 | // in this case we assume author approval so no error 79 | isEditorApprovedError: "no editor approval" 80 | } 81 | }, 82 | fileDiff: { 83 | head: { 84 | status: EipStatus.final 85 | }, 86 | base: { 87 | status: EipStatus.final 88 | } 89 | } 90 | }); 91 | const purified = editorApprovalPurifier(testResults); 92 | expect(purified.errors.approvalErrors.isEditorApprovedError).toBeDefined(); 93 | }); 94 | it("should not mention editors if there are no other errors", () => { 95 | const testResults = testResultsFactory({ 96 | errors: { 97 | approvalErrors: { 98 | isEditorApprovedError: "no editor approval" 99 | } 100 | } 101 | }); 102 | const purified = editorApprovalPurifier(testResults); 103 | expect( 104 | purified.errors.approvalErrors.isEditorApprovedError 105 | ).toBeUndefined(); 106 | }); 107 | }); 108 | -------------------------------------------------------------------------------- /src/modules/purifiers/__tests__/eip1.test.ts: -------------------------------------------------------------------------------- 1 | import { testResultsFactory } from "src/tests/factories/testResultsFactory"; 2 | import { EIP1Purifier } from "#/purifiers"; 3 | 4 | describe("eip1 purifier", () => { 5 | it("should purify appropriate fields if it IS eip 1", () => { 6 | const testResults = testResultsFactory({ 7 | errors: { 8 | headerErrors: { 9 | validStatusError: 10 | "eip1 can be whatever status b/c of editor approvals" 11 | }, 12 | approvalErrors: { 13 | enoughEditorApprovalsForEIP1Error: "...", 14 | isAuthorApprovedError: "just in case..", 
15 | isEditorApprovedError: "just a repeat of ^" 16 | }, 17 | authorErrors: { 18 | hasAuthorsError: "eip1 doesn't have resolvable authors" 19 | } 20 | }, 21 | fileDiff: { 22 | // should only be considering base 23 | base: { 24 | eipNum: 1 25 | }, 26 | head: { 27 | eipNum: 100 28 | } 29 | } 30 | }); 31 | const purified = EIP1Purifier(testResults); 32 | expect(purified.errors.headerErrors.validStatusError).toBeUndefined(); 33 | expect( 34 | purified.errors.approvalErrors.isAuthorApprovedError 35 | ).toBeUndefined(); 36 | expect( 37 | purified.errors.approvalErrors.isEditorApprovedError 38 | ).toBeUndefined(); 39 | expect(purified.errors.authorErrors.hasAuthorsError).toBeUndefined(); 40 | expect( 41 | purified.errors.approvalErrors.enoughEditorApprovalsForEIP1Error 42 | ).toBeDefined(); 43 | }); 44 | 45 | it("should purify itself if not eip 1", () => { 46 | const testResults = testResultsFactory({ 47 | errors: { 48 | headerErrors: { 49 | validStatusError: 50 | "eip1 can be whatever status b/c of editor approvals" 51 | }, 52 | approvalErrors: { 53 | enoughEditorApprovalsForEIP1Error: "...", 54 | isAuthorApprovedError: "just in case..", 55 | isEditorApprovedError: "just a repeat of ^" 56 | }, 57 | authorErrors: { 58 | hasAuthorsError: "eip1 doesn't have resolvable authors" 59 | } 60 | }, 61 | fileDiff: { 62 | // should only be considering base 63 | base: { 64 | eipNum: 100 65 | }, 66 | head: { 67 | eipNum: 100 68 | } 69 | } 70 | }); 71 | const purified = EIP1Purifier(testResults); 72 | expect(purified.errors.headerErrors.validStatusError).toBeDefined(); 73 | expect(purified.errors.approvalErrors.isAuthorApprovedError).toBeDefined(); 74 | expect(purified.errors.approvalErrors.isEditorApprovedError).toBeDefined(); 75 | expect(purified.errors.authorErrors.hasAuthorsError).toBeDefined(); 76 | expect( 77 | purified.errors.approvalErrors.enoughEditorApprovalsForEIP1Error 78 | ).toBeUndefined(); 79 | }); 80 | }); 81 | -------------------------------------------------------------------------------- /src/modules/purifiers/__tests__/withdrawn_exceptions.test.ts: -------------------------------------------------------------------------------- 1 | import { testResultsFactory } from "src/tests/factories/testResultsFactory"; 2 | import { withdrawnExceptionPurifier } from "../withdrawn_exceptions"; 3 | import { EipStatus } from "src/domain"; 4 | 5 | describe("withdrawn purifier", () => { 6 | it("should purify if author changes status to withdrawn", () => { 7 | const testResults = testResultsFactory({ 8 | errors: { 9 | headerErrors: { 10 | constantStatusError: "this is testing the change", 11 | validStatusError: "withdrawn is not technically allowed" 12 | }, 13 | approvalErrors: { 14 | isEditorApprovedError: 15 | "for other status changes editor approval is required" 16 | } 17 | }, 18 | fileDiff: { 19 | // the head is what counts because constantStatusError tells you if it changed 20 | head: { 21 | status: EipStatus.withdrawn 22 | } 23 | } 24 | }); 25 | const purified = withdrawnExceptionPurifier(testResults); 26 | expect(purified.errors.headerErrors.validStatusError).toBeUndefined(); 27 | expect(purified.errors.headerErrors.constantStatusError).toBeUndefined(); 28 | expect( 29 | purified.errors.approvalErrors.isEditorApprovedError 30 | ).toBeUndefined(); 31 | }); 32 | }); 33 | -------------------------------------------------------------------------------- /src/modules/purifiers/editor_approval.ts: -------------------------------------------------------------------------------- 1 | import { EipStatus, 
TestResults } from "src/domain"; 2 | import { cloneDeep } from "lodash"; 3 | import { OR } from "#/utils"; 4 | import { statusChangeAllowedPurifier } from "./status_change_allowed"; 5 | 6 | export const editorApprovalPurifier = (testResults: TestResults) => { 7 | const _testResults = cloneDeep(testResults); 8 | const { errors, fileDiff } = _testResults; 9 | 10 | const isEditorApproved = !errors.approvalErrors.isEditorApprovedError; 11 | const isNewFile = !!errors.fileErrors.filePreexistingError; 12 | // I call the purifier because we shouldn't mention editors if 13 | // the status change is allowed 14 | const statusChangedAllowed = 15 | !statusChangeAllowedPurifier(testResults).errors.headerErrors 16 | .constantStatusError; 17 | 18 | const isInvalidStatus = !!errors.headerErrors.validStatusError; 19 | const isAuthorApproved = !errors.approvalErrors.isAuthorApprovedError; 20 | const isFinal = OR( 21 | fileDiff.head.status === EipStatus.final, 22 | fileDiff.base.status === EipStatus.final 23 | ); 24 | 25 | if (isEditorApproved && isNewFile) { 26 | errors.fileErrors.filePreexistingError = undefined; 27 | } 28 | 29 | if (isEditorApproved) { 30 | errors.headerErrors.validStatusError = undefined; 31 | } 32 | 33 | const mentionEditors = OR( 34 | !isEditorApproved && isNewFile, 35 | !isEditorApproved && isInvalidStatus, 36 | !isEditorApproved && !statusChangedAllowed, 37 | // Final EIPs should first get author approval then mention editors 38 | !isEditorApproved && isAuthorApproved && isFinal 39 | ); 40 | if (!mentionEditors) { 41 | errors.approvalErrors.isEditorApprovedError = undefined; 42 | } 43 | 44 | return { ...testResults, errors }; 45 | }; 46 | -------------------------------------------------------------------------------- /src/modules/purifiers/eip1.ts: -------------------------------------------------------------------------------- 1 | import { TestResults } from "src/domain"; 2 | import { cloneDeep } from "lodash"; 3 | 4 | export const EIP1Purifier = (testResults: TestResults) => { 5 | const { errors } = cloneDeep(testResults); 6 | const eipNum = testResults.fileDiff.base.eipNum; 7 | 8 | if (eipNum === 1) { 9 | // authors not required for EIP1 10 | errors.approvalErrors.isAuthorApprovedError = undefined; 11 | // eip-1 doesn't have authors that are discernible so it can be ignored 12 | errors.authorErrors.hasAuthorsError = undefined; 13 | // this is a repeat of hasEnoughEditorApprovals 14 | errors.approvalErrors.isEditorApprovedError = undefined; 15 | // eip-1 must be reviewed by multiple editors, so we can allowed it to be 16 | // any status without saying so 17 | errors.headerErrors.validStatusError = undefined; 18 | } else { 19 | errors.approvalErrors.enoughEditorApprovalsForEIP1Error = undefined; 20 | } 21 | 22 | // clear error in all other cases 23 | return { ...testResults, errors }; 24 | }; 25 | -------------------------------------------------------------------------------- /src/modules/purifiers/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./eip1"; 2 | export * from "./editor_approval"; 3 | export * from "./status_change_allowed"; 4 | export * from "./withdrawn_exceptions"; 5 | -------------------------------------------------------------------------------- /src/modules/purifiers/status_change_allowed.ts: -------------------------------------------------------------------------------- 1 | import { EipStatus, TestResults } from "src/domain"; 2 | import { cloneDeep } from "lodash"; 3 | import { ANY } from "#/utils"; 
4 | 5 | export const statusChangeAllowedPurifier = (testResults: TestResults) => { 6 | const _testResults = cloneDeep(testResults); 7 | const { errors, fileDiff } = _testResults; 8 | 9 | const isStatusChangeAllowed = ANY([ 10 | // state changes from lastcall -> review 11 | fileDiff?.base.status === EipStatus.lastCall && 12 | fileDiff?.head.status === EipStatus.review, 13 | // editors can approve state changes 14 | !errors.approvalErrors.isEditorApprovedError 15 | ]); 16 | 17 | if (isStatusChangeAllowed) { 18 | // always clear the constant status error if changes are allowed 19 | errors.headerErrors.constantStatusError = undefined; 20 | } 21 | 22 | return { 23 | ...testResults, 24 | errors 25 | }; 26 | }; 27 | -------------------------------------------------------------------------------- /src/modules/purifiers/withdrawn_exceptions.ts: -------------------------------------------------------------------------------- 1 | import { EipStatus, TestResults } from "src/domain"; 2 | import { cloneDeep } from "lodash"; 3 | 4 | export const withdrawnExceptionPurifier = (testResults: TestResults) => { 5 | const _testResults = cloneDeep(testResults); 6 | const { errors, fileDiff } = _testResults; 7 | 8 | const isChangingStatus = !!errors.headerErrors.constantStatusError; 9 | const isAuthorApproved = !errors.approvalErrors.isAuthorApprovedError; 10 | const isWithdrawnAtHead = fileDiff?.head.status === EipStatus.withdrawn; 11 | 12 | // the author is allowed to change the status to withdrawn without editor approval 13 | if (isChangingStatus && isAuthorApproved && isWithdrawnAtHead) { 14 | errors.approvalErrors.isEditorApprovedError = undefined; 15 | errors.headerErrors.constantStatusError = undefined; 16 | errors.headerErrors.validStatusError = undefined; 17 | } 18 | 19 | return { 20 | ...testResults, 21 | errors 22 | }; 23 | }; 24 | -------------------------------------------------------------------------------- /src/modules/utils/debug.ts: -------------------------------------------------------------------------------- 1 | import { NodeEnvs } from "../../domain/Types"; 2 | import { CriticalError } from "src/domain/exceptions"; 3 | 4 | export const __MAIN__ = async (debugEnv?: NodeJS.ProcessEnv) => { 5 | const isDebug = 6 | process.env.NODE_ENV === NodeEnvs.developemnt || 7 | process.env.NODE_ENV === NodeEnvs.test; 8 | 9 | if (!isDebug) 10 | throw new CriticalError("trying to run debug without proper auth"); 11 | 12 | // setup debug env 13 | setDebugContext(debugEnv); 14 | 15 | // by instantiating after context and env are custom set, 16 | // it allows for a custom environment that's setup programmatically 17 | const main = require("src/main").main; 18 | return await main(); 19 | }; 20 | 21 | export const setDebugContext = (debugEnv?: NodeJS.ProcessEnv) => { 22 | const env = { ...process.env, ...debugEnv }; 23 | process.env = env; 24 | 25 | // By instantiating after above it allows it to initialize with custom env 26 | const context = require("@actions/github").context; 27 | 28 | context.payload.pull_request = { 29 | base: { 30 | sha: env.BASE_SHA 31 | }, 32 | head: { 33 | sha: env.HEAD_SHA 34 | }, 35 | number: parseInt(env.PULL_NUMBER || "") || 0 36 | }; 37 | 38 | context.payload.repository = { 39 | // @ts-ignore 40 | name: env.REPO_NAME, 41 | owner: { 42 | key: "", 43 | // @ts-ignore 44 | login: env.REPO_OWNER_NAME, 45 | name: env.REPO_OWNER_NAME 46 | }, 47 | full_name: `${env.REPO_OWNER}/${env.REPO_NAME}` 48 | }; 49 | context.eventName = env.EVENT_TYPE; 50 | }; 51 | 
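// Usage sketch (annotation, not part of this file): __MAIN__ is intended to be
// called from a local script with a programmatically built environment, e.g.:
//
//   import { __MAIN__ } from "src/modules/utils/debug";
//   import { EVENTS, NodeEnvs } from "src/domain";
//
//   __MAIN__({
//     NODE_ENV: NodeEnvs.test,         // must be a dev/test env or __MAIN__ throws
//     PULL_NUMBER: "3654",             // example pull number
//     BASE_SHA: "<base sha>",          // placeholder
//     HEAD_SHA: "<head sha>",          // placeholder
//     REPO_OWNER_NAME: "ethereum",
//     REPO_NAME: "EIPs",
//     EVENT_TYPE: EVENTS.pullRequestTarget
//   }).catch(console.error);
//
// The variable names mirror those read by setDebugContext above; the concrete
// values here are illustrative placeholders, not a supported configuration.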
-------------------------------------------------------------------------------- /src/modules/utils/index.ts: -------------------------------------------------------------------------------- 1 | import { isDefined, TestResults } from "src/domain"; 2 | import _, { intersection, set } from "lodash"; 3 | 4 | export const OR = (...args: [boolean, ...boolean[]]) => args.includes(true); 5 | export const AND = (...args: [boolean, ...boolean[]]) => _.every(args, Boolean); 6 | 7 | export const multiLineString = 8 | (joinWith = " ") => 9 | (...args: [string, ...string[]]) => 10 | args.filter(isDefined).join(joinWith); 11 | 12 | export class MultiLineString { 13 | public message = ""; 14 | 15 | constructor(initialValue: string = "") { 16 | this.message = initialValue; 17 | } 18 | 19 | addLine(line: string) { 20 | this.message += `\n${line}`; 21 | } 22 | } 23 | 24 | export const ANY = (states: any[]) => states.filter(Boolean).length > 0; 25 | 26 | /** 27 | * designed to collect the purified results and return the common paths; 28 | * this is useful because it means that if one error is purified in one 29 | * purifier but not in others it will be purified in this step, which 30 | * avoids race conditions and keeps logic linear and shallow (improves 31 | * readability) 32 | * 33 | * @param parent common ancestor between potentially mutated objects 34 | * @param objects mutated objects from ancestor 35 | * @returns common paths of the mutated objects relative to the parent 36 | */ 37 | export const innerJoinAncestors = ( 38 | parent: TestResults, 39 | objects: TestResults[] 40 | ) => { 41 | const objectPaths = objects.map(getAllTruthyObjectPaths); 42 | const commonPaths = intersection(...objectPaths); 43 | const clearPaths = getAllTruthyObjectPaths(parent).filter( 44 | (path) => !commonPaths.includes(path) 45 | ); 46 | 47 | return clearPaths.reduce( 48 | (obj, path) => set(obj, path, undefined), 49 | parent 50 | ) as TestResults; 51 | }; 52 | 53 | export const getAllTruthyObjectPaths = (obj: object) => { 54 | function rKeys(o: object, path?: string) { 55 | if (!o) return; 56 | if (typeof o !== "object") return path; 57 | return Object.keys(o).map((key) => 58 | rKeys(o[key], path ? [path, key].join(".") : key) 59 | ); 60 | } 61 | 62 | return rKeys(obj).toString().split(",").filter(isDefined); 63 | }; 64 | 65 | export const getAllFalseObjectPaths = (obj: object) => { 66 | function rKeys(o: object, path?: string) { 67 | if (typeof o !== "object" || _.isNull(o)) { 68 | if (o === false) return path; 69 | return; 70 | } 71 | return Object.keys(o).map((key) => 72 | rKeys(o[key], path ? [path, key].join(".") : key) 73 | ); 74 | } 75 | 76 | return rKeys(obj).toString().split(",").filter(isDefined); 77 | }; 78 | 79 | export const getAllNullObjectPaths = (obj: object) => { 80 | function rKeys(o: object, path?: string) { 81 | if (typeof o !== "object") { 82 | if (_.isNull(o)) return path; 83 | return; 84 | } 85 | return Object.keys(o).map((key) => 86 | rKeys(o[key], path ? 
[path, key].join(".") : key) 87 | ); 88 | } 89 | 90 | return rKeys(obj).toString().split(",").filter(isDefined); 91 | }; 92 | -------------------------------------------------------------------------------- /src/tests/assets/mockPR.ts: -------------------------------------------------------------------------------- 1 | import { getOctokit } from "@actions/github"; 2 | import nock from "nock"; 3 | import { 4 | GITHUB_TOKEN, 5 | MockMethods, 6 | MockRecord, 7 | NodeEnvs, 8 | PR, 9 | requireMockMethod 10 | } from "src/domain"; 11 | import { assertSavedRecord, getMockRecords, SavedRecord } from "./records"; 12 | import * as fs from "fs"; 13 | import { CriticalError, UnexpectedError } from "src/domain/exceptions"; 14 | import * as HttpStatus from "http-status"; 15 | 16 | const baseUrl = "https://api.github.com"; 17 | const scope = nock(baseUrl).persist(); 18 | 19 | /** 20 | * This is a tool used to mock pull requests, this is useful for testing and it's also 21 | * useful for development. It makes dealing with merged PRs trivial because if you change 22 | * the mocked requests in its respective asset file then you can simulate situations 23 | * 24 | * @param pullNumber the pull number to mock (mocks the necesary github api requests) 25 | * @returns mocked pull request of the pull number 26 | */ 27 | export const mockPR = async (pullNumber: SavedRecord) => { 28 | const mockRecords = await getMockRecords(); 29 | const records = mockRecords[`PR${pullNumber}`]; 30 | 31 | if (!records) 32 | throw new CriticalError(`no mocked records for pull number ${pullNumber}`); 33 | 34 | for (const record of records) { 35 | const req = record.req; 36 | const res = record.res; 37 | 38 | if (!req && !res) continue; // allows for setting {} for new mocks 39 | 40 | const wildcard = req.url.replace(baseUrl, ""); 41 | 42 | switch (req.method) { 43 | case "GET": 44 | scope.get(wildcard).reply(res.status, res.data); 45 | break; 46 | case "POST": 47 | scope.post(wildcard).reply(res.status, res.data); 48 | break; 49 | case "PATCH": 50 | scope.patch(wildcard).reply(res.status, res.data); 51 | break; 52 | case MockMethods.put: 53 | scope.put(wildcard).reply(res.status, res.data); 54 | break; 55 | } 56 | } 57 | 58 | nock.disableNetConnect(); 59 | 60 | const PRWildcard = `/repos/ethereum/EIPs/pulls/${pullNumber}`; 61 | return records.find( 62 | (record) => 63 | record.req?.method === "GET" && 64 | record.req?.url === `${baseUrl}${PRWildcard}` 65 | )?.res?.data as PR; 66 | }; 67 | 68 | // TODO: rename and reorganize these debugging tools 69 | export const __MAIN_MOCK__ = async (mockEnv?: NodeJS.ProcessEnv) => { 70 | const isMock = 71 | process.env.NODE_ENV === NodeEnvs.mock || 72 | process.env.NODE_ENV === NodeEnvs.test; 73 | 74 | if (!isMock) 75 | throw new CriticalError("trying to run debug without proper auth"); 76 | 77 | // setup debug env 78 | await setMockContext(mockEnv); 79 | 80 | // by instantiating after context and env are custom set, 81 | // it allows for a custom environment that's setup programmatically 82 | const main = (await import("src/main")).main; 83 | 84 | // only want to run this once to make things easier 85 | try { 86 | return await main(); 87 | } catch (err: any) { 88 | const url = err?.request?.url; 89 | const method = err?.request?.method; 90 | const body = err?.request?.body; 91 | 92 | console.log(err.type); 93 | if (url && method) { 94 | await fetchAndCreateRecord(url, method, body); 95 | } else { 96 | throw err; 97 | } 98 | } 99 | }; 100 | 101 | export const setMockContext = async (mockEnv?: 
NodeJS.ProcessEnv) => { 102 | const env = { ...process.env, ...mockEnv }; 103 | process.env = env; 104 | 105 | if (!env.PULL_NUMBER) 106 | throw new CriticalError("PULL_NUMBER is required to mock"); 107 | 108 | // setup saved record (mocking network responses) 109 | assertSavedRecord(env.PULL_NUMBER); 110 | const pr = await mockPR(env.PULL_NUMBER); 111 | 112 | // By instantiating after above it allows it to initialize with custom env 113 | const context = (await import("@actions/github")).context; 114 | 115 | context.payload.pull_request = { 116 | base: { 117 | sha: pr?.base?.sha 118 | }, 119 | head: { 120 | sha: pr?.head?.sha 121 | }, 122 | number: parseInt(env.PULL_NUMBER || "") || 0 123 | }; 124 | 125 | // context.issue.number = pr.number 126 | 127 | context.payload.repository = { 128 | // @ts-ignore 129 | name: env.REPO_NAME, 130 | owner: { 131 | key: "", 132 | // @ts-ignore 133 | login: env.REPO_OWNER_NAME, 134 | name: env.REPO_OWNER_NAME 135 | }, 136 | full_name: `${env.REPO_OWNER}/${env.REPO_NAME}` 137 | }; 138 | // @ts-ignore 139 | context.eventName = env.EVENT_TYPE; 140 | }; 141 | 142 | const fetchAndCreateRecord = async ( 143 | url: string, 144 | method: MockMethods, 145 | body?: string 146 | ) => { 147 | console.error("failed request", method, url, "\nmocking request..."); 148 | 149 | const isMock = process.env.NODE_ENV === NodeEnvs.mock; 150 | 151 | if (!isMock) return; 152 | 153 | nock.cleanAll(); 154 | nock.enableNetConnect(); 155 | const github = getOctokit(GITHUB_TOKEN).request; 156 | const res = await github({ 157 | method, 158 | url, 159 | ...JSON.parse(body || "{}") 160 | }).catch((err) => { 161 | nock.disableNetConnect(); 162 | return err; 163 | }); 164 | console.log("successfully fetched data"); 165 | nock.disableNetConnect(); 166 | 167 | const fileName = `records/${process.env.PULL_NUMBER?.replace("_", "/")}.json`; 168 | const mockedRecord: MockRecord[] = (await import("./" + fileName)).default; 169 | 170 | requireMockMethod(method); 171 | const handleResData = (res) => { 172 | const status = res.status; 173 | if ([HttpStatus.OK, HttpStatus.CREATED].includes(status)) { 174 | // when successful it returns the response in a res.data format 175 | return res.data; 176 | } 177 | if ([HttpStatus.NOT_FOUND].includes(status)) { 178 | // when it returns a not found or other types of failures 179 | return res.response.data; 180 | } 181 | throw new UnexpectedError(`status code ${status} is not a handled status`); 182 | }; 183 | mockedRecord.push({ 184 | req: { 185 | url, 186 | method 187 | }, 188 | res: { 189 | status: res.status, 190 | data: handleResData(res) 191 | } 192 | }); 193 | 194 | console.log(process.cwd() + "/src/tests/assets/" + fileName); 195 | fs.writeFile( 196 | process.cwd() + "/src/tests/assets/" + fileName, 197 | JSON.stringify(mockedRecord, null, 2), 198 | () => { 199 | console.log(mockedRecord); 200 | console.log("wrote file"); 201 | } 202 | ); 203 | }; 204 | -------------------------------------------------------------------------------- /src/tests/assets/records/index.ts: -------------------------------------------------------------------------------- 1 | import { MockRecord, requireMockMethod } from "src/domain"; 2 | import { CriticalError } from "src/domain/exceptions"; 3 | 4 | export enum SavedRecord { 5 | /** 6 | * **SHOULD FAIL** 7 | * 8 | * Summary: editor approval wasn't required if the author of the PR was an editor 9 | * 10 | * Explanation: if an editor is an author of an EIP and they submit a PR, then the 11 | * bot will assume that it has been 
approved by the editor. But this shouldn't happen. 12 | * The fix to this bug was considering EIP_EDITORS to be dynamic based on the eip / file 13 | * at hand. So if an editor is an author they won't be considered an editor for that test. 14 | * */ 15 | PR3596 = "3596", 16 | /** **SHOULD PASS** 17 | * 18 | * Summary: wasn't passing despite editor approval 19 | * 20 | * Explanation: The cause of the bug was that the EIP_EDITORS list had capitals in it, 21 | * so when it was checking if it was in the list it wouldn't match MicahZoltu 22 | * to micahzoltu. And so it failed. 23 | */ 24 | PR3654_1 = "3654_1", 25 | /** **SHOULD FAIL** 26 | * 27 | * Summary: editors weren't mentioned if there was only a valid status error 28 | * 29 | * Explanation: The cause of this bug was that despite there being a valid status error 30 | * (i.e. status is Final), which requires editor approval, the logic 31 | * that actually collected the mentions didn't account for it. So I added that 32 | * logic and it was golden 33 | */ 34 | PR3654_2 = "3654_2", 35 | /** 36 | * **SHOULD PASS** 37 | * 38 | * @summary: [false alarm] greg opened a pull request and it automatically merged for 39 | * an unknown reason. There were no editor reviews. 40 | * 41 | * @description: This was perceived as an error because greg moved the status from 42 | * draft to review at first but then reverted this change. In this case it was 43 | * expected behavior to auto merge, but it was incorrectly interpreted. 44 | */ 45 | PR3767 = "3767", 46 | /** 47 | * @summary: a pull request changed the status from last call to review, 48 | * it was caught by the linter but the editors weren't mentioned and they 49 | * were presumably not required 50 | * 51 | * @description: 52 | */ 53 | PR3676 = "3676", 54 | /** 55 | * **SHOULD PASS** 56 | * 57 | * @summary: multi-file PR change with bot 58 | */ 59 | PR3612 = "3612", 60 | /** 61 | * **SHOULD FAIL** 62 | * 63 | * @summary: multi-file PR that does not have the necessary reviews for it to pass 64 | */ 65 | PR4192 = "4192", 66 | /** 67 | * **SHOULD SUCCEED** 68 | * 69 | * @summary: this is one where the bot mentioned the email of the user when 70 | * it couldn't find the username. Either try to 71 | */ 72 | PR3768_1 = "3768_1", 73 | /** 74 | * **SHOULD FAIL** 75 | * 76 | * @summary: Same as PR3768_1 but in this case I deleted the author's review; 77 | * the goal is to use this PR to verify that an author with an email won't be 78 | * mentioned.
79 | */ 80 | PR3768_2 = "3768_2", 81 | /** 82 | * **SHOULD PASS** 83 | * 84 | * @summary: PR3623 was approved by the author but it didn't merge, so this 85 | * was a bug where author's approval didn't actually merge anything 86 | */ 87 | PR3623 = "3623", 88 | /** 89 | * @summary: this is an example PR that was used to implement the feature 90 | * that authors be allowed to submit a PR to mark their EIP withdrawn and 91 | * that should be merged automatically 92 | */ 93 | PR4189 = "4189", 94 | /** 95 | * @summary: this pull request automatically merged despite the tests failing 96 | * it was due to the fact unhandled errors never triggered a critical failure 97 | */ 98 | PR4478 = "4478", 99 | /** 100 | * @summary: a change to eip-1 that's not able to discern the authors 101 | */ 102 | PR4499 = "4499", 103 | /** 104 | * 105 | * @summary: the bot didn't fail gracefully on an asset file because the filename 106 | * provided was just the file's name instead of the path; I made all of the uses 107 | * of requireEIPNumber use path instead 108 | * @description: the bot reported 109 | * > ## (fail) eip-3448.md 110 | * > - eip-3448 state was changed from draft to review 111 | * > - This PR requires review from one of [@micahzoltu, @lightclient, @axic] 112 | * > ## (fail) assets/eip-3448/MetaProxyFactory.sol 113 | * > - 'MetaProxyFactory.sol' must be in eip-###.md format; this error will be overwritten upon relevant editor approval 114 | * 115 | * but it should have had a graceful failure on the assets/eip-3448/MetaProxyFactory.sol 116 | * 117 | * The problem was that it was evaluating the filename (MetaProxyFactory.sol) instead 118 | * of the path (assets/eip-3448/MetaProxyFactory.sol) so the code had no way 119 | * of knowing. 120 | */ 121 | PR4506 = "4506", 122 | /** 123 | * @summary: this pull request was approved by an editor but it still said that it was not 124 | * 125 | * @description: 126 | * The bug here was a result of the fact that github limits the number of 127 | * responses it can return. At the time of this pull request the max number of 128 | * reviews that could be returned was 30. But there were closer to 60 on the 129 | * pull request. So when the bot requested reviews, it got back a truncated 130 | * list that lacked the editor's approval. To fix this I built in a mechanism 131 | * to get all reviews (no matter how many) and I increased the max to 100. 
132 | */ 133 | PR4361 = "4361" 134 | } 135 | 136 | /** 137 | * This will error if the provided string is not a known SavedRecord 138 | * @param maybeSavedRecord a string corresponding to a known SavedRecord 139 | */ 140 | export function assertSavedRecord( 141 | maybeSavedRecord: string 142 | ): asserts maybeSavedRecord is SavedRecord { 143 | const savedRecords = Object.values(SavedRecord); 144 | 145 | // @ts-expect-error savedRecords is actually a string[] 146 | if (!savedRecords.includes(maybeSavedRecord)) { 147 | throw new CriticalError( 148 | `${maybeSavedRecord} is not a SavedRecord, the options are ${savedRecords}` 149 | ); 150 | } 151 | } 152 | 153 | const assertMethods = (records: { default: MockRecord[] }) => { 154 | records.default.map( 155 | (record) => record.req?.method && requireMockMethod(record.req.method) 156 | ); 157 | }; 158 | export const getMockRecords = async () => { 159 | const PR3767 = await import("./3767.json"); 160 | const PR3676 = await import("./3676.json"); 161 | const PR3612 = await import("./3612.json"); 162 | const PR4192 = await import("./4192.json"); 163 | const PR3768_1 = await import("./3768/1.json"); 164 | const PR3768_2 = await import("./3768/2.json"); 165 | const PR3596 = await import("./3596.json"); 166 | const PR3654_1 = await import("./3654/1.json"); 167 | const PR3654_2 = await import("./3654/2.json"); 168 | const PR3623 = await import("./3623.json"); 169 | const PR4189 = await import("./4189.json"); 170 | const PR4478 = await import("./4478.json"); 171 | const PR4499 = await import("./4499.json"); 172 | const PR4506 = await import("./4506.json"); 173 | const PR4361 = await import("./4361.json"); 174 | 175 | assertMethods(PR3767); 176 | assertMethods(PR3676); 177 | assertMethods(PR3612); 178 | assertMethods(PR4192); 179 | assertMethods(PR3768_1); 180 | assertMethods(PR3768_2); 181 | assertMethods(PR3596); 182 | assertMethods(PR3654_1); 183 | assertMethods(PR3654_2); 184 | assertMethods(PR3623); 185 | assertMethods(PR4189); 186 | assertMethods(PR4478); 187 | assertMethods(PR4506); 188 | assertMethods(PR4361); 189 | 190 | const Records: { [k in keyof typeof SavedRecord]: MockRecord[] } = { 191 | PR3596: PR3596.default, 192 | PR3654_1: PR3654_1.default, 193 | PR3654_2: PR3654_2.default, 194 | PR3767: PR3767.default, 195 | PR3676: PR3676.default, 196 | PR3612: PR3612.default, 197 | PR4192: PR4192.default, 198 | PR3768_1: PR3768_1.default, 199 | PR3768_2: PR3768_2.default, 200 | PR3623: PR3623.default, 201 | PR4189: PR4189.default, 202 | PR4478: PR4478.default, 203 | PR4499: PR4499.default, 204 | PR4506: PR4506.default, 205 | PR4361: PR4361.default 206 | }; 207 | return Records; 208 | }; 209 | -------------------------------------------------------------------------------- /src/tests/factories/envFactory.ts: -------------------------------------------------------------------------------- 1 | import { SavedRecord } from "src/tests/assets/records"; 2 | import faker from "faker"; 3 | import { 4 | EIPCategory, 5 | EIPTypeOrCategoryToResolver, 6 | EIPTypes, 7 | EVENTS, 8 | NodeEnvs 9 | } from "src/domain"; 10 | 11 | type Env = { 12 | PULL_NUMBER: SavedRecord; 13 | BASE_SHA: string; 14 | HEAD_SHA: string; 15 | GITHUB_TOKEN: string; 16 | NODE_ENV: NodeEnvs; 17 | REPO_OWNER_NAME: string; 18 | REPO_NAME: string; 19 | WORKFLOW_ID: string; 20 | GITHUB_REPOSITORY: string; 21 | EVENT_TYPE: EVENTS; 22 | CORE_EDITORS: string; 23 | ERC_EDITORS: string; 24 | MAINTAINERS: string; 25 | }; 26 | 27 | const _envFactory = 28 | <B extends Partial<Env>>(base: B) => 29 | <O extends Partial<Env>>(overrides: O): B & O &
NodeJS.ProcessEnv => ({ 30 | ...process.env, 31 | ...base, 32 | ...overrides 33 | }); 34 | 35 | export const envFactory = _envFactory({ 36 | REPO_OWNER_NAME: "ethereum", 37 | REPO_NAME: "EIPs", 38 | GITHUB_TOKEN: faker.random.alphaNumeric(10), 39 | NODE_ENV: NodeEnvs.test, 40 | GITHUB_REPOSITORY: "ethereum/EIPs", 41 | EVENT_TYPE: EVENTS.pullRequestTarget, 42 | [EIPTypeOrCategoryToResolver[ 43 | EIPCategory.erc 44 | ]]: `@${EIPTypeOrCategoryToResolver[EIPCategory.erc].replace( 45 | "_", 46 | "-" 47 | )}, @test, @editors`, 48 | [EIPTypeOrCategoryToResolver[ 49 | EIPCategory.core 50 | ]]: `@${EIPTypeOrCategoryToResolver[EIPCategory.core].replace( 51 | "_", 52 | "-" 53 | )}, @test, @editors`, 54 | [EIPTypeOrCategoryToResolver[ 55 | EIPCategory.interface 56 | ]]: `@${EIPTypeOrCategoryToResolver[EIPCategory.interface].replace( 57 | "_", 58 | "-" 59 | )}, @test, @editors`, 60 | [EIPTypeOrCategoryToResolver[ 61 | EIPCategory.networking 62 | ]]: `@${EIPTypeOrCategoryToResolver[EIPCategory.networking].replace( 63 | "_", 64 | "-" 65 | )}, @test, @editors`, 66 | [EIPTypeOrCategoryToResolver[EIPTypes.meta]]: `@${EIPTypeOrCategoryToResolver[ 67 | EIPTypes.meta 68 | ].replace("_", "-")}, @test, @editors`, 69 | [EIPTypeOrCategoryToResolver[ 70 | EIPTypes.informational 71 | ]]: `@${EIPTypeOrCategoryToResolver[EIPTypes.informational].replace( 72 | "_", 73 | "-" 74 | )}, @test, @editors`, 75 | MAINTAINERS: "@maintainers" 76 | }); 77 | -------------------------------------------------------------------------------- /src/tests/factories/fileDiffFactory.ts: -------------------------------------------------------------------------------- 1 | import { EipStatus, FileDiff } from "src/domain"; 2 | import { RecursivePartial } from "src/tests/testutils"; 3 | 4 | export const FileDiffFactory = ( 5 | overrides: RecursivePartial<FileDiff> = {} 6 | ): FileDiff => { 7 | const defaults = { 8 | head: { 9 | eipNum: 2930, 10 | status: EipStatus.draft, 11 | authors: new Set(["@vbuterin", "@holiman"]), 12 | name: "eip-2930.md", 13 | filenameEipNum: 2930 14 | }, 15 | base: { 16 | eipNum: 2930, 17 | status: EipStatus.draft, 18 | authors: new Set(["@vbuterin", "@holiman"]), 19 | name: "eip-2930.md", 20 | filenameEipNum: 2930 21 | } 22 | }; 23 | 24 | return { 25 | head: { 26 | ...defaults.head, 27 | ...overrides.head 28 | } as FileDiff["head"], 29 | base: { 30 | ...defaults.base, 31 | ...overrides.base 32 | } as FileDiff["base"] 33 | }; 34 | }; 35 | -------------------------------------------------------------------------------- /src/tests/factories/fileFactory.ts: -------------------------------------------------------------------------------- 1 | import { File } from "src/domain"; 2 | 3 | export const FileFactory = ( 4 | overrides: Partial<File> = {} 5 | ): NonNullable<File> => { 6 | const defaults: File = { 7 | sha: "5e7d0c3f74aef60373c83edf063b42cdd09041b4", 8 | filename: "EIPS/eip-2930.md", 9 | status: "modified", 10 | additions: 5, 11 | deletions: 5, 12 | changes: 10, 13 | blob_url: 14 | "https://github.com/ethereum/EIPs/blob/f661efb56fee4a4cdffe1ba3efe119d19c0ae9a7/EIPS/eip-2930.md", 15 | raw_url: 16 | "https://github.com/ethereum/EIPs/raw/f661efb56fee4a4cdffe1ba3efe119d19c0ae9a7/EIPS/eip-2930.md", 17 | contents_url: 18 | "https://api.github.com/repos/ethereum/EIPs/contents/EIPS/eip-2930.md?ref=f661efb56fee4a4cdffe1ba3efe119d19c0ae9a7", 19 | patch: [ 20 | "@@ -16,9 +16,9 @@ Adds a transaction type which contains an access list, a list of addresses and s\n", 21 | " \n", 22 | " ## Abstract\n", 23 | " \n", 24 | "-We introduce a new
[EIP-2718](./eip-2718.md) transaction type, with the format `0x01 || rlp([chainId, nonce, gasPrice, gasLimit, to, value, data, access_list, yParity, senderR, senderS])`.\n", 25 | "+We introduce a new [EIP-2718](./eip-2718.md) transaction type, with the format `0x01 || rlp([chainId, nonce, gasPrice, gasLimit, to, value, data, accessList, signatureYParity, signatureR, signatureS])`.\n", 26 | " \n", 27 | "-The `access_list` specifies a list of addresses and storage keys; these addresses and storage keys are added into the `accessed_addresses` and `accessed_storage_keys` global sets (introduced in [EIP-2929](./eip-2929.md)). A gas cost is charged, though at a discount relative to the cost of accessing outside the list.\n", 28 | "+The `accessList` specifies a list of addresses and storage keys; these addresses and storage keys are added into the `accessed_addresses` and `accessed_storage_keys` global sets (introduced in [EIP-2929](./eip-2929.md)). A gas cost is charged, though at a discount relative to the cost of accessing outside the list.\n", 29 | " \n", 30 | " ## Motivation\n", 31 | " \n", 32 | "@@ -48,13 +48,13 @@ This EIP serves two functions:\n", 33 | " \n", 34 | " As of `FORK_BLOCK_NUMBER`, a new [EIP-2718](./eip-2718.md) transaction is introduced with `TransactionType` `1`.\n", 35 | " \n", 36 | "-The [EIP-2718](./eip-2718.md) `TransactionPayload` for this transaction is `rlp([chainId, nonce, gasPrice, gasLimit, to, value, data, access_list, yParity, senderR, senderS])`.\n", 37 | "+The [EIP-2718](./eip-2718.md) `TransactionPayload` for this transaction is `rlp([chainId, nonce, gasPrice, gasLimit, to, value, data, accessList, signatureYParity, signatureR, signatureS])`.\n", 38 | " \n", 39 | "-The `yParity, senderR, senderS` elements of this transaction represent a secp256k1 signature over `keccak256(0x01 || rlp([chainId, nonce, gasPrice, gasLimit, to, value, data, access_list]))`.\n", 40 | "+The `signatureYParity, signatureR, signatureS` elements of this transaction represent a secp256k1 signature over `keccak256(0x01 || rlp([chainId, nonce, gasPrice, gasLimit, to, value, data, accessList]))`.\n", 41 | " \n", 42 | " The [EIP-2718](./eip-2718.md) `ReceiptPayload` for this transaction is `rlp([status, cumulativeGasUsed, logsBloom, logs])`.\n", 43 | " \n", 44 | '-For the transaction to be valid, `access_list` must be of type `[[{20 bytes}, [{32 bytes}...]]...]`, where `...` means "zero or more of the thing to the left". For example, the following is a valid access list (all hex strings would in reality be in byte representation):\n', 45 | '+For the transaction to be valid, `accessList` must be of type `[[{20 bytes}, [{32 bytes}...]]...]`, where `...` means "zero or more of the thing to the left". 
For example, the following is a valid access list (all hex strings would in reality be in byte representation):\n', 46 | " \n", 47 | " ```\n", 48 | " [" 49 | ].join("") 50 | }; 51 | 52 | return { ...defaults, ...overrides }; 53 | }; 54 | -------------------------------------------------------------------------------- /src/tests/factories/prFactory.ts: -------------------------------------------------------------------------------- 1 | import { PR } from "src/domain"; 2 | import { getMockRecords } from "src/tests/assets/records"; 3 | 4 | export const PRFactory = async (overrides: Partial<PR> = {}) => { 5 | const Records = await getMockRecords(); 6 | const defaults: PR = Records.PR3596[0]?.res.data; 7 | return { 8 | ...defaults, 9 | ...overrides 10 | }; 11 | }; 12 | -------------------------------------------------------------------------------- /src/tests/factories/testResultsFactory.ts: -------------------------------------------------------------------------------- 1 | import { TestResults } from "src/domain"; 2 | import { RecursivePartial } from "src/tests/testutils"; 3 | import { FileDiffFactory } from "./fileDiffFactory"; 4 | 5 | const defaults: RecursivePartial<TestResults> & { 6 | fileDiff: TestResults["fileDiff"]; 7 | } = { 8 | fileDiff: FileDiffFactory() 9 | }; 10 | 11 | export const testResultsFactory = ( 12 | overrides: RecursivePartial<TestResults> = {} 13 | ): TestResults => { 14 | return { 15 | errors: { 16 | fileErrors: { 17 | ...defaults.errors?.fileErrors, 18 | ...overrides.errors?.fileErrors 19 | }, 20 | headerErrors: { 21 | ...defaults.errors?.headerErrors, 22 | ...overrides.errors?.headerErrors 23 | }, 24 | authorErrors: { 25 | ...defaults.errors?.authorErrors, 26 | ...overrides.errors?.authorErrors 27 | }, 28 | approvalErrors: { 29 | ...defaults.errors?.approvalErrors, 30 | ...overrides.errors?.approvalErrors 31 | } 32 | }, 33 | fileDiff: { 34 | base: { 35 | ...defaults.fileDiff.base, 36 | ...overrides.fileDiff?.base 37 | }, 38 | head: { 39 | ...defaults.fileDiff.head, 40 | ...overrides.fileDiff?.head 41 | } 42 | }, 43 | authors: overrides.authors || defaults.authors 44 | }; 45 | }; 46 | -------------------------------------------------------------------------------- /src/tests/testutils.ts: -------------------------------------------------------------------------------- 1 | import { Context } from "@actions/github/lib/context"; 2 | import actions from "@actions/github"; 3 | import _, { set } from "lodash"; 4 | import nock from "nock"; 5 | import MockedEnv from "mocked-env"; 6 | import * as core from "@actions/core"; 7 | 8 | export const getAllTruthyObjectPaths = (obj: object) => { 9 | function rKeys(o: object, path?: string) { 10 | if (!o) return; 11 | if (typeof o === "function") return; 12 | if (typeof o !== "object") return path; 13 | return Object.keys(o).map((key) => 14 | rKeys(o[key], path ? [path, key].join(".") : key) 15 | ); 16 | } 17 | 18 | return rKeys(obj).toString().split(",").filter(Boolean) as string[]; 19 | }; 20 | 21 | export const getAllPaths = (obj: object) => { 22 | function rKeys(o: object, path?: string) { 23 | if (typeof o !== "object" || _.isNull(o)) return path; 24 | return Object.keys(o).map((key) => 25 | rKeys(o[key], path ?
[path, key].join(".") : key) 26 | ); 27 | } 28 | 29 | return rKeys(obj).toString().split(",").filter(Boolean) as string[]; 30 | }; 31 | 32 | export const expectError = async (fn, extraContext?: string) => { 33 | let error; 34 | try { 35 | await fn(); 36 | } catch (err) { 37 | error = err; 38 | } 39 | if (!error) 40 | throw `function ${fn.toString()} was expected to throw and error but it didn't\n\textra context: ${extraContext}`; 41 | }; 42 | 43 | export const expectErrorWithHandler = async ( 44 | fn, 45 | handler: (error: any) => void, 46 | extraContext?: string 47 | ) => { 48 | let error; 49 | try { 50 | await fn(); 51 | } catch (err) { 52 | handler && handler(err); 53 | error = err; 54 | } 55 | if (!error) 56 | throw `function ${fn.toString()} was expected to throw and error but it didn't\n\textra context: ${extraContext}`; 57 | }; 58 | 59 | export const clearContext = (context: Context) => { 60 | const paths = getAllTruthyObjectPaths(context); 61 | for (const path of paths) { 62 | set(context, path, undefined); 63 | } 64 | }; 65 | 66 | export type RecursivePartial = { 67 | [P in keyof T]?: T[P] extends {} ? RecursivePartial : T[P]; 68 | }; 69 | 70 | /** a utility that helps simplify mocking github context */ 71 | export const mockGithubContext = ( 72 | defaultContext: Partial = {}, 73 | getOctokit = () => {} 74 | ) => { 75 | beforeAll(() => { 76 | jest.mock("@actions/github", () => ({ 77 | context: {}, 78 | getOctokit: jest.fn() 79 | })); 80 | }); 81 | 82 | let mock: jest.Mocked; 83 | beforeEach(async () => { 84 | mock = (await import("@actions/github")) as any; 85 | mock.getOctokit.mockImplementation(getOctokit as any); 86 | 87 | // this is a typescript workaround for redefining const values 88 | const mocked = mock as { context: {} }; 89 | mocked.context = defaultContext; 90 | }); 91 | 92 | afterEach(() => { 93 | clearContext(mock.context); 94 | mock.getOctokit.mockClear(); 95 | }); 96 | 97 | afterAll(() => { 98 | jest.restoreAllMocks(); 99 | }); 100 | 101 | // initially returned value gets stale 102 | const getMock = () => mock; 103 | 104 | return getMock; 105 | }; 106 | 107 | export const initGeneralTestEnv = () => { 108 | const restore = MockedEnv(process.env); 109 | 110 | beforeAll(() => { 111 | if (!nock.isActive()) { 112 | nock.activate(); 113 | } 114 | nock.disableNetConnect(); 115 | }); 116 | 117 | beforeEach(() => { 118 | jest.resetModules(); 119 | jest.clearAllMocks(); 120 | nock.cleanAll(); 121 | }); 122 | 123 | afterEach(() => { 124 | restore(); 125 | }); 126 | 127 | afterAll(() => { 128 | jest.restoreAllMocks(); 129 | nock.restore(); 130 | nock.enableNetConnect(); 131 | }); 132 | }; 133 | 134 | export const getSetFailedMock = () => { 135 | const setFailedMock = jest 136 | .fn() 137 | .mockImplementation(core.setFailed) as jest.MockedFunction< 138 | typeof core.setFailed 139 | >; 140 | 141 | beforeEach(async () => { 142 | const core = await import("@actions/core"); 143 | jest.spyOn(core, "setFailed").mockImplementation(setFailedMock); 144 | setFailedMock.mockClear(); 145 | }); 146 | 147 | return setFailedMock; 148 | }; 149 | 150 | export const convertTrueToStringOnLeafs = ( 151 | _obj: Record, 152 | setNull = false 153 | ): Record => { 154 | const obj = _.cloneDeep(_obj); 155 | const paths = getAllPaths(obj); 156 | for (const path of paths) { 157 | const value = _.get(obj, path); 158 | if (value === true) { 159 | _.set(obj, path, "required"); 160 | } else if (setNull && _.isNull(value)) { 161 | _.set(obj, path, "optional"); 162 | } else { 163 | _.unset(obj, path); 164 | } 
165 | } 166 | return obj as any; 167 | }; 168 | -------------------------------------------------------------------------------- /tsconfig.build.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "include": ["src/*"], 4 | "exclude": ["src/tests/*"] 5 | } 6 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2020", 4 | "module": "CommonJS", 5 | "moduleResolution": "node", 6 | "allowSyntheticDefaultImports": true, 7 | "esModuleInterop": true, 8 | "isolatedModules": true, 9 | "pretty": true, 10 | "noEmit": false, 11 | "strict": true, 12 | "noEmitOnError": true, 13 | "noImplicitReturns": true, 14 | "noImplicitThis": true, 15 | "noUnusedLocals": true, 16 | "noUncheckedIndexedAccess": true, 17 | "noUnusedParameters": false, 18 | "noImplicitAny": false, 19 | "rootDir": "./", 20 | "outDir": "./build", 21 | "sourceMap": true, 22 | "inlineSources": true, 23 | "skipLibCheck": true, 24 | "resolveJsonModule": true, 25 | "baseUrl": "./", 26 | "paths": { 27 | "src/*": ["src/*"], 28 | "#/*": ["src/modules/*"] 29 | } 30 | }, 31 | "include": ["src"] 32 | } 33 | --------------------------------------------------------------------------------