├── .prettierrc ├── .dockerignore ├── .gitignore ├── .prettierignore ├── Dockerfile ├── src ├── emitter.ts ├── github.mock.ts ├── configFormat │ ├── json.test.ts │ ├── configFormat.ts │ ├── json.ne │ ├── yamlFormat.ts │ ├── json.js │ └── jsonFormat.ts ├── sentry.server.ts ├── util.ts ├── rdjson │ └── DiagnosticResult.jsonschema.d.ts ├── types.ts ├── github.ts ├── check.ts ├── index.ts ├── config.test.ts ├── config.ts └── configSchema.ts ├── test ├── test_multiple_rules.yaml ├── test_event_on.yaml ├── readme_rule.json └── readme_rule.yaml ├── renovate.json ├── tsconfig.json ├── .github └── workflows │ ├── actionlint.yaml │ └── lint.yaml ├── LICENSE ├── package.json ├── eslint.config.mjs └── README.md /.prettierrc: -------------------------------------------------------------------------------- 1 | {} 2 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | /node_modules/ 2 | /dist/ 3 | /Dockerfile 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /node_modules/ 2 | .DS_Store 3 | /dist/ 4 | /dist-ssr/ 5 | *.local 6 | 7 | .env 8 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | /dist/ 2 | /node_modules/ 3 | /deploy/ 4 | /crates/*/pkg 5 | /crates/*/target 6 | src/configFormat/json.js 7 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22-bullseye-slim 2 | 3 | WORKDIR /app 4 | 5 | COPY package.json yarn.lock ./ 6 | RUN yarn install 7 | 8 | COPY ./ ./ 9 | RUN yarn build 10 | 11 | CMD ["npm", "start"] 12 | -------------------------------------------------------------------------------- /src/emitter.ts: -------------------------------------------------------------------------------- 1 | import mitt from "mitt"; 2 | import { type SemErrorPayload, type SynErrorPayload } from "./types"; 3 | 4 | type Events = { 5 | semerror: SemErrorPayload; 6 | synerror: SynErrorPayload; 7 | }; 8 | 9 | export const emitter = mitt(); 10 | -------------------------------------------------------------------------------- /test/test_multiple_rules.yaml: -------------------------------------------------------------------------------- 1 | version: "0" 2 | rules: 3 | - issue: 4 | assignees: 5 | - a 6 | project: 1 7 | 8 | - issue: 9 | assignees: 10 | - b 11 | project: 2 12 | 13 | - issue: 14 | assignees: 15 | - c 16 | project: 3 17 | -------------------------------------------------------------------------------- /test/test_event_on.yaml: -------------------------------------------------------------------------------- 1 | version: "0" 2 | rules: 3 | - project: 1 4 | on: 5 | issue: 6 | - opened 7 | - assigned 8 | issue: 9 | assignees: 10 | - assignee 11 | - project: 2 12 | on: 13 | issue: opened 14 | pr: opened 15 | repo: 16 | name: 17 | - autopj 18 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": ["config:recommended"], 3 | "packageRules": [ 4 | { 5 | "matchUpdateTypes": ["minor", "patch"], 6 | "automerge": false 7 | }, 8 | { 9 | 
"matchDatasources": ["npm"], 10 | "minimumReleaseAge": "7 days" 11 | } 12 | ], 13 | "postUpdateOptions": ["yarnDedupeHighest"] 14 | } 15 | -------------------------------------------------------------------------------- /src/github.mock.ts: -------------------------------------------------------------------------------- 1 | import { type Octokit } from "@octokit/core"; 2 | import { type OctokitResponse } from "@octokit/types"; 3 | 4 | export const OctokitRestMock = (f: () => Promise>) => { 5 | const octokit = { 6 | request: async (_route: string, _options?: any) => await f(), 7 | } as unknown as Octokit; 8 | return octokit; 9 | }; 10 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@tsconfig/node22/tsconfig.json", 3 | "ts-node": { 4 | "transpileOnly": true, 5 | "files": true 6 | }, 7 | "compilerOptions": { 8 | "baseUrl": "./", 9 | "allowJs": true, 10 | "outDir": "dist", 11 | 12 | "sourceMap": true, 13 | "inlineSources": true, 14 | "sourceRoot": "/" 15 | }, 16 | "include": ["./src"] 17 | } 18 | -------------------------------------------------------------------------------- /src/configFormat/json.test.ts: -------------------------------------------------------------------------------- 1 | import { expect, it } from "vitest"; 2 | import { JsonFormat } from "./jsonFormat"; 3 | 4 | it("can parse empty json", () => { 5 | const parsed = new JsonFormat().parse("{}"); 6 | expect(parsed.is_ok).toBeTruthy(); 7 | }); 8 | 9 | it("can parse a json with trailing comma", () => { 10 | const parsed = new JsonFormat().parse('{"a": 1,}'); 11 | expect(parsed.is_ok).toBeTruthy(); 12 | }); 13 | -------------------------------------------------------------------------------- /src/sentry.server.ts: -------------------------------------------------------------------------------- 1 | import * as Sentry from "@sentry/node"; 2 | import { z } from "zod"; 3 | 4 | const envSchema = z.object({ 5 | SENTRY_DSN: z.string({ required_error: "SENTRY_DSN is required" }), 6 | }); 7 | const envParseResult = envSchema.safeParse(process.env); 8 | if (envParseResult.success) { 9 | const envInput = { 10 | SENTRY_DSN: envParseResult.data.SENTRY_DSN, 11 | }; 12 | Sentry.init({ 13 | dsn: envInput.SENTRY_DSN, 14 | tracesSampleRate: 0.1, 15 | }); 16 | } 17 | -------------------------------------------------------------------------------- /.github/workflows/actionlint.yaml: -------------------------------------------------------------------------------- 1 | name: reviewdog / actionlint 2 | 3 | on: 4 | pull_request: 5 | paths: 6 | - ".github/workflows/**" 7 | 8 | jobs: 9 | actionlint: 10 | name: actionlint with reviewdog 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v4 14 | 15 | - name: actionlint 16 | uses: reviewdog/action-actionlint@v1.67.0 17 | with: 18 | github_token: ${{ secrets.GITHUB_TOKEN }} 19 | reporter: github-pr-review 20 | -------------------------------------------------------------------------------- /.github/workflows/lint.yaml: -------------------------------------------------------------------------------- 1 | name: lint and test 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | - main 10 | 11 | jobs: 12 | lint: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - name: Check out the repo 16 | uses: actions/checkout@v4 17 | 18 | - name: Setup Node.js 19 | uses: actions/setup-node@v4.4.0 20 | with: 21 | node-version: 22 22 
| 23 | - name: Install Dependencies 24 | run: yarn install --frozen-lockfile 25 | 26 | - name: Lint ESLint 27 | run: yarn lint:eslint 28 | 29 | - name: Lint Prettier 30 | run: yarn lint:prettier 31 | 32 | - name: TypeCheck 33 | run: yarn typecheck 34 | 35 | - name: Test 36 | run: yarn test 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 ArkEdge Space Inc. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /test/readme_rule.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0", 3 | "rules": [ 4 | { 5 | "repo": { 6 | "name": ["example-repo1", "example-repo2", "/example-/"], 7 | "topics": ["example-topic"] 8 | }, 9 | "issue": { "assignees": ["octocat", "codercat"], "labels": "bug" }, 10 | "project": 1 11 | }, 12 | { 13 | "repo": { 14 | "full_name": "/the_owner/example-/", 15 | "description": "/repository|repo/", 16 | "private": false 17 | }, 18 | "issue": { "assignees": ["org/teams"] }, 19 | "project": [2, 3] 20 | }, 21 | { 22 | "repo": { "fork": false }, 23 | "issue": { "assignees": ["org/teams", "ghost"] }, 24 | "project": [4] 25 | }, 26 | { 27 | "project": [5], 28 | "repo": { "name": "autoproject" }, 29 | "pr": { 30 | "reviewers": ["org/teams", "octocat"], 31 | "assignees": ["org/teams", "octocat"] 32 | } 33 | }, 34 | { 35 | "project": 6, 36 | "repo": { "name": "autoproject" }, 37 | "on": { "pr": "assigned" } 38 | }, 39 | { "project": { "only": 9 }, "pr": { "head": { "ref": "/^renovate/.*/" } } } 40 | ] 41 | } 42 | -------------------------------------------------------------------------------- /src/configFormat/configFormat.ts: -------------------------------------------------------------------------------- 1 | import { type ObjPath, type Range2 } from "../types"; 2 | import { JsonFormat } from "./jsonFormat"; 3 | import { YamlFormat } from "./yamlFormat"; 4 | 5 | export type ParseResult = 6 | | { 7 | is_ok: true; 8 | value: T; 9 | } 10 | | { 11 | is_ok: false; 12 | error: E; 13 | }; 14 | 15 | export type ConfigFormatKind = "yaml" | "json"; 16 | type KindTagged = { 17 | kind: ConfigFormatKind; 18 | value: T; 19 | }; 20 | export type EitherFormat = YamlFormat | JsonFormat; 21 | 22 | export function getFormat(kind: ConfigFormatKind): EitherFormat 
{ 23 | if (kind === "yaml") { 24 | return new YamlFormat(); 25 | } else { 26 | return new JsonFormat(); 27 | } 28 | } 29 | 30 | export interface Format { 31 | parse: (s: string) => ParseResult, KindTagged>; 32 | } 33 | 34 | export interface Document { 35 | toJS: () => unknown; 36 | asNode: () => DocumentNode; 37 | getIn: (path: ObjPath) => (DocumentNode & FindKey) | undefined; 38 | hasIn: (path: ObjPath) => boolean; 39 | } 40 | 41 | export interface FindKey { 42 | findKeyAsMap: (key: string) => DocumentNode | null; 43 | } 44 | 45 | export interface DocumentNode { 46 | loc: Range2; 47 | } 48 | -------------------------------------------------------------------------------- /test/readme_rule.yaml: -------------------------------------------------------------------------------- 1 | version: "0" 2 | rules: 3 | - repo: 4 | # Regular expressions and plain strings can be mixed 5 | name: 6 | - example-repo1 7 | - example-repo2 8 | - /example-/ 9 | # Use "topics" (not "topic") even when there is only one 10 | topics: 11 | - example-topic 12 | issue: 13 | # Use "assignees" (not "assignee") even for a single person 14 | assignees: 15 | # Write the login name (the one that appears in the URL) 16 | - octocat 17 | - codercat 18 | labels: bug 19 | # project is required. repo and issue may both be present or both be absent (a warning is emitted in that case) 20 | project: 1 21 | - repo: 22 | # Parsed as a string rather than a regex literal, so no escaping is needed 23 | full_name: /the_owner/example-/ 24 | description: /repository|repo/ 25 | private: false 26 | issue: 27 | assignees: 28 | # Teams can be specified; equivalent to listing their members directly. 29 | # Members are fetched on the first evaluation after autopj starts. 30 | - org/teams 31 | # Multiple project ids can be specified 32 | project: 33 | - 2 34 | - 3 35 | - repo: 36 | fork: false 37 | issue: 38 | assignees: 39 | - org/teams 40 | - ghost 41 | project: 42 | - 4 43 | - project: # Properties are order-independent, so this may come first, for example 44 | - 5 45 | repo: 46 | name: autoproject 47 | # Cannot be specified together with issue. When pr is specified, the rule fires only on PR-related events. 48 | pr: 49 | reviewers: 50 | # Specifying a Team as a reviewer is not supported; this is the same as listing multiple users. 51 | - org/teams 52 | - octocat 53 | assignees: 54 | - org/teams 55 | - octocat 56 | - project: 6 57 | repo: 58 | name: autoproject 59 | # The trigger timing can be specified. If omitted, the rule fires on every event 60 | on: 61 | pr: assigned 62 | - project: 63 | # One of the exclusion (blacklist) rules 64 | # If "only" matches, all other rules are ignored and only this one applies 65 | # "only" and "not" take precedence over normal rules 66 | only: 9 67 | pr: 68 | head: 69 | ref: /^renovate/.*/ 70 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "autoproject", 3 | "version": "1.1.0", 4 | "license": "MIT", 5 | "scripts": { 6 | "check": "node dist/check.js", 7 | "start": "node dist/index.js", 8 | "compileNearley": "echo '/* eslint-disable */' > src/configFormat/json.js && nearleyc src/configFormat/json.ne >> src/configFormat/json.js", 9 | "build": "tsc", 10 | "test": "vitest run", 11 | "typecheck": "tsc --noEmit", 12 | "lint:prettier": "prettier . --check", 13 | "lint:eslint": "eslint . 
--format stylish", 14 | "lint": "run-p lint:*", 15 | "fix:prettier": "yarn lint:prettier --write", 16 | "fix:eslint": "yarn lint:eslint --fix", 17 | "fix": "run-s fix:prettier fix:eslint" 18 | }, 19 | "dependencies": { 20 | "@octokit/app": "14.1.0", 21 | "@octokit/graphql-schema": "15.26.0", 22 | "@prantlf/jsonlint": "16.0.0", 23 | "@sentry/node": "^9.12.0", 24 | "dotenv": "16.6.1", 25 | "fuse.js": "7.1.0", 26 | "log4js": "6.9.1", 27 | "mitt": "3.0.1", 28 | "nearley": "2.20.1", 29 | "yaml": "2.8.1", 30 | "yargs": "18.0.0", 31 | "zod": "3.25.76" 32 | }, 33 | "devDependencies": { 34 | "@octokit/types": "13.10.0", 35 | "@tsconfig/node22": "22.0.2", 36 | "@types/nearley": "2.11.5", 37 | "@types/node": "22.18.1", 38 | "@types/yargs": "17.0.33", 39 | "@typescript-eslint/eslint-plugin": "8.42.0", 40 | "@typescript-eslint/parser": "8.42.0", 41 | "eslint": "9.35.0", 42 | "eslint-config-love": "121.0.0", 43 | "eslint-config-prettier": "10.1.8", 44 | "eslint-plugin-import": "2.32.0", 45 | "eslint-plugin-n": "17.21.3", 46 | "eslint-plugin-prettier": "5.5.4", 47 | "eslint-plugin-promise": "7.2.1", 48 | "npm-run-all2": "8.0.4", 49 | "prettier": "3.6.2", 50 | "ts-node": "10.9.2", 51 | "typescript": "5.9.2", 52 | "vitest": "3.2.4" 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /eslint.config.mjs: -------------------------------------------------------------------------------- 1 | import { globalIgnores } from "eslint/config"; 2 | import tseslint from "typescript-eslint"; 3 | import globals from "globals"; 4 | import eslint from "@eslint/js"; 5 | import love from "eslint-config-love"; 6 | import eslintConfigPrettier from "eslint-config-prettier"; 7 | 8 | export default tseslint.config({ 9 | extends: [ 10 | globalIgnores(["dist/", "crates/*/pkg", "src/configFormat/json.js"]), 11 | eslint.configs.recommended, 12 | tseslint.configs.recommended, 13 | love, 14 | eslintConfigPrettier, 15 | ], 16 | languageOptions: { 17 | ecmaVersion: "latest", 18 | sourceType: "module", 19 | }, 20 | 21 | files: ["**/*.ts"], 22 | rules: { 23 | "@typescript-eslint/consistent-type-definitions": "off", 24 | "@typescript-eslint/explicit-function-return-type": "off", 25 | "@typescript-eslint/no-non-null-assertion": "off", 26 | "@typescript-eslint/no-magic-numbers": "off", 27 | "@typescript-eslint/no-import-type-side-effects": "off", 28 | "@typescript-eslint/no-unsafe-type-assertion": "off", 29 | "@typescript-eslint/no-explicit-any": "off", 30 | "@typescript-eslint/prefer-destructuring": "off", 31 | "@typescript-eslint/init-declarations": "off", 32 | "@typescript-eslint/class-methods-use-this": "off", 33 | "@typescript-eslint/no-unsafe-assignment": "off", 34 | "@typescript-eslint/no-unsafe-return": "off", 35 | "@typescript-eslint/no-unsafe-member-access": "off", 36 | "@typescript-eslint/no-unnecessary-condition": "off", 37 | "@typescript-eslint/require-await": "off", 38 | "@typescript-eslint/strict-boolean-expressions": "off", 39 | "@typescript-eslint/no-unnecessary-type-parameters": "off", 40 | "@typescript-eslint/no-unsafe-call": "off", 41 | "@typescript-eslint/no-unnecessary-type-arguments": "off", 42 | "@typescript-eslint/switch-exhaustiveness-check": "off", 43 | "eslint-comments/require-description": "off", 44 | "eslint-comments/no-unlimited-disable": "off", 45 | "no-console": "off", 46 | "arrow-body-style": "off", 47 | complexity: "off", 48 | }, 49 | }); 50 | -------------------------------------------------------------------------------- /src/configFormat/json.ne: 
-------------------------------------------------------------------------------- 1 | # http://www.json.org/ 2 | # http://www.asciitable.com/ 3 | @lexer lexer 4 | 5 | json -> _ value _ {% ([,val,]) => val %} 6 | 7 | object -> 8 | "{" _ "}" {% empty("Object") %} 9 | | "{" _ property (_ "," _ property):* _ ("," _ ):? "}" {% children("Object") %} 10 | 11 | array -> 12 | "[" _ "]" {% empty("Array") %} 13 | | "[" _ value (_ "," _ value):* _ ("," _ ):? "]" {% children("Array") %} 14 | 15 | value -> 16 | object {% id %} 17 | | array {% id %} 18 | | %true {% literal() %} 19 | | %false {% literal() %} 20 | | %null {% literal() %} 21 | | %number {% literal() %} 22 | | %string {% literal() %} 23 | 24 | property -> key _ ":" _ value {% property %} 25 | 26 | key -> %string {% literal("Identifier") %} 27 | 28 | _ -> null | %space {% d => null %} 29 | 30 | @{% 31 | const moo = require('moo'); 32 | 33 | let lexer = moo.compile({ 34 | space: {match: /\s+/, lineBreaks: true}, 35 | number: /-?(?:[0-9]|[1-9][0-9]+)(?:\.[0-9]+)?(?:[eE][-+]?[0-9]+)?\b/, 36 | string: /"(?:\\["bfnrt\/\\]|\\u[a-fA-F0-9]{4}|[^"\\])*"/, 37 | "{": "{", 38 | "}": "}", 39 | "[": "[", 40 | "]": "]", 41 | ",": ",", 42 | ":": ":", 43 | true: "true", 44 | false: "false", 45 | null: "null", 46 | }) 47 | 48 | function empty(type) { 49 | return function ([open,,close]) { 50 | return { 51 | type, 52 | children: [], 53 | loc: { start: pos(open), end: pos(close, 1) } 54 | }; 55 | }; 56 | } 57 | 58 | function children(type) { 59 | return function ([open,,first,rest,,,close]) { 60 | return { 61 | type, 62 | children: [ 63 | first, 64 | ...rest.map(([,,,property]) => property) 65 | ], 66 | loc: { start: pos(open), end: pos(close, 1) } 67 | }; 68 | }; 69 | } 70 | 71 | function literal() { 72 | return function ([token]) { 73 | return { 74 | type: "Literal", 75 | value: JSON.parse(token.value), 76 | raw: token.text, 77 | loc: { 78 | start: pos(token), 79 | end: pos(token, token.text.length) 80 | } 81 | }; 82 | }; 83 | } 84 | 85 | function property([key,,,,value]) { 86 | return { 87 | type: "Property", 88 | key, 89 | value, 90 | loc: { 91 | start: key.loc.start, 92 | end: value.loc.end 93 | } 94 | }; 95 | } 96 | 97 | function pos({ line, col, offset }, add = 0) { 98 | return { 99 | line, 100 | col: col + add, 101 | offset: offset + add 102 | }; 103 | } 104 | %} 105 | -------------------------------------------------------------------------------- /src/configFormat/yamlFormat.ts: -------------------------------------------------------------------------------- 1 | import YAML, { LineCounter, type ParsedNode } from "yaml"; 2 | import { 3 | type Document, 4 | type DocumentNode, 5 | type FindKey, 6 | type Format, 7 | type ParseResult, 8 | } from "./configFormat"; 9 | import { type ObjPath } from "src/types"; 10 | 11 | export type YamlTagged = { 12 | kind: "yaml"; 13 | value: T; 14 | }; 15 | 16 | function wrap(value: T): YamlTagged { 17 | return { 18 | kind: "yaml", 19 | value, 20 | }; 21 | } 22 | 23 | type ParseResultWrapped = ParseResult, YamlTagged>; 24 | 25 | export class YamlFormat implements Format { 26 | parse( 27 | s: string, 28 | ): ParseResultWrapped> { 29 | const lineCounter = new LineCounter(); 30 | const doc = YAML.parseDocument(s, { lineCounter, prettyErrors: true }); 31 | if (doc.errors.length !== 0) { 32 | return { 33 | is_ok: false, 34 | error: wrap(doc), 35 | }; 36 | } else { 37 | return { 38 | is_ok: true, 39 | value: wrap(new YamlDocument(doc, lineCounter)), 40 | }; 41 | } 42 | } 43 | } 44 | 45 | export class YamlDocument implements 
Document { 46 | constructor( 47 | private readonly doc: YAML.Document.Parsed, 48 | private readonly lineCounter: LineCounter, 49 | ) {} 50 | 51 | toJS() { 52 | return this.doc.toJS(); 53 | } 54 | 55 | asNode(): DocumentNode { 56 | return { 57 | loc: { 58 | start: this.lineCounter.linePos(this.doc.range[0]), 59 | end: this.lineCounter.linePos(this.doc.range[2]), 60 | }, 61 | }; 62 | } 63 | 64 | getIn(path: ObjPath): (DocumentNode & FindKey) | undefined { 65 | const n = this.doc.getIn(path, true) as ParsedNode | undefined; 66 | if (typeof n === "undefined") { 67 | return undefined; 68 | } else { 69 | return toDocumentNode(n, this.lineCounter); 70 | } 71 | } 72 | 73 | hasIn(path: ObjPath): boolean { 74 | return this.doc.hasIn(path); 75 | } 76 | } 77 | 78 | function toDocumentNode( 79 | node: YAML.ParsedNode, 80 | lineCounter: LineCounter, 81 | ): DocumentNode & FindKey { 82 | return { 83 | loc: { 84 | start: lineCounter.linePos(node.range[0]), 85 | end: lineCounter.linePos(node.range[1]), 86 | }, 87 | findKeyAsMap: findDelegator(node, lineCounter), 88 | }; 89 | } 90 | 91 | function findDelegator(node: YAML.ParsedNode | null, lineCounter: LineCounter) { 92 | return (keyName: string) => { 93 | if (YAML.isMap(node)) { 94 | const key = node?.items.find( 95 | (pair) => pair.key.toString() === keyName, 96 | )?.key; 97 | if (typeof key === "undefined") { 98 | return null; 99 | } 100 | return { 101 | loc: { 102 | start: lineCounter.linePos(key.range[0]), 103 | end: lineCounter.linePos(key.range[1]), 104 | }, 105 | }; 106 | } else { 107 | return null; 108 | } 109 | }; 110 | } 111 | -------------------------------------------------------------------------------- /README.md: --------------------------------------------------------------------------------

# autoproject

Automatically registers Issues to Projects.

Based on a config file, it automatically registers Issues and PRs to Projects. With the appropriate environment variables set, it accepts webhooks once started and adds items to Projects by sending GraphQL requests, so the GitHub App itself needs to be configured accordingly.

## config

Sample: [test/readme_rule.yaml](/test/readme_rule.yaml)

Rules are combined with OR, the properties within a single rule with AND, and the values listed under a property with OR. The intent is that each rule is added independently, and that a rule is narrowed down by adding properties or by using regular expressions.
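As a quick illustration of these semantics, consider the sketch below; the repository names, topic, assignees, and project numbers are placeholders. Within the first rule, `repo` and `issue` must both match (AND), while either assignee in the list is enough (OR). The second rule is evaluated independently of the first (OR between rules).

```yaml
version: "0"
rules:
  # Properties inside one rule are ANDed: repo AND issue must both match.
  - repo:
      name: example-repo1 # placeholder repository name
    issue:
      assignees:
        # Values in a list are ORed: either assignee matches.
        - octocat
        - codercat
    project: 1 # placeholder project number
  # A second, independent rule; rules are ORed with each other.
  - repo:
      topics:
        - example-topic
    project: 2
```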
The schema is equivalent to the following TypeScript (type names differ from the actual implementation):

```ts
type RulesYaml = (Partial<Payload> & { project: Project })[];

type Project =
  | number
  | {
      not: number | number[];
    }
  | Array<
      | number
      | {
          not: number | number[];
        }
    >
  | {
      only: number | number[];
    }
  | {
      reject: {};
    };

type Payload = {
  repo: Partial<Repo>;
  issue: Partial<Issue>;
  pr: Partial<PullRequest>;
  on: Partial<EventTarget>;
};

type Repo = {
  name: string | string[];
  full_name: string | string[];
  description: string | string[];
  fork: boolean;
  private: boolean;
  topics: string | string[];
};

type Issue = {
  assignees: string | string[];
  labels: string | string[];
};

type PullRequest = {
  reviewers: string | string[];
  assignees: string | string[];
  labels: string | string[];
  head: {
    label: string | string[];
    ref: string | string[];
  };
};

type EventTarget = {
  issue:
    | "any"
    | "opened"
    | "assigned"
    | "labeled"
    | ("opened" | "assigned" | "labeled")[];
  pr:
    | "any"
    | "opened"
    | "assigned"
    | "labeled"
    | ("opened" | "assigned" | "labeled")[];
};
```

## GitHub App settings

### Webhook URL

The `/api/github/webhooks` part is fixed.

```
https://example.com/api/github/webhooks
```

### Permissions

- Repository permissions
  - Issues: Read-only
  - Pull requests: Read-only
- Organization permissions
  - Members: Read-only
  - Projects: Read and Write

### Subscribe to events

- Issues
- Pull request

## Running with Docker

This section describes a simple way to try it out with Docker.
Check the code if you need other options.

### Preparing the required resources

Several environment variables must be set, so create a .env file.

```bash
RULES_FILE=/app/rules.yaml
GITHUB_APP_ID=[github_app_id]
PORT=8080
WEBHOOK_SECRET=[github_webhook_secret]
GITHUB_APP_PRIVATE_KEY_FILE=/app/github.key
```

In addition, place the following files in the root directory:

- rules.yaml
  - Write it following the notation above (a minimal sketch is shown after the run commands below)
- github.key
  - Register the GitHub App and download its private key

### Run commands

Example commands:

```bash
cd ./autoproject
docker build . -t autoproject
docker run --env-file .env -p 8080:8080 autoproject
```
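For the `rules.yaml` prepared above, a minimal configuration could look like the following sketch; the repository name and project number are placeholders, and the full set of properties is described in the schema section.

```yaml
# /app/rules.yaml -- minimal sketch; repository name and project number are placeholders
version: "0"
rules:
  - repo:
      name: example-repo
    project: 1
```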
### Points to note

- When running, the service must be reachable by webhooks from GitHub.
- When registering the GitHub App, set the webhook path to `https://[your-host]/api/github/webhooks`.

-------------------------------------------------------------------------------- /src/util.ts: -------------------------------------------------------------------------------- 1 | import * as fs from "node:fs"; 2 | import log4js, { getLogger } from "log4js"; 3 | import path from "node:path"; 4 | import YAML, { isMap, isScalar } from "yaml"; 5 | import { emitter } from "./emitter"; 6 | import { 7 | type ObjPath, 8 | type SemErrorPayload, 9 | stringifyObjPath, 10 | type SynErrorPayload, 11 | } from "./types"; 12 | import assert from "node:assert"; 13 | import { 14 | type ConfigFormatKind, 15 | type EitherFormat, 16 | getFormat, 17 | } from "./configFormat/configFormat"; 18 | 19 | export const readAndParse = <T>( 20 | filepath: string | undefined, 21 | parser: (format: EitherFormat, filepath: string, src: string) => T, 22 | ) => { 23 | if (typeof filepath === "undefined") { 24 | return null; 25 | } 26 | const raw = fs.readFileSync(filepath, "utf-8"); 27 | const ext = path.extname(filepath).substring(1); 28 | let formatKind: ConfigFormatKind; 29 | switch (ext) { 30 | case "json": 31 | formatKind = "json"; 32 | break; 33 | case "yaml": 34 | case "yml": 35 | formatKind = "yaml"; 36 | break; 37 | default: 38 | assert( 39 | false, 40 | "received unknown extension. please use `.json` or `.yaml` or `.yml`.", 41 | ); 42 | } 43 | return parser(getFormat(formatKind), filepath, raw); 44 | }; 45 | 46 | export const renameProp = ( 47 | doc: YAML.Document.Parsed, 48 | path: ObjPath, 49 | oldProp: string, 50 | newProp: string, 51 | ) => { 52 | const node = doc.getIn(path); 53 | assert(isMap(node)); 54 | YAML.visit(node, { 55 | Pair(_, pair) { 56 | if (isScalar(pair.key) && pair.key.value === oldProp) { 57 | pair.key.value = newProp; 58 | } 59 | return YAML.visit.SKIP; 60 | }, 61 | }); 62 | }; 63 | 64 | export const doLogError = () => { 65 | emitter.on("semerror", ({ filepath, diags }: SemErrorPayload) => { 66 | const cat = path.basename(filepath); 67 | getLogger(cat).error( 68 | "Some semantic errors occurred while validating config file. Below are reported:", 69 | ); 70 | for (const diag of diags) { 71 | const _path = stringifyObjPath(diag.objPath); 72 | const header = _path === "" ? "" : _path + ": "; 73 | getLogger(cat).error(`${header}${diag.msg}`); 74 | } 75 | }); 76 | 77 | emitter.on("synerror", ({ filepath, diags }: SynErrorPayload) => { 78 | const cat = path.basename(filepath); 79 | getLogger(cat).error( 80 | "Some syntactic errors occurred while validating config file. 
Below are reported:", 81 | ); 82 | for (const diag of diags) { 83 | const { range, msg } = diag; 84 | if (range !== null) { 85 | const { line, col } = range.start; 86 | getLogger(cat).error(`At line ${line}, column ${col}: ${msg}`); 87 | } else { 88 | getLogger(cat).error(`${msg} at unknown pos`); 89 | } 90 | } 91 | }); 92 | }; 93 | 94 | export const configLogger = () => { 95 | log4js.configure({ 96 | appenders: { 97 | console: { 98 | type: "console", 99 | layout: { 100 | type: "colored", 101 | }, 102 | }, 103 | stdout: { 104 | type: "stdout", 105 | layout: { 106 | type: "basic", 107 | }, 108 | }, 109 | }, 110 | categories: { 111 | default: { 112 | appenders: ["stdout"], 113 | level: "all", 114 | }, 115 | test: { 116 | appenders: ["console"], 117 | level: "all", 118 | }, 119 | }, 120 | }); 121 | }; 122 | 123 | export const readGithubAppPrivateKey = ( 124 | path: string | undefined, 125 | content: string | undefined, 126 | ): string => { 127 | if (path !== undefined) { 128 | getLogger().info(`read private key file: ${path}`); 129 | return fs.readFileSync(path, "utf-8"); 130 | } 131 | if (content !== undefined) { 132 | getLogger().info("use private key passed by value"); 133 | return content; 134 | } 135 | throw new Error("invalid args. path and content are undefined"); 136 | }; 137 | 138 | declare const _loggerCat: unique symbol; 139 | export type LoggerCat = string & { readonly [_loggerCat]: never }; 140 | 141 | export const getLoggerCat = (filepath: string) => 142 | path.basename(filepath) as LoggerCat; 143 | -------------------------------------------------------------------------------- /src/rdjson/DiagnosticResult.jsonschema.d.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * This file was automatically generated by json-schema-to-typescript. 4 | * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file, 5 | * and run json-schema-to-typescript to regenerate this file. 6 | */ 7 | 8 | /** 9 | * Result of diagnostic tool such as a compiler or a linter. 10 | * It's intended to be used as top-level structured format which represents a 11 | * whole result of a diagnostic tool. 12 | */ 13 | export interface DiagnosticResult { 14 | diagnostics?: { 15 | /** 16 | * The diagnostic's message. 17 | */ 18 | message?: string; 19 | /** 20 | * Location at which this diagnostic message applies. 21 | */ 22 | location?: { 23 | /** 24 | * File path. It could be either absolute path or relative path. 25 | */ 26 | path?: string; 27 | range?: Range; 28 | }; 29 | /** 30 | * This diagnostic's severity. 31 | * Optional. 32 | */ 33 | severity?: string | number; 34 | source?: Source; 35 | /** 36 | * This diagnostic's rule code. 37 | * Optional. 38 | */ 39 | code?: { 40 | /** 41 | * This rule's code/identifier. 42 | */ 43 | value?: string; 44 | /** 45 | * A URL to open with more information about this rule code. 46 | * Optional. 47 | */ 48 | url?: string; 49 | }; 50 | /** 51 | * Suggested fixes to resolve this diagnostic. 52 | * Optional. 53 | */ 54 | suggestions?: { 55 | range?: Range1; 56 | /** 57 | * A suggested text which replace the range. 58 | * For delete operations use an empty string. 59 | */ 60 | text?: string; 61 | }[]; 62 | /** 63 | * Experimental: If this diagnostic is converted from other formats, 64 | * original_output represents the original output which corresponds to this 65 | * diagnostic. 66 | * Optional. 
67 | */ 68 | original_output?: string; 69 | }[]; 70 | source?: Source1; 71 | /** 72 | * This diagnostics' overall severity. 73 | * Optional. 74 | */ 75 | severity?: string | number; 76 | } 77 | /** 78 | * Range in the file path. 79 | * Optional. 80 | */ 81 | export interface Range { 82 | start?: Position; 83 | end?: Position1; 84 | } 85 | /** 86 | * Required. 87 | */ 88 | export interface Position { 89 | /** 90 | * Line number, starting at 1. 91 | * Optional. 92 | */ 93 | line?: number; 94 | /** 95 | * Column number, starting at 1 (byte count in UTF-8). 96 | * Example: 'a𐐀b' 97 | * The column of a: 1 98 | * The column of 𐐀: 2 99 | * The column of b: 6 since 𐐀 is represented with 4 bytes in UTF-8. 100 | * Optional. 101 | */ 102 | column?: number; 103 | } 104 | /** 105 | * end can be omitted. Then the range is handled as zero-length (start == end). 106 | * Optional. 107 | */ 108 | export interface Position1 { 109 | /** 110 | * Line number, starting at 1. 111 | * Optional. 112 | */ 113 | line?: number; 114 | /** 115 | * Column number, starting at 1 (byte count in UTF-8). 116 | * Example: 'a𐐀b' 117 | * The column of a: 1 118 | * The column of 𐐀: 2 119 | * The column of b: 6 since 𐐀 is represented with 4 bytes in UTF-8. 120 | * Optional. 121 | */ 122 | column?: number; 123 | } 124 | /** 125 | * The source of this diagnostic, e.g. 'typescript' or 'super lint'. 126 | * Optional. 127 | */ 128 | export interface Source { 129 | /** 130 | * A human-readable string describing the source of diagnostics, e.g. 131 | * 'typescript' or 'super lint'. 132 | */ 133 | name?: string; 134 | /** 135 | * URL to this source. 136 | * Optional. 137 | */ 138 | url?: string; 139 | } 140 | /** 141 | * Range at which this suggestion applies. 142 | * To insert text into a document create a range where start == end. 143 | */ 144 | export interface Range1 { 145 | start?: Position; 146 | end?: Position1; 147 | } 148 | /** 149 | * The source of diagnostics, e.g. 'typescript' or 'super lint'. 150 | * Optional. 151 | */ 152 | export interface Source1 { 153 | /** 154 | * A human-readable string describing the source of diagnostics, e.g. 155 | * 'typescript' or 'super lint'. 156 | */ 157 | name?: string; 158 | /** 159 | * URL to this source. 160 | * Optional. 
161 | */ 162 | url?: string; 163 | } 164 | -------------------------------------------------------------------------------- /src/configFormat/json.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | // Generated automatically by nearley, version 2.20.1 3 | // http://github.com/Hardmath123/nearley 4 | (function () { 5 | function id(x) { return x[0]; } 6 | 7 | const moo = require('moo'); 8 | 9 | let lexer = moo.compile({ 10 | space: {match: /\s+/, lineBreaks: true}, 11 | number: /-?(?:[0-9]|[1-9][0-9]+)(?:\.[0-9]+)?(?:[eE][-+]?[0-9]+)?\b/, 12 | string: /"(?:\\["bfnrt\/\\]|\\u[a-fA-F0-9]{4}|[^"\\])*"/, 13 | "{": "{", 14 | "}": "}", 15 | "[": "[", 16 | "]": "]", 17 | ",": ",", 18 | ":": ":", 19 | true: "true", 20 | false: "false", 21 | null: "null", 22 | }) 23 | 24 | function empty(type) { 25 | return function ([open,,close]) { 26 | return { 27 | type, 28 | children: [], 29 | loc: { start: pos(open), end: pos(close, 1) } 30 | }; 31 | }; 32 | } 33 | 34 | function children(type) { 35 | return function ([open,,first,rest,,,close]) { 36 | return { 37 | type, 38 | children: [ 39 | first, 40 | ...rest.map(([,,,property]) => property) 41 | ], 42 | loc: { start: pos(open), end: pos(close, 1) } 43 | }; 44 | }; 45 | } 46 | 47 | function literal() { 48 | return function ([token]) { 49 | return { 50 | type: "Literal", 51 | value: JSON.parse(token.value), 52 | raw: token.text, 53 | loc: { 54 | start: pos(token), 55 | end: pos(token, token.text.length) 56 | } 57 | }; 58 | }; 59 | } 60 | 61 | function property([key,,,,value]) { 62 | return { 63 | type: "Property", 64 | key, 65 | value, 66 | loc: { 67 | start: key.loc.start, 68 | end: value.loc.end 69 | } 70 | }; 71 | } 72 | 73 | function pos({ line, col, offset }, add = 0) { 74 | return { 75 | line, 76 | col: col + add, 77 | offset: offset + add 78 | }; 79 | } 80 | var grammar = { 81 | Lexer: lexer, 82 | ParserRules: [ 83 | {"name": "json", "symbols": ["_", "value", "_"], "postprocess": ([,val,]) => val}, 84 | {"name": "object", "symbols": [{"literal":"{"}, "_", {"literal":"}"}], "postprocess": empty("Object")}, 85 | {"name": "object$ebnf$1", "symbols": []}, 86 | {"name": "object$ebnf$1$subexpression$1", "symbols": ["_", {"literal":","}, "_", "property"]}, 87 | {"name": "object$ebnf$1", "symbols": ["object$ebnf$1", "object$ebnf$1$subexpression$1"], "postprocess": function arrpush(d) {return d[0].concat([d[1]]);}}, 88 | {"name": "object$ebnf$2$subexpression$1", "symbols": [{"literal":","}, "_"]}, 89 | {"name": "object$ebnf$2", "symbols": ["object$ebnf$2$subexpression$1"], "postprocess": id}, 90 | {"name": "object$ebnf$2", "symbols": [], "postprocess": function(d) {return null;}}, 91 | {"name": "object", "symbols": [{"literal":"{"}, "_", "property", "object$ebnf$1", "_", "object$ebnf$2", {"literal":"}"}], "postprocess": children("Object")}, 92 | {"name": "array", "symbols": [{"literal":"["}, "_", {"literal":"]"}], "postprocess": empty("Array")}, 93 | {"name": "array$ebnf$1", "symbols": []}, 94 | {"name": "array$ebnf$1$subexpression$1", "symbols": ["_", {"literal":","}, "_", "value"]}, 95 | {"name": "array$ebnf$1", "symbols": ["array$ebnf$1", "array$ebnf$1$subexpression$1"], "postprocess": function arrpush(d) {return d[0].concat([d[1]]);}}, 96 | {"name": "array$ebnf$2$subexpression$1", "symbols": [{"literal":","}, "_"]}, 97 | {"name": "array$ebnf$2", "symbols": ["array$ebnf$2$subexpression$1"], "postprocess": id}, 98 | {"name": "array$ebnf$2", "symbols": [], "postprocess": function(d) {return 
null;}}, 99 | {"name": "array", "symbols": [{"literal":"["}, "_", "value", "array$ebnf$1", "_", "array$ebnf$2", {"literal":"]"}], "postprocess": children("Array")}, 100 | {"name": "value", "symbols": ["object"], "postprocess": id}, 101 | {"name": "value", "symbols": ["array"], "postprocess": id}, 102 | {"name": "value", "symbols": [(lexer.has("true") ? {type: "true"} : true)], "postprocess": literal()}, 103 | {"name": "value", "symbols": [(lexer.has("false") ? {type: "false"} : false)], "postprocess": literal()}, 104 | {"name": "value", "symbols": [(lexer.has("null") ? {type: "null"} : null)], "postprocess": literal()}, 105 | {"name": "value", "symbols": [(lexer.has("number") ? {type: "number"} : number)], "postprocess": literal()}, 106 | {"name": "value", "symbols": [(lexer.has("string") ? {type: "string"} : string)], "postprocess": literal()}, 107 | {"name": "property", "symbols": ["key", "_", {"literal":":"}, "_", "value"], "postprocess": property}, 108 | {"name": "key", "symbols": [(lexer.has("string") ? {type: "string"} : string)], "postprocess": literal("Identifier")}, 109 | {"name": "_", "symbols": []}, 110 | {"name": "_", "symbols": [(lexer.has("space") ? {type: "space"} : space)], "postprocess": d => null} 111 | ] 112 | , ParserStart: "json" 113 | } 114 | if (typeof module !== 'undefined'&& typeof module.exports !== 'undefined') { 115 | module.exports = grammar; 116 | } else { 117 | window.grammar = grammar; 118 | } 119 | })(); 120 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | import { type GetTeamMemberProp } from "./github"; 2 | import { 3 | type Team, 4 | type IssuesEvent, 5 | type PullRequestEvent, 6 | } from "@octokit/webhooks-types"; 7 | 8 | export interface Repo { 9 | name: string; 10 | full_name: string; 11 | private: boolean; 12 | description: string | null; 13 | fork: boolean; 14 | topics: string[]; 15 | } 16 | export interface User { 17 | login: string; 18 | } 19 | export interface Label { 20 | name: string; 21 | } 22 | export interface Issue { 23 | labels?: Label[]; 24 | assignees: User[]; 25 | } 26 | export interface PullRequestHead { 27 | label: string; 28 | ref: string; 29 | } 30 | export interface PullRequest { 31 | labels?: Label[]; 32 | requested_reviewers: Array; 33 | assignees: User[]; 34 | head: PullRequestHead; 35 | } 36 | 37 | export interface MatchArg { 38 | repository: Repo; 39 | sender: User; 40 | issue: (Issue & GetTeamMemberProp) | null; 41 | pr: (PullRequest & GetTeamMemberProp) | null; 42 | } 43 | 44 | export enum TargetProjKind { 45 | Number, 46 | Only, 47 | Reject, 48 | } 49 | 50 | export interface TargetProjNumber { 51 | isPositive: boolean; 52 | value: number[]; 53 | priority: number; 54 | } 55 | 56 | export interface TargetProjKindNumber { 57 | kind: TargetProjKind.Number; 58 | projectNumber: TargetProjNumber[]; 59 | } 60 | 61 | export interface TargetProjOnly { 62 | kind: TargetProjKind.Only; 63 | projectNumber: number[]; 64 | priority: number; 65 | } 66 | 67 | export interface TargetProjReject { 68 | kind: TargetProjKind.Reject; 69 | priority: number; 70 | } 71 | 72 | export type TargetProj = 73 | | TargetProjKindNumber 74 | | TargetProjOnly 75 | | TargetProjReject; 76 | 77 | export enum WebhookEventKind { 78 | Issue, 79 | PullRequest, 80 | } 81 | 82 | export type WebhookEvent = 83 | | { 84 | kind: WebhookEventKind.Issue; 85 | action: A1; 86 | } 87 | | { 88 | kind: WebhookEventKind.PullRequest; 89 | action: 
A2; 90 | }; 91 | 92 | export type WebhookEventAction = WebhookEvent< 93 | IssuesEvent["action"], 94 | PullRequestEvent["action"] 95 | >; 96 | 97 | export type EventTarget = 98 | | { 99 | kind: "any"; 100 | } 101 | | { 102 | kind: "oneof"; 103 | list: E[]; 104 | }; 105 | 106 | export type IssueActionTarget = { 107 | issueAction: EventTarget; 108 | }; 109 | 110 | export type PrActionTarget = { 111 | prAction: EventTarget; 112 | }; 113 | 114 | export type WebhookEventActionTarget = 115 | | ({ 116 | kind: "both"; 117 | } & IssueActionTarget & 118 | PrActionTarget) 119 | | ({ 120 | kind: WebhookEventKind.Issue; 121 | } & IssueActionTarget) 122 | | ({ 123 | kind: WebhookEventKind.PullRequest; 124 | } & PrActionTarget); 125 | 126 | export const isTargetWebhookEvent = ( 127 | et: WebhookEventActionTarget, 128 | event: WebhookEventAction, 129 | ) => { 130 | switch (event.kind) { 131 | case WebhookEventKind.Issue: 132 | return ( 133 | (et.kind === "both" || et.kind === WebhookEventKind.Issue) && 134 | isTargetEvent(et.issueAction, event.action) 135 | ); 136 | case WebhookEventKind.PullRequest: 137 | return ( 138 | (et.kind === "both" || et.kind === WebhookEventKind.PullRequest) && 139 | isTargetEvent(et.prAction, event.action) 140 | ); 141 | } 142 | }; 143 | 144 | export const isTargetEvent = (et: EventTarget, event: E) => { 145 | switch (et.kind) { 146 | case "any": 147 | return true; 148 | default: 149 | return et.list.includes(event); 150 | } 151 | }; 152 | 153 | export type ObjPath = Array; 154 | export const stringifyObjPath = (objPath: ObjPath) => 155 | objPath 156 | .map((p, index) => { 157 | if (index === 0) { 158 | return p.toString(); 159 | } 160 | if (typeof p === "string") { 161 | return `.${p}`; 162 | } else { 163 | return `[${p}]`; 164 | } 165 | }) 166 | .join(""); 167 | 168 | export type Pos = { 169 | line: number; 170 | col: number; 171 | }; 172 | 173 | export type Range = { 174 | start: Pos; 175 | end?: Pos; 176 | }; 177 | 178 | export type Range2 = { 179 | start: Pos; 180 | end: Pos; 181 | }; 182 | 183 | export type Diag = { 184 | range: Range | null; 185 | msg: string; 186 | }; 187 | 188 | export const nullPos: Pos = { 189 | line: 1, 190 | col: 1, 191 | }; 192 | 193 | export type SynDiag = Diag; 194 | 195 | export enum SemDiagKind { 196 | UnrecognizedKeys = "unrecognized_keys", 197 | Any = "any", 198 | } 199 | 200 | export type SemDiag = Diag & { 201 | objPath: ObjPath; 202 | diagKind: 203 | | { 204 | diagName: SemDiagKind.Any; 205 | } 206 | | { 207 | diagName: SemDiagKind.UnrecognizedKeys; 208 | key: { 209 | value: string; 210 | range: Range2; 211 | }; 212 | candidates: string[]; 213 | }; 214 | }; 215 | 216 | export type SynErrorPayload = { 217 | filepath: string; 218 | diags: SynDiag[]; 219 | }; 220 | 221 | export type SemErrorPayload = { 222 | filepath: string; 223 | diags: SemDiag[]; 224 | }; 225 | -------------------------------------------------------------------------------- /src/github.ts: -------------------------------------------------------------------------------- 1 | import { type Octokit } from "@octokit/core"; 2 | import assert from "node:assert"; 3 | import { type User as Login } from "./types.js"; 4 | import { type GraphqlResponseError } from "@octokit/graphql"; 5 | 6 | enum ProjectV2OwnerType { 7 | Organization = "org", 8 | User = "user", 9 | } 10 | 11 | const projectV2OwnerMap = new Map(); 12 | const orgMap = new Map(); 13 | const userMap = new Map(); 14 | 15 | // ad-hoc 16 | type Organization = { 17 | projectV2: { 18 | id: string; 19 | }; 20 | }; 21 | type 
User = Organization; 22 | 23 | async function fetchProjectIdCached( 24 | octokit: Octokit, 25 | login: string, 26 | projectNumber: number, 27 | ): Promise<{ 28 | projectId: string; 29 | }> { 30 | type OrgResponse = { organization: Organization }; 31 | type UserResponse = { user: User }; 32 | switch (projectV2OwnerMap.get(login)) { 33 | case ProjectV2OwnerType.Organization: { 34 | const id = orgMap.get(projectNumber); 35 | if (typeof id !== "undefined") { 36 | return { 37 | projectId: id, 38 | }; 39 | } 40 | const { 41 | organization: { projectV2 }, 42 | } = await octokit.graphql( 43 | `query($login: String!, $projectNumber: Int!) { 44 | organization(login: $login) { 45 | projectV2(number: $projectNumber) { 46 | id 47 | } 48 | } 49 | }`, 50 | { 51 | login, 52 | projectNumber, 53 | }, 54 | ); 55 | const projectId = projectV2.id; 56 | orgMap.set(projectNumber, projectId); 57 | return { 58 | projectId, 59 | }; 60 | } 61 | case ProjectV2OwnerType.User: { 62 | const id = userMap.get(projectNumber); 63 | if (typeof id !== "undefined") { 64 | return { 65 | projectId: id, 66 | }; 67 | } 68 | const { 69 | user: { projectV2 }, 70 | } = await octokit.graphql( 71 | `query($login: String!, $projectNumber: Int!) { 72 | user(login: $login) { 73 | projectV2(number: $projectNumber) { 74 | id 75 | } 76 | } 77 | }`, 78 | { 79 | login, 80 | projectNumber, 81 | }, 82 | ); 83 | const projectId = projectV2.id; 84 | userMap.set(projectNumber, projectId); 85 | return { 86 | projectId, 87 | }; 88 | } 89 | default: { 90 | let r; 91 | let err = null; 92 | try { 93 | r = await octokit.graphql>( 94 | `query($login: String!, $projectNumber: Int!) { 95 | organization(login: $login) { 96 | projectV2(number: $projectNumber) { 97 | id 98 | } 99 | } 100 | user(login: $login) { 101 | projectV2(number: $projectNumber) { 102 | id 103 | } 104 | } 105 | }`, 106 | { 107 | login, 108 | projectNumber, 109 | }, 110 | ); 111 | } catch (error) { 112 | err = error; 113 | r = (error as GraphqlResponseError>) 114 | .data; 115 | } 116 | if (typeof r.organization?.projectV2?.id !== "undefined") { 117 | const projectId = r.organization.projectV2.id; 118 | projectV2OwnerMap.set(login, ProjectV2OwnerType.Organization); 119 | orgMap.set(projectNumber, projectId); 120 | return { 121 | projectId, 122 | }; 123 | } 124 | if (typeof r.user?.projectV2?.id !== "undefined") { 125 | const projectId = r.user.projectV2.id; 126 | projectV2OwnerMap.set(login, ProjectV2OwnerType.User); 127 | userMap.set(projectNumber, projectId); 128 | return { 129 | projectId, 130 | }; 131 | } 132 | console.error(err); 133 | assert(false, `${login}/${projectNumber} doesn't exist`); 134 | } 135 | } 136 | } 137 | 138 | export async function addIssueToProject( 139 | octokit: Octokit, 140 | issueId: string, 141 | login: string, 142 | projectNumber: number, 143 | ) { 144 | const { projectId } = await fetchProjectIdCached( 145 | octokit, 146 | login, 147 | projectNumber, 148 | ); 149 | await octokit.graphql( 150 | `mutation($projectId: ID!, $contentId: ID!) 
{ 151 | addProjectV2ItemById(input: { projectId: $projectId, contentId: $contentId }) { 152 | item { 153 | id 154 | } 155 | } 156 | }`, 157 | { 158 | projectId, 159 | contentId: issueId, 160 | }, 161 | ); 162 | } 163 | 164 | export interface GetTeamMemberProp { 165 | octokit: Octokit; 166 | } 167 | 168 | export async function getAllTeamMember( 169 | { octokit }: GetTeamMemberProp, 170 | organization: string, 171 | teamSlug: string, 172 | ): Promise { 173 | const resp = await octokit.request( 174 | "GET /orgs/{org}/teams/{team_slug}/members", 175 | { 176 | org: organization, 177 | team_slug: teamSlug, 178 | }, 179 | ); 180 | return resp.data ?? []; 181 | } 182 | -------------------------------------------------------------------------------- /src/check.ts: -------------------------------------------------------------------------------- 1 | import * as fs from "node:fs"; 2 | import { ErrorKind, processRules } from "./config"; 3 | import { 4 | type SemErrorPayload, 5 | type SynErrorPayload, 6 | type SemDiag, 7 | type SynDiag, 8 | stringifyObjPath, 9 | SemDiagKind, 10 | type Range as MyRange, 11 | nullPos, 12 | } from "./types"; 13 | import { readAndParse, renameProp } from "./util"; 14 | import assert from "node:assert"; 15 | import { emitter } from "./emitter"; 16 | import * as dotenv from "dotenv"; 17 | import yargs from "yargs/yargs"; 18 | import { hideBin } from "yargs/helpers"; 19 | import { 20 | type DiagnosticResult, 21 | type Range, 22 | } from "./rdjson/DiagnosticResult.jsonschema"; 23 | 24 | const { 25 | input: inputPath, 26 | rdjson: outputRdjson, 27 | fix: doAutoFix, 28 | } = yargs(hideBin(process.argv)) 29 | .option("input", { 30 | alias: "i", 31 | type: "string", 32 | description: "Path to target file", 33 | }) 34 | .option("rdjson", { 35 | type: "boolean", 36 | description: "Output diagnosis as Reviewdog Diagnostic Format (rdjson)", 37 | }) 38 | .option("fix", { 39 | type: "boolean", 40 | description: "Fix problems if possible", 41 | }) 42 | .parseSync(); 43 | 44 | dotenv.config(); 45 | 46 | const input = inputPath ?? process.env.RULES_FILE; 47 | assert(input, "--input or process.env.RULES_FILE is required"); 48 | 49 | const printSemDiag = ( 50 | filepath: string, 51 | { range, msg, objPath, diagKind }: SemDiag, 52 | ) => { 53 | let semPos; 54 | if (objPath.length === 0) { 55 | semPos = ""; 56 | } else { 57 | semPos = `At \`${stringifyObjPath(objPath)}\`: `; 58 | } 59 | switch (diagKind.diagName) { 60 | case SemDiagKind.UnrecognizedKeys: { 61 | const { line, col } = diagKind.key.range.start; 62 | console.log(`${filepath}:${line}:${col}: ${semPos}${msg}`); 63 | break; 64 | } 65 | default: { 66 | const { line, col } = range?.start ?? nullPos; 67 | console.log(`${filepath}:${line}:${col}: ${semPos}${msg}`); 68 | break; 69 | } 70 | } 71 | }; 72 | 73 | const printSynDiag = (filepath: string, { range, msg }: SynDiag) => { 74 | const { line, col } = range?.start ?? 
nullPos; 75 | console.log(`${filepath}:${line}:${col}: ${msg}`); 76 | }; 77 | 78 | if (outputRdjson !== true) { 79 | emitter.on("semerror", ({ filepath, diags }: SemErrorPayload) => { 80 | for (const diag of diags) { 81 | printSemDiag(filepath, diag); 82 | } 83 | }); 84 | 85 | emitter.on("synerror", ({ filepath, diags }: SynErrorPayload) => { 86 | for (const diag of diags) { 87 | printSynDiag(filepath, diag); 88 | } 89 | }); 90 | } 91 | 92 | const { docResult, error } = readAndParse(input, processRules)!; 93 | if (outputRdjson === true) { 94 | const rdjson: DiagnosticResult = { 95 | source: { 96 | name: "autopjlint", 97 | }, 98 | diagnostics: [], 99 | }; 100 | for (const e of error) { 101 | switch (e.type) { 102 | case ErrorKind.Syn: { 103 | rdjson.diagnostics!.push({ 104 | message: e.diag.msg, 105 | location: { 106 | path: input, 107 | range: convertRange(e.diag.range), 108 | }, 109 | }); 110 | break; 111 | } 112 | case ErrorKind.Sem: { 113 | const suggestions: Array<{ range: Range | undefined; text: string }> = 114 | []; 115 | if (e.diag.diagKind.diagName === SemDiagKind.UnrecognizedKeys) { 116 | const { candidates } = e.diag.diagKind; 117 | if (candidates.length !== 0) { 118 | suggestions.push({ 119 | range: convertRange(e.diag.diagKind.key.range), 120 | text: candidates[0], 121 | }); 122 | } 123 | } 124 | rdjson.diagnostics!.push({ 125 | message: e.diag.msg, 126 | location: { 127 | path: input, 128 | range: convertRange(e.diag.range), 129 | }, 130 | suggestions: suggestions.length !== 0 ? suggestions : undefined, 131 | }); 132 | break; 133 | } 134 | default: 135 | break; 136 | } 137 | } 138 | console.log(JSON.stringify(rdjson)); 139 | } 140 | // failed to parse 141 | if (!docResult.is_ok) { 142 | if (doAutoFix === true && docResult.docRaw.kind === "yaml") { 143 | let modified = false; 144 | for (const e of error) { 145 | if ( 146 | e.type === ErrorKind.Sem && 147 | e.diag.diagKind.diagName === SemDiagKind.UnrecognizedKeys 148 | ) { 149 | const { key, candidates } = e.diag.diagKind; 150 | if (candidates.length !== 0) { 151 | renameProp( 152 | docResult.docRaw.value, 153 | e.diag.objPath, 154 | key.value, 155 | candidates[0], 156 | ); 157 | modified = true; 158 | } 159 | } 160 | } 161 | if (modified) { 162 | fs.writeFileSync(input, docResult.docRaw.value.toString(), "utf-8"); 163 | } 164 | } 165 | } 166 | 167 | function convertRange(r: MyRange | null): Range | undefined { 168 | if (r === null) { 169 | return undefined; 170 | } 171 | return { 172 | start: { 173 | line: r.start.line, 174 | column: r.start.col, 175 | }, 176 | end: 177 | r.end !== undefined 178 | ? 
{ 179 | line: r.end.line, 180 | column: r.end.col, 181 | } 182 | : undefined, 183 | }; 184 | } 185 | -------------------------------------------------------------------------------- /src/configFormat/jsonFormat.ts: -------------------------------------------------------------------------------- 1 | import nearley from "nearley"; 2 | import grammar from "./json.js"; 3 | import { 4 | type Document, 5 | type DocumentNode, 6 | type FindKey, 7 | type Format, 8 | type ParseResult, 9 | } from "./configFormat"; 10 | import { type ObjPath } from "../types"; 11 | 12 | const parserGrammar = nearley.Grammar.fromCompiled( 13 | grammar as nearley.CompiledRules, 14 | ); 15 | 16 | export type JsonTagged = { 17 | kind: "json"; 18 | value: T; 19 | }; 20 | 21 | function wrap(value: T): JsonTagged { 22 | return { 23 | kind: "json", 24 | value, 25 | }; 26 | } 27 | 28 | type ParseResultWrapped = ParseResult, JsonTagged>; 29 | 30 | export class JsonFormat implements Format { 31 | parse(s: string): ParseResultWrapped { 32 | try { 33 | const parser = new nearley.Parser(parserGrammar); 34 | parser.feed(s); 35 | const [value]: Value[] = parser.finish(); 36 | return { 37 | is_ok: true, 38 | value: wrap(new JsonDocument(value)), 39 | }; 40 | } catch (e) { 41 | // nearley + moo discards linecol of erroneous token while throwing error 42 | // recover it if possible 43 | const lc = recoverLineCol(e); 44 | return { 45 | is_ok: false, 46 | error: wrap(lc), 47 | }; 48 | } 49 | } 50 | } 51 | 52 | export type LineCol = { 53 | line: number; 54 | col: number; 55 | }; 56 | 57 | const reLc = /at line (\d) col (\d)/; 58 | 59 | function recoverLineCol(e: any): LineCol | null { 60 | if (e instanceof Error) { 61 | const result = reLc.exec(e.message); 62 | if (result !== null) { 63 | return { 64 | line: Number(result[1]), 65 | col: Number(result[2]), 66 | }; 67 | } 68 | } 69 | return null; 70 | } 71 | 72 | export class JsonDocument implements Document { 73 | constructor(private readonly value: Value) {} 74 | toJS() { 75 | return valueToJS(this.value); 76 | } 77 | 78 | asNode(): DocumentNode { 79 | return { 80 | loc: this.value.loc, 81 | }; 82 | } 83 | 84 | getIn(path: ObjPath): (DocumentNode & FindKey) | undefined { 85 | let node: Value = this.value; 86 | for (const elem of path) { 87 | switch (node.type) { 88 | case "Object": { 89 | const pair = node.children.find((pair) => pair.key.value === elem); 90 | if (typeof pair === "undefined") { 91 | return undefined; 92 | } 93 | node = pair.value; 94 | break; 95 | } 96 | case "Array": { 97 | if (typeof elem !== "number") { 98 | return undefined; 99 | } 100 | node = node.children[elem]; 101 | break; 102 | } 103 | case "Literal": { 104 | return undefined; 105 | } 106 | } 107 | } 108 | return toDocumentNode(node); 109 | } 110 | 111 | hasIn(path: ObjPath): boolean { 112 | let node: Value = this.value; 113 | for (const elem of path) { 114 | switch (node.type) { 115 | case "Object": { 116 | const pair = node.children.find((pair) => pair.key.value === elem); 117 | if (typeof pair === "undefined") { 118 | return false; 119 | } 120 | node = pair.value; 121 | break; 122 | } 123 | case "Array": { 124 | if (typeof elem !== "number") { 125 | return false; 126 | } 127 | node = node.children[elem]; 128 | break; 129 | } 130 | case "Literal": { 131 | return false; 132 | } 133 | } 134 | } 135 | return true; 136 | } 137 | } 138 | 139 | interface Pos { 140 | line: number; 141 | col: number; 142 | offset: number; 143 | } 144 | 145 | interface Loc { 146 | start: Pos; 147 | end: Pos; 148 | } 149 | 150 | 
interface NodeBase { 151 | type: string; 152 | loc: Loc; 153 | } 154 | 155 | interface PrimitiveLiteral extends NodeBase { 156 | type: "Literal"; 157 | value: any; 158 | raw: string; 159 | } 160 | 161 | interface Identifier extends NodeBase { 162 | type: "Identifier"; 163 | value: any; 164 | raw: string; 165 | } 166 | 167 | interface ObjectNode extends NodeBase { 168 | type: "Object"; 169 | children: Property[]; 170 | } 171 | 172 | interface ArrayNode extends NodeBase { 173 | type: "Array"; 174 | children: Value[]; 175 | } 176 | 177 | type Value = PrimitiveLiteral | ObjectNode | ArrayNode; 178 | 179 | interface Property extends NodeBase { 180 | type: "Property"; 181 | key: Identifier; 182 | value: Value; 183 | } 184 | 185 | function valueToJS(value: Value): any { 186 | switch (value.type) { 187 | case "Literal": { 188 | return value.value; 189 | } 190 | case "Object": { 191 | const result: any = {}; 192 | for (const prop of value.children) { 193 | result[prop.key.value] = valueToJS(prop.value); 194 | } 195 | return result; 196 | } 197 | case "Array": { 198 | const result = []; 199 | for (const elem of value.children) { 200 | result.push(valueToJS(elem)); 201 | } 202 | return result; 203 | } 204 | } 205 | } 206 | 207 | function toDocumentNode(node: Value): DocumentNode & FindKey { 208 | return { 209 | loc: node.loc, 210 | findKeyAsMap: findDelegator(node), 211 | }; 212 | } 213 | 214 | function findDelegator(node: Value | null) { 215 | return (keyName: string): DocumentNode | null => { 216 | if (node?.type === "Object") { 217 | const key = node?.children.find( 218 | (pair) => pair.key.value.toString() === keyName, 219 | )?.key; 220 | if (typeof key === "undefined") { 221 | return null; 222 | } 223 | return { 224 | loc: key.loc, 225 | }; 226 | } else { 227 | return null; 228 | } 229 | }; 230 | } 231 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import "./sentry.server"; 2 | import * as http from "node:http"; 3 | import { App, createNodeMiddleware } from "@octokit/app"; 4 | import { type Octokit } from "@octokit/core"; 5 | import { 6 | type IssuesEvent, 7 | type PullRequestEvent, 8 | } from "@octokit/webhooks-types"; 9 | import { z } from "zod"; 10 | import log4js from "log4js"; 11 | import { processRulesV0, processRules, getMatchedProj } from "./config"; 12 | import { 13 | configLogger, 14 | doLogError, 15 | readAndParse, 16 | readGithubAppPrivateKey, 17 | } from "./util"; 18 | import { addIssueToProject } from "./github"; 19 | import { 20 | type MatchArg, 21 | type WebhookEventAction, 22 | WebhookEventKind, 23 | } from "./types"; 24 | 25 | configLogger(); 26 | doLogError(); 27 | 28 | const defaultPort = 8124; 29 | 30 | const logger = log4js.getLogger("general"); 31 | logger.info("process started."); 32 | 33 | const envSchema = z.object({ 34 | GITHUB_APP_ID: z.string({ required_error: "GITHUB_APP_ID is required" }), 35 | GITHUB_APP_PRIVATE_KEY_FILE: z.string().optional(), 36 | GITHUB_APP_PRIVATE_KEY: z.string().optional(), 37 | WEBHOOK_SECRET: z.string({ required_error: "WEBHOOK_SECRET is required" }), 38 | RULES_FILE: z.string({ required_error: "RULES_FILE is required" }).optional(), 39 | // legacy, for compatibility 40 | CONFIG_FILE: z.string().optional(), 41 | // legacy 42 | RULES_FILE_V0: z.string().optional(), 43 | // compatibility with old config format 44 | LABEL_CLASSIFICATION_FILE: z.string().optional(), 45 | // the port that http server listen on 
46 | PORT: z.string().optional(), 47 | }); 48 | 49 | const envParseResult = envSchema.safeParse(process.env); 50 | if (!envParseResult.success) { 51 | logger.error("environment variable validation failed."); 52 | for (const issue of envParseResult.error.issues) { 53 | logger.error(issue.message); 54 | } 55 | process.exit(1); 56 | } 57 | const envInput = { 58 | GITHUB_APP_ID: envParseResult.data.GITHUB_APP_ID, 59 | GITHUB_APP_PRIVATE_KEY_FILE: envParseResult.data.GITHUB_APP_PRIVATE_KEY_FILE, 60 | GITHUB_APP_PRIVATE_KEY: envParseResult.data.GITHUB_APP_PRIVATE_KEY, 61 | WEBHOOK_SECRET: envParseResult.data.WEBHOOK_SECRET, 62 | RULES_FILE: envParseResult.data.RULES_FILE, 63 | RULES_FILE_V0: 64 | envParseResult.data.CONFIG_FILE ?? envParseResult.data.RULES_FILE_V0, 65 | LABEL_CLASSIFICATION_FILE: envParseResult.data.LABEL_CLASSIFICATION_FILE, 66 | PORT: envParseResult.data.PORT ?? defaultPort, 67 | }; 68 | if ( 69 | typeof envInput.RULES_FILE === "undefined" && 70 | typeof envInput.RULES_FILE_V0 === "undefined" 71 | ) { 72 | logger.error("environment variable validation failed."); 73 | logger.error("either RULES_FILE or CONFIG_FILE must be specified."); 74 | process.exit(1); 75 | } 76 | if ( 77 | typeof envInput.GITHUB_APP_PRIVATE_KEY_FILE === "undefined" && 78 | typeof envInput.GITHUB_APP_PRIVATE_KEY === "undefined" 79 | ) { 80 | logger.error("environment variable validation failed."); 81 | logger.error( 82 | "either GITHUB_APP_PRIVATE_KEY_FILE or GITHUB_APP_PRIVATE_KEY must be specified.", 83 | ); 84 | process.exit(1); 85 | } 86 | 87 | const privateKey = readGithubAppPrivateKey( 88 | envInput.GITHUB_APP_PRIVATE_KEY_FILE, 89 | envInput.GITHUB_APP_PRIVATE_KEY, 90 | ); 91 | 92 | const rules = readAndParse(envInput.RULES_FILE, processRules)?.content ?? []; 93 | const config = readAndParse(envInput.RULES_FILE_V0, processRulesV0); 94 | const labelClass = 95 | typeof envInput.LABEL_CLASSIFICATION_FILE !== "undefined" 96 | ? readAndParse(envInput.LABEL_CLASSIFICATION_FILE, processRulesV0) 97 | : null; 98 | 99 | logger.info("finish parsing and validating files."); 100 | 101 | const app = new App({ 102 | appId: envInput.GITHUB_APP_ID, 103 | privateKey, 104 | oauth: { 105 | clientId: "PLACEHOLDER", 106 | clientSecret: "PLACEHOLDER", 107 | }, 108 | webhooks: { 109 | secret: envInput.WEBHOOK_SECRET, 110 | }, 111 | }); 112 | 113 | async function determineDestination( 114 | webhookEvent: WebhookEventAction, 115 | arg: MatchArg, 116 | ): Promise> { 117 | const projectNumberSet = new Set(); 118 | 119 | const projectNumbers = config?.content?.get(arg.repository.full_name); 120 | if (typeof projectNumbers !== "undefined") { 121 | for (const projectNumber of projectNumbers) { 122 | projectNumberSet.add(projectNumber); 123 | } 124 | } 125 | 126 | const projectNumbers2 = await getMatchedProj(rules, webhookEvent, arg); 127 | for (const projectNumber of projectNumbers2) { 128 | projectNumberSet.add(projectNumber); 129 | } 130 | 131 | // compatibility with old config format 132 | if (labelClass !== null) { 133 | const labels = arg.issue?.labels ?? 
[]; 134 | for (const label of labels) { 135 | const projectNumbers3 = labelClass.content?.get(label.name); 136 | if (typeof projectNumbers3 !== "undefined") { 137 | for (const projectNumber of projectNumbers3) { 138 | projectNumberSet.add(projectNumber); 139 | } 140 | } 141 | } 142 | } 143 | // end of compatibility 144 | 145 | return projectNumberSet; 146 | } 147 | 148 | type IssuesEventPayload = WithOctokit<IssuesEvent>; 149 | 150 | const issuesEventPayloadToArg = ({ octokit, payload }: IssuesEventPayload) => { 151 | const arg: MatchArg = { 152 | repository: payload.repository, 153 | sender: payload.sender, 154 | issue: { ...payload.issue, octokit }, 155 | pr: null, 156 | }; 157 | const r: Extracted = { 158 | arg, 159 | webhookEvent: { 160 | kind: WebhookEventKind.Issue, 161 | action: payload.action, 162 | }, 163 | nodeId: payload.issue.node_id, 164 | login: payload.repository.owner.login, 165 | }; 166 | return r; 167 | }; 168 | 169 | type PullRequestEventPayload = WithOctokit<PullRequestEvent>; 170 | 171 | const pullRequestEventPayloadToArg = ({ 172 | octokit, 173 | payload, 174 | }: PullRequestEventPayload) => { 175 | const arg: MatchArg = { 176 | repository: payload.repository, 177 | sender: payload.sender, 178 | issue: null, 179 | pr: { ...payload.pull_request, octokit }, 180 | }; 181 | const r: Extracted = { 182 | arg, 183 | webhookEvent: { 184 | kind: WebhookEventKind.PullRequest, 185 | action: payload.action, 186 | }, 187 | nodeId: payload.pull_request.node_id, 188 | login: payload.repository.owner.login, 189 | }; 190 | return r; 191 | }; 192 | 193 | type WithOctokit<X> = { 194 | octokit: Octokit; 195 | payload: X; 196 | }; 197 | 198 | type Extracted = { 199 | arg: MatchArg; 200 | webhookEvent: WebhookEventAction; 201 | nodeId: string; 202 | login: string; 203 | }; 204 | 205 | const handleEvent = 206 | <X>(transformer: (_: WithOctokit<X>) => Extracted) => 207 | async (w: WithOctokit<X>) => { 208 | const { arg, nodeId, login, webhookEvent } = transformer(w); 209 | const destinations = await determineDestination(webhookEvent, arg); 210 | return await Promise.all( 211 | [...destinations].map(async (projectNumber) => { 212 | await addIssueToProject(w.octokit, nodeId, login, projectNumber); 213 | }), 214 | ); 215 | }; 216 | 217 | app.webhooks.on("issues.opened", handleEvent(issuesEventPayloadToArg)); 218 | app.webhooks.on("issues.assigned", handleEvent(issuesEventPayloadToArg)); 219 | app.webhooks.on("issues.labeled", handleEvent(issuesEventPayloadToArg)); 220 | 221 | app.webhooks.on( 222 | "pull_request.opened", 223 | handleEvent(pullRequestEventPayloadToArg), 224 | ); 225 | app.webhooks.on( 226 | "pull_request.assigned", 227 | handleEvent(pullRequestEventPayloadToArg), 228 | ); 229 | app.webhooks.on( 230 | "pull_request.labeled", 231 | handleEvent(pullRequestEventPayloadToArg), 232 | ); 233 | 234 | logger.info(`listening on port ${envInput.PORT}`); 235 | 236 | const middleware = createNodeMiddleware(app); 237 | http 238 | // eslint-disable-next-line @typescript-eslint/no-misused-promises 239 | .createServer(async (req, res) => { 240 | // `middleware` returns `false` when `req` is unhandled 241 | if (await middleware(req, res)) return; 242 | if (req.url === "/healthcheck") { 243 | res.writeHead(200); 244 | } else { 245 | res.writeHead(404); 246 | } 247 | res.end(); 248 | }) 249 | .listen(envInput.PORT); 250 | -------------------------------------------------------------------------------- /src/config.test.ts: -------------------------------------------------------------------------------- 1 | import type { 2 | Repo, 3 | Issue,
4 | User, 5 | Label, 6 | PullRequest, 7 | TargetProj, 8 | } from "./types"; 9 | import { TargetProjKind, WebhookEventKind } from "./types"; 10 | import { type OctokitResponse } from "@octokit/types"; 11 | import { processRules, getMatchedProj } from "./config"; 12 | import * as fs from "node:fs"; 13 | import { expect, it, vi } from "vitest"; 14 | import { type LoggerCat } from "./util"; 15 | import { type GetTeamMemberProp } from "./github"; 16 | import { OctokitRestMock } from "./github.mock"; 17 | import { getFormat } from "./configFormat/configFormat"; 18 | 19 | const loggerCat = "test" as LoggerCat; 20 | 21 | const processYamlRules = (yaml: string) => 22 | processRules(getFormat("yaml"), loggerCat, yaml); 23 | 24 | it("fails to parse empty rule without throw", () => { 25 | const yaml = ""; 26 | expect(processYamlRules(yaml).content).toEqual(null); 27 | }); 28 | 29 | it("can parse readme rules", () => { 30 | const yaml = fs.readFileSync("./test/readme_rule.yaml", "utf-8"); 31 | const rules = processYamlRules(yaml).content!; 32 | expect(rules.length).toBe(6); 33 | expect(rules[1].targetProj).toSatisfy((t: TargetProj) => { 34 | if (t.kind === TargetProjKind.Number) { 35 | if ( 36 | t.projectNumber.flatMap((n) => n.value).toString() === [2, 3].toString() 37 | ) { 38 | return true; 39 | } 40 | } 41 | return false; 42 | }); 43 | }); 44 | 45 | it("can match with readme rules", async () => { 46 | const yaml = fs.readFileSync("./test/readme_rule.yaml", "utf-8"); 47 | const repository: Repo = { 48 | name: "example-repo1", 49 | full_name: "the_owner/example-repo1", 50 | private: false, 51 | description: null, 52 | fork: false, 53 | topics: ["example-topic"], 54 | }; 55 | const user: User = { 56 | login: "octocat", 57 | }; 58 | const sender: User = { 59 | login: "sender", 60 | }; 61 | const user2: User = { 62 | login: "novemdog", 63 | }; 64 | const label: Label = { 65 | name: "bug", 66 | }; 67 | const mock = vi.fn().mockImplementation(async () => { 68 | const resp = { data: [user] }; 69 | return resp; 70 | }); 71 | const octokit = OctokitRestMock(mock); 72 | const issue: Issue & GetTeamMemberProp = { 73 | assignees: [user], 74 | labels: [label], 75 | octokit, 76 | }; 77 | const obj = { repository, sender, issue, pr: null }; 78 | const issue2: Issue & GetTeamMemberProp = { 79 | assignees: [user2], 80 | labels: [label], 81 | octokit, 82 | }; 83 | const pr: PullRequest & GetTeamMemberProp = { 84 | requested_reviewers: [user], 85 | assignees: [user], 86 | head: { 87 | label: "arkedge:renovate/regex-1.x", 88 | ref: "renovate/regex-1.x", 89 | }, 90 | octokit, 91 | }; 92 | const obj2 = { repository, sender, issue: issue2, pr: null }; 93 | const obj3 = { repository, sender, issue: null, pr }; 94 | const obj4 = { 95 | repository: { ...repository, name: "autoproject" }, 96 | sender, 97 | issue: null, 98 | pr, 99 | }; 100 | const rules = processYamlRules(yaml).content!; 101 | await expect(rules[0].test(obj)).resolves.toBeTruthy(); 102 | await expect(rules[1].test(obj)).resolves.toBeFalsy(); 103 | expect(mock).toHaveBeenCalledTimes(0); 104 | await expect(rules[2].test(obj)).resolves.toBeTruthy(); 105 | expect( 106 | mock, 107 | "acquire team member the first time we need", 108 | ).toHaveBeenCalledTimes(1); 109 | 110 | await expect( 111 | getMatchedProj( 112 | rules, 113 | { kind: WebhookEventKind.Issue, action: "assigned" }, 114 | obj, 115 | ), 116 | ).resolves.toEqual([1, 4]); 117 | 118 | await expect(rules[2].test(obj2)).resolves.toBeFalsy(); 119 | expect( 120 | mock, 121 | "once called, it will not be 
called again", 122 | ).toHaveBeenCalledTimes(1); 123 | await expect( 124 | getMatchedProj( 125 | rules, 126 | { kind: WebhookEventKind.Issue, action: "assigned" }, 127 | obj2, 128 | ), 129 | ).resolves.toEqual([]); 130 | 131 | await expect(rules[3].test(obj3)).resolves.toBeFalsy(); 132 | 133 | await expect(rules[3].test(obj4)).resolves.toBeTruthy(); 134 | await expect(rules[4].test(obj4)).resolves.toBeTruthy(); 135 | await expect(rules[5].test(obj4)).resolves.toBeTruthy(); 136 | await expect( 137 | getMatchedProj( 138 | rules, 139 | { kind: WebhookEventKind.PullRequest, action: "assigned" }, 140 | obj4, 141 | ), 142 | ).resolves.toEqual([9]); 143 | }); 144 | 145 | it("can treat multiple rules", async () => { 146 | const yaml = fs.readFileSync("./test/test_multiple_rules.yaml", "utf-8"); 147 | const repository = {} as unknown as Repo; 148 | const mock = vi.fn().mockImplementation(async () => { 149 | const resp = { data: [] } as unknown as OctokitResponse<[]>; 150 | return resp; 151 | }); 152 | const obj = (login: string) => { 153 | const user: User = { 154 | login, 155 | }; 156 | const sender: User = { 157 | login, 158 | }; 159 | const octokit = OctokitRestMock(mock); 160 | const issue: Issue & GetTeamMemberProp = { 161 | assignees: [user], 162 | octokit, 163 | }; 164 | const obj = { repository, sender, issue, pr: null }; 165 | return obj; 166 | }; 167 | const rules = processYamlRules(yaml).content!; 168 | await expect(rules[0].test(obj("z"))).resolves.toBeFalsy(); 169 | await expect(rules[0].test(obj("a"))).resolves.toBeTruthy(); 170 | await expect(rules[1].test(obj("a"))).resolves.toBeFalsy(); 171 | await expect(rules[2].test(obj("a"))).resolves.toBeFalsy(); 172 | await expect(rules[0].test(obj("b"))).resolves.toBeFalsy(); 173 | await expect(rules[1].test(obj("b"))).resolves.toBeTruthy(); 174 | await expect(rules[2].test(obj("b"))).resolves.toBeFalsy(); 175 | expect(mock).toHaveBeenCalledTimes(0); 176 | }); 177 | 178 | it("treat rules about issue and pr exclusively", () => { 179 | const ok = (yml: string, msg?: string) => { 180 | expect(processYamlRules(yml).content, msg).not.toEqual(null); 181 | }; 182 | const ng = (yml: string, msg?: string) => { 183 | expect(processYamlRules(yml).content, msg).toEqual(null); 184 | }; 185 | ok(` 186 | version: "0" 187 | rules: 188 | - issue: 189 | assignees: 190 | - name 191 | project: 1`); 192 | ok(` 193 | version: "0" 194 | rules: 195 | - pr: 196 | assignees: 197 | - name 198 | project: 1`); 199 | ng(` 200 | version: "0" 201 | rules: 202 | - issue: 203 | assignees: 204 | - name 205 | pr: 206 | assignees: 207 | - name 208 | project: 1`); 209 | ng( 210 | ` 211 | version: "0" 212 | rules: 213 | - issue: 214 | assignees: 215 | - name 216 | on: 217 | pr: 218 | - assigned 219 | project: 1`, 220 | "issue rule on pr event", 221 | ); 222 | }); 223 | 224 | it("can limit firing event", async () => { 225 | const yaml = fs.readFileSync("./test/test_event_on.yaml", "utf-8"); 226 | const repository = { 227 | name: "autopj", 228 | } as unknown as Repo; 229 | const mock = vi.fn().mockImplementation(async () => { 230 | const resp = { data: [] }; 231 | return resp; 232 | }); 233 | const arg = (login: string) => { 234 | const user: User = { 235 | login, 236 | }; 237 | const sender: User = { 238 | login, 239 | }; 240 | const octokit = OctokitRestMock(mock); 241 | const issue: Issue & GetTeamMemberProp = { 242 | assignees: [user], 243 | octokit, 244 | }; 245 | const obj = { repository, sender, issue, pr: null }; 246 | return obj; 247 | }; 248 | const rules = 
processYamlRules(yaml).content!; 249 | await expect( 250 | getMatchedProj( 251 | rules, 252 | { 253 | kind: WebhookEventKind.Issue, 254 | action: "assigned", 255 | }, 256 | arg("assignee"), 257 | ), 258 | ).resolves.toEqual([1]); 259 | await expect( 260 | getMatchedProj( 261 | rules, 262 | { 263 | kind: WebhookEventKind.Issue, 264 | action: "opened", 265 | }, 266 | arg("assignee"), 267 | ), 268 | ).resolves.toEqual([1, 2]); 269 | await expect( 270 | getMatchedProj( 271 | rules, 272 | { 273 | kind: WebhookEventKind.PullRequest, 274 | action: "opened", 275 | }, 276 | arg("assignee"), 277 | ), 278 | ).resolves.toEqual([2]); 279 | await expect( 280 | getMatchedProj( 281 | rules, 282 | { 283 | kind: WebhookEventKind.PullRequest, 284 | action: "assigned", 285 | }, 286 | arg("assignee"), 287 | ), 288 | ).resolves.toEqual([]); 289 | }); 290 | -------------------------------------------------------------------------------- /src/config.ts: -------------------------------------------------------------------------------- 1 | import { type YAMLError } from "yaml"; 2 | import type YAML from "yaml"; 3 | import { z } from "zod"; 4 | import { emitter } from "./emitter"; 5 | import { 6 | type ObjPath, 7 | type SemDiag, 8 | SemDiagKind, 9 | type SynDiag, 10 | type Diag, 11 | type MatchArg, 12 | WebhookEventKind, 13 | type WebhookEventActionTarget, 14 | isTargetWebhookEvent, 15 | type WebhookEventAction, 16 | type EventTarget, 17 | type TargetProj, 18 | TargetProjKind, 19 | type TargetProjNumber, 20 | type TargetProjOnly, 21 | type TargetProjReject, 22 | } from "./types"; 23 | import Fuse from "fuse.js"; 24 | import assert from "node:assert"; 25 | import { getLogger } from "log4js"; 26 | import { 27 | type APred, 28 | type RuleSchema, 29 | type ZodPathComponentUsed, 30 | configSchema, 31 | forallAsync, 32 | flatten, 33 | versionedSchema, 34 | } from "./configSchema"; 35 | import { type YamlDocument, type YamlTagged } from "./configFormat/yamlFormat"; 36 | import { 37 | type JsonDocument, 38 | type JsonTagged, 39 | type LineCol, 40 | } from "./configFormat/jsonFormat"; 41 | import { type Document, type EitherFormat } from "./configFormat/configFormat"; 42 | 43 | const createSearcher = (path: ObjPath) => { 44 | return new Fuse(tryExtractPossibleKeysAt(path)); 45 | }; 46 | 47 | const tryExtractPossibleKeysAt = (path: ObjPath) => { 48 | try { 49 | let def: ZodPathComponentUsed = versionedSchema._def; 50 | for (const elem of path) { 51 | let next: ZodPathComponentUsed; 52 | if (typeof elem === "number") { 53 | assert(def.typeName === z.ZodFirstPartyTypeKind.ZodArray); 54 | next = def.type._def; 55 | } else { 56 | assert(def.typeName === z.ZodFirstPartyTypeKind.ZodObject); 57 | const shape = def.shape(); 58 | next = shape[elem]._def; 59 | } 60 | def = peelTransformer(next); 61 | } 62 | assert(def.typeName === z.ZodFirstPartyTypeKind.ZodObject); 63 | return Object.keys(def.shape()); 64 | } catch (e) { 65 | if (e instanceof Error) { 66 | getLogger("debug").error(e.message); 67 | } 68 | return []; 69 | } 70 | }; 71 | 72 | const peelTransformer = (def: ZodPathComponentUsed): ZodPathComponentUsed => { 73 | if (def.typeName === z.ZodFirstPartyTypeKind.ZodOptional) { 74 | const newDef: ZodPathComponentUsed = def.innerType._def; 75 | def = peelTransformer(newDef); 76 | } 77 | if (def.typeName === z.ZodFirstPartyTypeKind.ZodEffects) { 78 | const newDef: ZodPathComponentUsed = def.schema._def; 79 | def = peelTransformer(newDef); 80 | } 81 | return def; 82 | }; 83 | 84 | export interface Config { 85 | get: (x: string) => 
number[] | undefined; 86 | } 87 | 88 | interface Rule { 89 | test: APred<MatchArg>; 90 | webhookEventTarget: WebhookEventActionTarget; 91 | targetProj: TargetProj; 92 | } 93 | 94 | export async function getMatchedProj( 95 | rules: Rule[], 96 | webhookEvent: WebhookEventAction, 97 | obj: MatchArg, 98 | ): Promise<number[]> { 99 | const ruleMatchedProjectNumbers = await Promise.all( 100 | rules.map(async (rule) => { 101 | if (isTargetWebhookEvent(rule.webhookEventTarget, webhookEvent)) { 102 | if (await rule.test(obj)) { 103 | return rule.targetProj; 104 | } 105 | } 106 | return []; 107 | }), 108 | ); 109 | const targetProjs: TargetProj[] = ruleMatchedProjectNumbers.flat(); 110 | return filterTargetProjs(targetProjs); 111 | } 112 | 113 | function filterTargetProjs(targetProjs: TargetProj[]): number[] { 114 | // lift `priority` from `TargetProjNumber` 115 | const targetProjsFlat = targetProjs.flatMap( 116 | ( 117 | p: TargetProj, 118 | ): Array< 119 | | ({ kind: TargetProjKind.Number } & TargetProjNumber) 120 | | TargetProjOnly 121 | | TargetProjReject 122 | > => { 123 | if (p.kind === TargetProjKind.Number) { 124 | return p.projectNumber.map((n) => ({ 125 | kind: TargetProjKind.Number, 126 | ...n, 127 | })); 128 | } 129 | return [p]; 130 | }, 131 | ); 132 | // dictionary order (priority >> kind) 133 | targetProjsFlat.sort((a, b) => { 134 | if (a.priority === b.priority) { 135 | if (a.kind === b.kind) { 136 | return 0; 137 | } else { 138 | return a.kind - b.kind; 139 | } 140 | } else { 141 | return a.priority - b.priority; 142 | } 143 | }); 144 | let projSet = new Set<number>(); 145 | for (const targetProj of targetProjsFlat) { 146 | switch (targetProj.kind) { 147 | case TargetProjKind.Number: { 148 | if (targetProj.isPositive) { 149 | for (const n of targetProj.value) { 150 | projSet.add(n); 151 | } 152 | } else { 153 | for (const n of targetProj.value) { 154 | projSet.delete(n); 155 | } 156 | } 157 | break; 158 | } 159 | case TargetProjKind.Only: { 160 | projSet = new Set(targetProj.projectNumber); 161 | break; 162 | } 163 | case TargetProjKind.Reject: { 164 | // no projects to add 165 | return []; 166 | } 167 | } 168 | } 169 | return [...projSet]; 170 | } 171 | 172 | export function processRulesV0( 173 | format: EitherFormat, 174 | filepath: string, 175 | src: string, 176 | ): ParseResult<Config> { 177 | const { docResult, content, error } = parseFile( 178 | format, 179 | configSchema, 180 | filepath, 181 | src, 182 | ); 183 | return { 184 | docResult, 185 | content: content !== null ?
new Map(Object.entries(content)) : null, 186 | error, 187 | }; 188 | } 189 | 190 | export enum ErrorKind { 191 | Syn = "syn", 192 | Sem = "sem", 193 | Unknown = "?", 194 | } 195 | 196 | type ParseError = 197 | | { 198 | type: ErrorKind.Syn; 199 | diag: SynDiag; 200 | } 201 | | { 202 | type: ErrorKind.Sem; 203 | diag: SemDiag; 204 | } 205 | | { 206 | type: ErrorKind.Unknown; 207 | error: unknown; 208 | }; 209 | 210 | type YamlDocResult = 211 | | { 212 | is_ok: true; 213 | doc: JsonDocument | YamlDocument; 214 | } 215 | | { 216 | is_ok: false; 217 | docRaw: 218 | | YamlTagged> 219 | | JsonTagged; 220 | }; 221 | 222 | export type ParseResult = { 223 | docResult: YamlDocResult; 224 | content: T | null; 225 | error: ParseError[]; 226 | }; 227 | 228 | function parseFile>( 229 | format: EitherFormat, 230 | schema: Schema, 231 | filepath: string, 232 | yaml: string, 233 | ): ParseResult { 234 | const result = format.parse(yaml); 235 | if (!result.is_ok) { 236 | const doc = result.error; 237 | let diags; 238 | switch (doc.kind) { 239 | case "json": 240 | diags = extractJsonSynDiag(doc.value); 241 | break; 242 | case "yaml": 243 | diags = extractYamlSynDiag(doc.value.errors); 244 | break; 245 | default: 246 | assert(false, "configFormat is not exhaustive"); 247 | } 248 | emitter.emit("synerror", { filepath, diags }); 249 | return { 250 | docResult: { 251 | is_ok: false, 252 | docRaw: doc, 253 | }, 254 | content: null, 255 | error: diags.map((diag) => { 256 | return { type: ErrorKind.Syn, diag }; 257 | }), 258 | }; 259 | } 260 | const doc = result.value.value; 261 | const raw: unknown = doc.toJS(); 262 | const zResult = schema.safeParse(raw); 263 | if (zResult.success) { 264 | return { 265 | docResult: { 266 | is_ok: true, 267 | doc, 268 | }, 269 | content: zResult.data, 270 | error: [], 271 | }; 272 | } else { 273 | const diags = extractSemDiag(doc, zResult.error); 274 | emitter.emit("semerror", { filepath, diags }); 275 | return { 276 | docResult: { 277 | is_ok: true, 278 | doc, 279 | }, 280 | content: null, 281 | error: diags.map((diag) => { 282 | return { type: ErrorKind.Sem, diag }; 283 | }), 284 | }; 285 | } 286 | } 287 | 288 | const transformRule = (parsed: RuleSchema) => { 289 | const parts = [parsed.repo, parsed.issue, parsed.pr].flatMap((l) => l ?? []); 290 | const rule: Rule = { 291 | test: async (obj: MatchArg) => 292 | await forallAsync(parts, async (value) => await value(obj)), 293 | webhookEventTarget: getWebhookEventTarget(parsed), 294 | targetProj: parsed.project, 295 | }; 296 | return rule; 297 | }; 298 | 299 | const getEventTarget = (on: "any" | E | E[]): EventTarget => { 300 | if (on === "any") { 301 | return { 302 | kind: "any", 303 | }; 304 | } else { 305 | return { 306 | kind: "oneof", 307 | list: flatten(on), 308 | }; 309 | } 310 | }; 311 | 312 | function getWebhookEventTarget(parsed: RuleSchema): WebhookEventActionTarget { 313 | if (typeof parsed.pr === "undefined") { 314 | if (typeof parsed.issue === "undefined") { 315 | // repo only 316 | return { 317 | kind: "both", 318 | issueAction: getEventTarget(parsed.on?.issue ?? "any"), 319 | prAction: getEventTarget(parsed.on?.pr ?? "any"), 320 | }; 321 | } else { 322 | // issue rule 323 | return { 324 | kind: WebhookEventKind.Issue, 325 | issueAction: getEventTarget(parsed.on?.issue ?? "any"), 326 | }; 327 | } 328 | } else { 329 | return { 330 | kind: WebhookEventKind.PullRequest, 331 | prAction: getEventTarget(parsed.on?.pr ?? 
"any"), 332 | }; 333 | } 334 | } 335 | 336 | export function processRules( 337 | format: EitherFormat, 338 | filepath: string, 339 | src: string, 340 | ): ParseResult { 341 | const { docResult, content, error } = parseFile( 342 | format, 343 | versionedSchema, 344 | filepath, 345 | src, 346 | ); 347 | return { 348 | docResult, 349 | content: content?.rules.map(transformRule) ?? null, 350 | error, 351 | }; 352 | } 353 | 354 | const extractSemDiag = (doc: Document, error: z.ZodError) => { 355 | const extractFromDoc = (path: ObjPath, msg: string) => { 356 | const node = doc.getIn(path); 357 | if (typeof node !== "undefined") { 358 | const diag: SemDiag = { 359 | objPath: path, 360 | msg: msg.split("\n")[0], 361 | diagKind: { 362 | diagName: SemDiagKind.Any, 363 | }, 364 | range: { 365 | start: node.loc.start, 366 | end: node.loc.end, 367 | }, 368 | }; 369 | return diag; 370 | } else { 371 | throw new Error(`invalid object path: ${path.toString()}`); 372 | } 373 | }; 374 | const diags = error.issues.flatMap((issue) => { 375 | const path = issue.path; 376 | if (path.length === 0) { 377 | const diag: SemDiag = { 378 | objPath: path, 379 | msg: issue.message, 380 | range: { 381 | start: doc.asNode().loc.start, 382 | end: doc.asNode().loc.end, 383 | }, 384 | diagKind: { 385 | diagName: SemDiagKind.Any, 386 | }, 387 | }; 388 | return [diag]; 389 | } 390 | if (issue.code === "unrecognized_keys") { 391 | return issue.keys.map((k) => { 392 | const elem = doc.getIn(path)!; 393 | const key = elem.findKeyAsMap(k)!; 394 | const candidates = [ 395 | ...createSearcher(path) 396 | .search(k) 397 | .map((fr) => fr.item), 398 | ]; 399 | let msg = `Unrecognized key: '${k}'`; 400 | if (candidates.length !== 0) { 401 | msg += `. Did you mean '${candidates[0]}'?`; 402 | } 403 | const diag: SemDiag = { 404 | objPath: path, 405 | msg, 406 | diagKind: { 407 | diagName: SemDiagKind.UnrecognizedKeys, 408 | key: { 409 | value: k, 410 | range: { 411 | start: key.loc.start, 412 | end: key.loc.end, 413 | }, 414 | }, 415 | candidates, 416 | }, 417 | range: { 418 | start: elem.loc.start, 419 | end: elem.loc.end, 420 | }, 421 | }; 422 | return diag; 423 | }); 424 | } 425 | let msg; 426 | if ( 427 | (issue.code === "invalid_union" && 428 | issue.unionErrors.every((e) => 429 | e.errors.every((e) => e.message === "Required"), 430 | )) || 431 | issue.message === "Required" 432 | ) { 433 | msg = `'${path.at(-1)!}' is required`; 434 | } else { 435 | msg = issue.message; 436 | } 437 | const docPath = doc.hasIn(path) ? path : path.slice(0, -1); 438 | return [extractFromDoc(docPath, msg)]; 439 | }); 440 | return diags; 441 | }; 442 | 443 | const extractYamlSynDiag = (errors: YAMLError[]) => { 444 | return errors.map((error) => { 445 | const r: Diag = { 446 | msg: error.message, 447 | range: { 448 | start: error.linePos![0], 449 | end: error.linePos![1], 450 | }, 451 | }; 452 | return r; 453 | }); 454 | }; 455 | 456 | const extractJsonSynDiag = (lc: LineCol | null) => { 457 | const start = lc ?? 
{ 458 | line: 1, 459 | col: 1, 460 | }; 461 | const d: SynDiag = { 462 | msg: "syntax error", 463 | range: { 464 | start, 465 | }, 466 | }; 467 | return [d]; 468 | }; 469 | -------------------------------------------------------------------------------- /src/configSchema.ts: -------------------------------------------------------------------------------- 1 | import { getAllTeamMember, type GetTeamMemberProp } from "./github"; 2 | import { z } from "zod"; 3 | import { 4 | type Issue, 5 | type Label, 6 | type Repo, 7 | type MatchArg, 8 | type PullRequest, 9 | type User, 10 | TargetProjKind, 11 | type TargetProjNumber, 12 | type TargetProjOnly, 13 | type TargetProjReject, 14 | type PullRequestHead, 15 | type TargetProjKindNumber, 16 | } from "./types"; 17 | 18 | type Pred = (x: X) => boolean; 19 | export type APred = (x: X) => Promise; 20 | 21 | type ArrayOrInner = T | T[]; 22 | 23 | export const flatten = (a: ArrayOrInner) => { 24 | if (Array.isArray(a)) { 25 | return a; 26 | } 27 | return [a]; 28 | }; 29 | 30 | const id = (x: T) => x; 31 | 32 | const propProj = 33 | (key: K) => 34 | (t: T) => 35 | t[key]; 36 | 37 | const existsAsync = async (arr: Y[], predicate: APred) => { 38 | for (const e of arr) { 39 | if (await predicate(e)) return true; 40 | } 41 | return false; 42 | }; 43 | 44 | export const forallAsync = async (arr: Y[], predicate: APred) => { 45 | for (const e of arr) { 46 | if (!(await predicate(e))) return false; 47 | } 48 | return true; 49 | }; 50 | 51 | const composeE = 52 | (f: (x: X) => ArrayOrInner, g: Array<(x: Y) => Z>) => 53 | (x: X) => { 54 | const y = flatten(f(x)); 55 | return g.some((g) => y.some((y) => g(y))); 56 | }; 57 | 58 | const composeEAsync = 59 | (f: (x: X) => ArrayOrInner, g: Array<(x: Y) => Promise>) => 60 | async (x: X) => { 61 | const y = flatten(f(x)); 62 | return await existsAsync( 63 | g, 64 | async (g) => await existsAsync(y, async (y) => await g(y)), 65 | ); 66 | }; 67 | 68 | const composeA = 69 | (f: (x: X) => ArrayOrInner, g: Array<(x: Y) => Z>) => 70 | (x: X) => { 71 | const y = flatten(f(x)); 72 | return g.every((g) => y.some((y) => g(y))); 73 | }; 74 | 75 | const composeAAsync = 76 | (f: (x: X) => ArrayOrInner, g: Array<(x: Y) => Promise>) => 77 | async (x: X) => { 78 | const y = flatten(f(x)); 79 | return await forallAsync( 80 | g, 81 | async (g) => await existsAsync(y, async (y) => await g(y)), 82 | ); 83 | }; 84 | 85 | const extendE = 86 | (f: (_: X) => Y) => 87 | (ps: ArrayOrInner>) => 88 | composeE(f, flatten(ps)); 89 | 90 | const extendEAsync = 91 | (f: (_: X) => Y) => 92 | (ps: ArrayOrInner>) => 93 | composeEAsync(f, flatten(ps)); 94 | 95 | const extendPropE = 96 | () => 97 | (key: K) => 98 | (ps: ArrayOrInner>) => 99 | composeE(propProj(key), flatten(ps)); 100 | 101 | const extendPropA = 102 | () => 103 | (key: K) => 104 | (ps: ArrayOrInner>) => 105 | composeA(propProj(key), flatten(ps)); 106 | 107 | const extendPropAAsync = 108 | () => 109 | (key: K) => 110 | (ps: ArrayOrInner>) => 111 | composeAAsync(propProj(key), flatten(ps)); 112 | 113 | const asAsync = 114 | (p: Pred) => 115 | async (x: X) => 116 | p(x); 117 | 118 | const arrayOneOf = (ps: ArrayOrInner>) => 119 | composeE((x: X[]) => x, flatten(ps)); 120 | 121 | const nullTolerantAAsync = 122 | (ps: ArrayOrInner>) => 123 | async (x: X | null) => { 124 | if (x === null) { 125 | // identity element of `and` 126 | return true; 127 | } else { 128 | return await composeAAsync(id, flatten(ps))(x); 129 | } 130 | }; 131 | 132 | const unit = (a: T) => [a]; 133 | 134 | const _number = 
z.number().int().nonnegative(); 135 | 136 | const numberParser = z.union([_number.transform(unit), _number.array()], { 137 | errorMap: (issue, ctx) => { 138 | if ( 139 | issue.code === z.ZodIssueCode.invalid_union && 140 | issue.unionErrors.length === 2 141 | ) { 142 | const [num, arr] = issue.unionErrors; 143 | if (num.issues.every((i) => i.code === z.ZodIssueCode.invalid_type)) { 144 | const issue = num.issues.at(0) as z.ZodInvalidTypeIssue; 145 | if ( 146 | num.issues.every( 147 | (i) => 148 | // i.code === z.ZodIssueCode.invalid_type is required because of type inference 149 | i.code === z.ZodIssueCode.invalid_type && 150 | i.received === z.ZodParsedType.array, 151 | ) 152 | ) { 153 | // error is array of something 154 | // return `arr` to delegate error message 155 | return arr; 156 | } else { 157 | return { 158 | message: `Expected number or array of number, received ${issue.received}`, 159 | }; 160 | } 161 | } else { 162 | // received a number, but invalid as a number (will be negative or float) 163 | // return `num` to delegate error message 164 | return num; 165 | } 166 | } 167 | return { message: ctx.defaultError }; 168 | }, 169 | }); 170 | 171 | export const configSchema = z.record(numberParser); 172 | 173 | const stringEq = (value: string, ctx: z.RefinementCtx): Pred => { 174 | if (value.startsWith("/") && value.endsWith("/")) { 175 | try { 176 | const regexp = new RegExp(value.substring(1, value.length - 1)); 177 | return (prop: string | null) => regexp.test(prop ?? ""); 178 | } catch (e) { 179 | if (e instanceof SyntaxError) { 180 | ctx.addIssue({ 181 | code: z.ZodIssueCode.custom, 182 | message: `Invalid regexp: ${e.message}`, 183 | }); 184 | } 185 | return z.NEVER; 186 | } 187 | } else { 188 | return (prop: string | null) => prop === value; 189 | } 190 | }; 191 | 192 | const curryingStrictEq = 193 | (x: T) => 194 | (y: T) => 195 | x === y; 196 | 197 | const _string = z.string().transform(stringEq); 198 | 199 | const stringish = _string.transform(unit).or(_string.array()); 200 | 201 | const booleanParser = z.boolean().transform(curryingStrictEq); 202 | 203 | const repoSchema = z.object({ 204 | name: stringish.transform(extendPropE()("name")), 205 | full_name: stringish.transform(extendPropE()("full_name")), 206 | description: stringish.transform(extendPropE()("description")), 207 | fork: booleanParser.transform(extendPropE()("fork")), 208 | private: booleanParser.transform(extendPropE()("private")), 209 | topics: stringish 210 | .transform(arrayOneOf) 211 | .transform(extendPropE()("topics")), 212 | }); 213 | 214 | const lazyAsync = (f: (i: X) => Promise) => { 215 | let p: Promise | undefined; 216 | return async (i: X) => { 217 | if (typeof p === "undefined") { 218 | p = f(i); 219 | } 220 | return await p; 221 | }; 222 | }; 223 | 224 | const teamRegex = 225 | /^([a-z\d](?:[a-z\d]|-(?=[a-z\d])){0,38})\/([a-z\d](?:[a-z\d]|-(?=[a-z\d])){0,38})$/i; 226 | 227 | const loginEq = 228 | (proj: (_: X) => User[]) => 229 | (value: string): APred => { 230 | const r = teamRegex.exec(value); 231 | if (r === null) { 232 | return async (x: X) => { 233 | return proj(x).some((g) => curryingStrictEq(g.login)(value)); 234 | }; 235 | } else { 236 | const p = lazyAsync(async (i: X & GetTeamMemberProp) => { 237 | return await getAllTeamMember(i, r[1], r[2]); 238 | }); 239 | return async (x: X & GetTeamMemberProp) => { 240 | const y = await p(x); 241 | return proj(x).some((g) => 242 | y.some((y) => curryingStrictEq(g.login)(y.login)), 243 | ); 244 | }; 245 | } 246 | }; 247 | 248 | const loginStr = 
<X>(proj: (_: X) => User[]) => 249 | z.string().transform(loginEq(proj)); 250 | 251 | const loginParser = <X>(proj: (_: X) => User[]) => 252 | loginStr(proj).transform(unit).or(loginStr(proj).array()); 253 | 254 | const labelParser = stringish 255 | .transform(extendPropE