├── logs └── .gitignore ├── nodemon.json ├── .babelrc ├── .gitignore ├── start-development.js ├── scripts ├── phpcs.js ├── deploy.sh ├── compare-commits.js ├── test-commit.js ├── test-pr.js └── get-logs.js ├── lambda-test.js ├── index.js ├── src ├── metadata.js ├── diff.js ├── config.js ├── util.js ├── run.js ├── index.js ├── linters │ ├── index.js │ ├── stylelint │ │ └── index.js │ ├── eslint │ │ └── index.js │ └── phpcs │ │ └── index.js ├── review.js ├── format.js └── hooks.js ├── package.json ├── fixtures ├── installation_repositories.added.json ├── push.json ├── pull_request.opened.json ├── pull_request.synchronize.json └── lambda-test-event.json └── README.md /logs/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | -------------------------------------------------------------------------------- /nodemon.json: -------------------------------------------------------------------------------- 1 | { 2 | "watch": [ 3 | "./src", 4 | "./start-development.js" 5 | ] 6 | } 7 | -------------------------------------------------------------------------------- /.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "plugins": ["transform-async-to-generator"], 3 | "presets": ["es2015", "stage-0"] 4 | } 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pem 2 | .env 3 | build/ 4 | node_modules 5 | probot/ 6 | src/linters/phpcs/vendor 7 | bin/ 8 | lib/ 9 | event.json 10 | lambda-function.zip 11 | package-lock.json 12 | -------------------------------------------------------------------------------- /start-development.js: -------------------------------------------------------------------------------- 1 | const { probot } = require( '@humanmade/probot-util' ); 2 | 3 | // Probot setup 4 | const bot = probot.create(); 5 | 6 | bot.load( require( './src' ) ); 7 | bot.start(); 8 | -------------------------------------------------------------------------------- /scripts/phpcs.js: -------------------------------------------------------------------------------- 1 | const phpcs = require( '../linters/phpcs' ); 2 | 3 | const codedir = process.argv[2]; 4 | 5 | phpcs( codedir ) 6 | .then( results => { 7 | console.log( results ); 8 | }) 9 | .catch( err => { 10 | console.error( `error: ${err.message}` ) 11 | }) 12 | -------------------------------------------------------------------------------- /lambda-test.js: -------------------------------------------------------------------------------- 1 | const hmLinter = require('./index'); 2 | const fs = require('fs') 3 | 4 | const testEvent = JSON.parse( fs.readFileSync('./fixtures/lambda-test-event.json') ) 5 | hmLinter.probotHandler( testEvent, null, function( err, success ) { 6 | console.log( err ) 7 | console.log( success ) 8 | } ) 9 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | const { probot } = require( '@humanmade/probot-util' ); 2 | 3 | // Probot setup 4 | const bot = probot.create(); 5 | 6 | // Load Probot plugins from the `./src` folder 7 | bot.load( require( './src' ) ); 8 | 9 | // Lambda Handler 10 | module.exports.probotHandler = probot.buildHandler( bot ); 11 | -------------------------------------------------------------------------------- /scripts/deploy.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | if [[ -z "$LAMBDA_FUNCTION" ]]; then 4 | echo "LAMBDA_FUNCTION must be set in .env" 5 | exit 1 6 | fi 7 | 8 | : "${LAMBDA_REGION:=us-east-1}" 9 | 10 | echo "Deploying to $LAMBDA_FUNCTION in $LAMBDA_REGION" 11 | aws lambda update-function-code --function-name $LAMBDA_FUNCTION --region $LAMBDA_REGION --zip-file fileb://lambda-function.zip 12 | -------------------------------------------------------------------------------- /src/metadata.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Regex pattern for evaluating HM Linter comments and pulling a JSON blob out. 3 | * 4 | * @type {RegExp} 5 | */ 6 | const regex = /\n\n/; 7 | 8 | /** 9 | * Parse HM Linter data from HTML comment text. 10 | * 11 | * @param {String} text Original text. 12 | * @returns {Object} 13 | */ 14 | function parse( text ) { 15 | const match = text.replace( /\r\n/g, '\n' ).match( regex ); 16 | if ( ! match ) { 17 | return null; 18 | } 19 | 20 | return JSON.parse( match[1] ); 21 | } 22 | 23 | /** 24 | * Convert a JSON object into an HTML comment for HM Linter. 25 | * 26 | * @param {Object} data Data to convert. 27 | * @returns {string} 28 | */ 29 | function serialize( data ) { 30 | return `\n\n`; 31 | } 32 | 33 | module.exports = { 34 | parse, 35 | serialize, 36 | }; 37 | -------------------------------------------------------------------------------- /src/diff.js: -------------------------------------------------------------------------------- 1 | const parseDiff = require( 'parse-diff' ); 2 | 3 | /** 4 | * Work out which files and lines are contained in this PR. 5 | * 6 | * @param {Object} pushConfig Data about the push being evaluated. 7 | * @param {Number} number Run number. 8 | * @param {Object} github GitHub API. 9 | * @returns {Promise} 10 | */ 11 | module.exports.getDiffMapping = async ( pushConfig, number, github ) => { 12 | const diff = await github.pullRequests.get({ 13 | owner: pushConfig.owner, 14 | repo: pushConfig.repo, 15 | number, 16 | 17 | headers: { 18 | Accept: 'application/vnd.github.v3.diff', 19 | } 20 | }); 21 | 22 | // Form mapping. 23 | const mapping = {}; 24 | const parsedFiles = parseDiff( diff.data ); 25 | parsedFiles.forEach( file => { 26 | let position = 0; 27 | mapping[ file.to ] = {}; 28 | file.chunks.forEach( (chunk, index) => { 29 | if (index !== 0) { 30 | position++; 31 | } 32 | chunk.changes.forEach( change => { 33 | position++; 34 | if ( change.type === "add" ) { 35 | mapping[ file.to ][ change.ln || change.ln2 ] = position; 36 | } 37 | } ); 38 | } ); 39 | } ); 40 | return mapping; 41 | }; 42 | -------------------------------------------------------------------------------- /scripts/compare-commits.js: -------------------------------------------------------------------------------- 1 | const githubApi = require( 'github' ); 2 | 3 | const run = require( '../src/run' ); 4 | const { formatComparison, formatReviewChange, formatSummary, resultsByFile } = require( '../src/format' ); 5 | const { compareRuns } = require( '../src/review' ); 6 | 7 | const printUsage = () => { 8 | console.log( 'node compare-commits.js / ' ); 9 | console.log( ' Compare linting from two commits or branches.' ); 10 | }; 11 | 12 | const main = async argv => { 13 | const [ fullRepo, firstCommit, secondCommit ] = argv.slice( 2 ); 14 | if ( ! fullRepo || ! firstCommit || ! 
secondCommit ) { 15 | printUsage(); 16 | process.exit( 1 ); 17 | } 18 | 19 | const [ owner, repo ] = fullRepo.split( '/' ); 20 | if ( ! owner || ! repo ) { 21 | printUsage(); 22 | process.exit( 1 ); 23 | } 24 | 25 | const commonConfig = { owner, repo }; 26 | const github = new githubApi(); 27 | 28 | if ( process.env.HM_LINTER_GITHUB_TOKEN ) { 29 | github.authenticate( { 30 | type: 'token', 31 | token: process.env.HM_LINTER_GITHUB_TOKEN, 32 | } ); 33 | } 34 | 35 | const firstRun = await run( 36 | { ...commonConfig, commit: firstCommit }, 37 | github 38 | ); 39 | const secondRun = await run( 40 | { ...commonConfig, commit: secondCommit }, 41 | github 42 | ); 43 | 44 | console.log( `First: ${ formatSummary( firstRun ) }` ); 45 | console.log( `Second: ${ formatSummary( secondRun ) }\n` ); 46 | 47 | const comparison = compareRuns( firstRun, secondRun ); 48 | const formatted = formatComparison( comparison ); 49 | console.log( formatted ); 50 | }; 51 | 52 | main( process.argv ); 53 | -------------------------------------------------------------------------------- /src/config.js: -------------------------------------------------------------------------------- 1 | const yaml = require( 'js-yaml' ); 2 | const path = require( 'path' ); 3 | 4 | /** 5 | * Default configuration. 6 | * 7 | * @type {Object} 8 | */ 9 | const DEFAULT_CONFIG = { 10 | version: 'latest', 11 | phpcs: { 12 | enabled: true, 13 | version: 'inherit', 14 | }, 15 | eslint: { 16 | enabled: true, 17 | version: 'inherit', 18 | }, 19 | stylelint: { 20 | enabled: true, 21 | version: 'inherit', 22 | }, 23 | }; 24 | 25 | /** 26 | * Filename by which to find a custom configuration in a project. 27 | * 28 | * This can be customized by setting the CONFIG_FILE environment variable. 29 | * 30 | * @type {string} 31 | */ 32 | const FILENAME = process.env.CONFIG_FILE || 'hmlinter.yml'; 33 | 34 | /** 35 | * Reads the app configuration from the given YAML file in the `.github` 36 | * directory of the repository. 37 | * 38 | * @internal Ported from probot, but adapted to read from the current branch. 39 | * 40 | * @param context Context from Probot 41 | * @param head SHA of the head commit we're running against. 42 | * @return Configuration object read from the file 43 | */ 44 | module.exports = async ( context, head ) => { 45 | const params = { 46 | ...context.repo( { 47 | path: path.posix.join( '.github', FILENAME ), 48 | } ), 49 | ref: head, 50 | }; 51 | 52 | try { 53 | const res = await context.github.repos.getContent( params ); 54 | const config = yaml.safeLoad( Buffer.from( res.data.content, 'base64' ).toString() ) || {}; 55 | return { 56 | ...DEFAULT_CONFIG, 57 | ...config 58 | }; 59 | } catch ( err ) { 60 | if ( err.code === 404 ) { 61 | return DEFAULT_CONFIG; 62 | } else { 63 | throw err; 64 | } 65 | } 66 | }; 67 | -------------------------------------------------------------------------------- /src/util.js: -------------------------------------------------------------------------------- 1 | const fs = require( 'fs' ); 2 | const path = require( 'path' ); 3 | 4 | const githubApi = require( '@octokit/rest' ); 5 | 6 | const GIST_ACCESS_TOKEN = process.env.GIST_ACCESS_TOKEN || null; 7 | 8 | /** 9 | * Combine results-by-linter into a single results object. 10 | * 11 | * @param {Array} results Results from linting. 
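 * Illustrative input/output (assumption, not from the original source): each entry
 * is one linter's output, and the per-file message arrays are merged together:
 *   combineLinters( [
 *     { files: { 'a.php': [ err1 ] } },
 *     { files: { 'a.php': [ err2 ], 'b.js': [ err3 ] } },
 *   ] )
 *   // => { 'a.php': [ err1, err2 ], 'b.js': [ err3 ] }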
12 | * @returns {*} 13 | */ 14 | function combineLinters( results ) { 15 | return results 16 | .map( linter => linter.files ) 17 | .reduce( ( allResults, linterResults ) => { 18 | Object.keys( linterResults ).forEach( file => { 19 | if ( allResults[ file ] ) { 20 | allResults[ file ].push( ...linterResults[ file ] ); 21 | } else { 22 | allResults[ file ] = linterResults[ file ]; 23 | } 24 | } ); 25 | return allResults; 26 | }, {} ); 27 | } 28 | 29 | /** 30 | * Create a GitHub Gist (used for logging raw lint data). 31 | * 32 | * @param {String} description A descriptive name for this gist. 33 | * @param {String} filename Filename for the gist. 34 | * @param {String} content The content of the file. 35 | * @returns {Promise<*>} 36 | */ 37 | const createGist = async ( description, filename, content ) => { 38 | if ( ! GIST_ACCESS_TOKEN ) { 39 | console.warn( 'Missing GIST_ACCESS_TOKEN for Gist creation' ); 40 | return null; 41 | } 42 | 43 | const gh = new githubApi(); 44 | gh.authenticate( { 45 | type: 'token', 46 | token: GIST_ACCESS_TOKEN 47 | } ); 48 | const response = await gh.gists.create( { 49 | files: { 50 | [ filename ]: { content }, 51 | }, 52 | public: false, 53 | description, 54 | } ); 55 | return response.data.html_url; 56 | }; 57 | 58 | module.exports = { 59 | combineLinters, 60 | createGist, 61 | }; 62 | -------------------------------------------------------------------------------- /src/run.js: -------------------------------------------------------------------------------- 1 | const probotUtil = require( '@humanmade/probot-util' ); 2 | const fs = require( 'fs' ); 3 | const https = require( 'https' ); 4 | const path = require( 'path' ); 5 | const pify = require( 'pify' ); 6 | const rimraf = require( 'rimraf' ); 7 | const tar = require( 'tar' ); 8 | 9 | const realpath = pify( fs.realpath ); 10 | 11 | const getLinters = require( './linters' ); 12 | const TEMP_DIR = process.env.TEMP_DIR || '/tmp' 13 | const REPO_DIR = `${ TEMP_DIR }/repos`; 14 | 15 | module.exports = async ( pushConfig, config, github, allowReuse = false ) => { 16 | const { commit, owner, repo, id } = pushConfig; 17 | 18 | // Start setting up the linters. 19 | const linterPromise = getLinters( config ); 20 | 21 | await probotUtil.file.ensureDirectory( REPO_DIR ); 22 | const extractDir = path.join( await realpath( REPO_DIR ), `${owner}-${repo}-${commit}-${id}` ); 23 | 24 | if ( ! allowReuse || ! fs.existsSync( extractDir ) ) { 25 | await probotUtil.repo.download( extractDir, pushConfig, github ); 26 | } 27 | 28 | // Now that we have the code, start linting! 29 | const linters = await linterPromise; 30 | const results = await Promise.all( linters.map( linter => linter( extractDir, pushConfig ) ) ); 31 | 32 | if ( ! allowReuse ) { 33 | // Remove the temporary directory. 34 | await pify( rimraf )( extractDir ); 35 | } 36 | 37 | // Calculate totals across all tools. 
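	// Illustrative note (assumption, not from the original source): each entry in
	// `results` is expected to follow the shared linter output shape, e.g.
	//   { totals: { errors: 2, warnings: 1 }, files: { 'inc/foo.php': [ /* messages */ ] } },
	// so the reduce below simply sums the per-linter error and warning counts.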
38 | const totals = results.reduce( ( totals, result ) => { 39 | if ( result.totals.errors ) { 40 | totals.errors += result.totals.errors; 41 | } 42 | if ( result.totals.warnings ) { 43 | totals.warnings += result.totals.warnings; 44 | } 45 | return totals; 46 | }, { errors: 0, warnings: 0 } ); 47 | 48 | return { 49 | passed: totals.errors === 0 && totals.warnings === 0, 50 | totals, 51 | results, 52 | }; 53 | }; 54 | -------------------------------------------------------------------------------- /scripts/test-commit.js: -------------------------------------------------------------------------------- 1 | const githubApi = require( 'github' ); 2 | 3 | const run = require( '../src/run' ); 4 | const { formatSummary, resultsByFile } = require( '../src/format' ); 5 | 6 | const printUsage = () => { 7 | console.log( 'node test-commit.js / ' ); 8 | console.log( ' Run a specific commit or branch via the linters for testing.' ); 9 | }; 10 | 11 | const main = argv => { 12 | const [ fullRepo, commit ] = argv.slice( 2 ); 13 | if ( ! fullRepo || ! commit ) { 14 | printUsage(); 15 | process.exit( 1 ); 16 | } 17 | 18 | const [ owner, repo ] = fullRepo.split( '/' ); 19 | if ( ! owner || ! repo ) { 20 | printUsage(); 21 | process.exit( 1 ); 22 | } 23 | 24 | const pushConfig = { 25 | owner, 26 | repo, 27 | commit, 28 | }; 29 | const github = new githubApi(); 30 | 31 | if ( process.env.HM_LINTER_GITHUB_TOKEN ) { 32 | github.authenticate( { 33 | type: 'token', 34 | token: process.env.HM_LINTER_GITHUB_TOKEN, 35 | } ); 36 | } 37 | 38 | run( pushConfig, github, true ) 39 | .then( results => { 40 | const summary = formatSummary( results ); 41 | if ( results.passed ) { 42 | console.log( `Passed: ${ summary }` ); 43 | return; 44 | } 45 | 46 | console.log( `Failed: ${ summary }` ); 47 | 48 | const byFile = resultsByFile( results.results ); 49 | Object.keys( byFile ).forEach( file => { 50 | if ( Object.keys( byFile[ file ] ).length < 1 ) { 51 | return; 52 | } 53 | 54 | console.log( `${ file }:` ); 55 | Object.keys( byFile[ file ] ).forEach( line => { 56 | console.log( ` L${ line }:` ); 57 | byFile[ file ][ line ].forEach( error => { 58 | console.log( ` ${ error }` ); 59 | } ); 60 | } ); 61 | } ); 62 | process.exit( 2 ); 63 | } ) 64 | .catch( err => { 65 | console.error( err ); 66 | process.exit( 3 ); 67 | } ); 68 | }; 69 | 70 | main( process.argv ); 71 | -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | const fs = require( 'fs' ); 2 | const path = require( 'path' ); 3 | const pify = require( 'pify' ); 4 | const rimraf = require( 'rimraf' ); 5 | const realpath = pify( fs.realpath ); 6 | 7 | const { onAdd, onCheck, onPush, onOpenPull, onUpdatePull } = require( './hooks' ); 8 | 9 | const TEMP_DIR = process.env.TEMP_DIR || '/tmp' 10 | const clean = async ( context ) => { 11 | const { payload } = context; 12 | let id, head_sha; 13 | 14 | if ( payload.check_suite ) { 15 | ( { head_sha, id } = payload.check_suite ); 16 | } else if ( payload.check_run && payload.check_run.check_suite ) { 17 | ( { head_sha, id } = payload.check_run.check_suite ); 18 | } else if( payload.pull_request ) { 19 | head_sha = payload.pull_request.head.sha; 20 | id = payload.pull_request.id; 21 | } else { 22 | throw 'No pull-request and commit data available for the request.'; 23 | } 24 | 25 | const owner = payload.repository.owner.login; 26 | const repo = payload.repository.name; 27 | const extractDir = path.join( await 
realpath( `${ TEMP_DIR }/repos` ), `${owner}-${repo}-${head_sha}-${id}` ); 28 | 29 | console.log( `Cleaning up ${ extractDir }` ); 30 | 31 | const rmrf = pify( rimraf ); 32 | await rmrf( extractDir ); 33 | await rmrf( `${ TEMP_DIR }/downloads` ); 34 | }; 35 | 36 | const withClean = func => async ( args ) => { 37 | const context = args; 38 | const REPO_DIR = `${ TEMP_DIR }/repos`; 39 | await fs.promises.mkdir( REPO_DIR, { recursive: true } ); // Create recursive directory if root is not present. 40 | func( args ).finally( () => { 41 | clean( context ); 42 | }); 43 | } 44 | 45 | module.exports = robot => { 46 | robot.on( 'installation_repositories.added', withClean( onAdd ) ); 47 | robot.on( [ 'check_suite.requested', 'check_suite.rerequested' ], withClean( onCheck ) ); 48 | robot.on( 'check_run.rerequested', withClean( onCheck ) ); 49 | robot.on( [ 'pull_request.opened', 'pull_request.reopened' ], withClean( onCheck ) ); 50 | }; 51 | -------------------------------------------------------------------------------- /scripts/test-pr.js: -------------------------------------------------------------------------------- 1 | const githubApi = require( 'github' ); 2 | 3 | const runForRepo = require( '../src/run' ); 4 | const { getDiffMapping } = require( '../src/diff' ); 5 | const { formatReview, formatSummary } = require( '../src/format' ); 6 | 7 | const printUsage = () => { 8 | console.log( 'node test-commit.js / ' ); 9 | console.log( ' Run a specific PR via the linters for testing.' ); 10 | }; 11 | 12 | const main = argv => { 13 | const [ fullRepo, number ] = argv.slice( 2 ); 14 | if ( ! fullRepo || ! number ) { 15 | printUsage(); 16 | process.exit( 1 ); 17 | } 18 | 19 | const [ owner, repo ] = fullRepo.split( '/' ); 20 | if ( ! owner || ! repo ) { 21 | printUsage(); 22 | process.exit( 1 ); 23 | } 24 | 25 | const github = new githubApi(); 26 | 27 | if ( process.env.HM_LINTER_GITHUB_TOKEN ) { 28 | github.authenticate( { 29 | type: 'token', 30 | token: process.env.HM_LINTER_GITHUB_TOKEN, 31 | } ); 32 | } 33 | 34 | github.pullRequests.get( { owner, repo, number } ) 35 | .then( ( { data } ) => { 36 | const commit = data.head.sha; 37 | const number = data.number; 38 | 39 | // Run the linter, and also fetch the PR diff. 
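			// Illustrative note (assumption, not from the original source): the two calls
			// below run concurrently via Promise.all. runForRepo() lints the PR's head
			// commit, while getDiffMapping() parses the PR diff into a
			// { 'path/to/file': { line: diffPosition } } map that formatReview() uses to
			// position inline review comments.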
40 | const pushConfig = { commit, owner, repo }; 41 | return Promise.all( [ 42 | runForRepo( pushConfig, github ), 43 | getDiffMapping( pushConfig, number, github ), 44 | ] ); 45 | }) 46 | .then( ( [ state, mapping ] ) => { 47 | const summary = formatSummary( state ); 48 | if ( state.passed ) { 49 | console.log( `Passed: ${ summary }` ); 50 | return; 51 | } 52 | 53 | const review = formatReview( state, mapping ); 54 | console.log( review.body ); 55 | 56 | console.log( '\n----\n' ); 57 | 58 | review.comments.map( comment => { 59 | console.log( `${ comment.path } @ ${ comment.position }:` ); 60 | console.log( ` ${ comment.body }` ); 61 | } ); 62 | } ) 63 | .catch( err => console.error( err ) ); 64 | }; 65 | 66 | main( process.argv ); 67 | -------------------------------------------------------------------------------- /scripts/get-logs.js: -------------------------------------------------------------------------------- 1 | const child_process = require( 'child_process' ); 2 | const fs = require( 'fs' ); 3 | 4 | const reqId = process.argv[2]; 5 | const HOUR_LIMIT = process.env.HOUR_LIMIT || 48; 6 | 7 | const now = Math.floor( Date.now() / 1000 ); 8 | const start = now - ( 60 * 60 * HOUR_LIMIT ); 9 | const region = 'us-east-1'; 10 | const group = '/aws/lambda/hm-linter'; 11 | const query = `fields @message | sort @timestamp asc | filter @requestId = '${ reqId }'`; 12 | const logDir = './logs'; 13 | 14 | process.stderr.write( `Querying for ${ reqId }\n` ); 15 | const proc = child_process.spawnSync( 16 | 'aws', 17 | [ 18 | 'logs', 19 | 'start-query', 20 | '--region', 21 | region, 22 | '--log-group-name', 23 | group, 24 | '--start-time', 25 | start, 26 | '--end-time', 27 | now, 28 | '--query-string', 29 | query, 30 | ] 31 | ); 32 | 33 | if ( proc.status !== 0 ) { 34 | console.log( '' + proc.output[2] ); 35 | return process.exit( 1 ); 36 | } 37 | 38 | const { queryId } = JSON.parse( proc.output[1] ); 39 | 40 | const dataRegex = /^.+?\t.+?\t(\{.+)/s; 41 | 42 | process.stderr.write( 'Waiting for results…\n' ); 43 | setTimeout( function () { 44 | const logFile = fs.openSync( `${ logDir }/${ reqId }.log`, 'w' ); 45 | const viewProc = child_process.spawnSync( 46 | 'aws', 47 | [ 48 | 'logs', 49 | 'get-query-results', 50 | '--region', 51 | region, 52 | '--query-id', 53 | queryId 54 | ] 55 | ); 56 | if ( viewProc.status !== 0 ) { 57 | console.log( '' + viewProc.output[2] ); 58 | return process.exit( 1 ); 59 | } 60 | 61 | const data = JSON.parse( viewProc.output[1] ); 62 | 63 | let rawData = null; 64 | data.results.slice( 0, 3 ).forEach( row => { 65 | const message = row.find( f => f.field === '@message' ).value; 66 | 67 | if ( ! 
rawData ) { 68 | const hasMatch = message.match( dataRegex ); 69 | if ( hasMatch ) { 70 | rawData = hasMatch[1]; 71 | } 72 | } 73 | 74 | fs.writeSync( logFile, message ); 75 | } ); 76 | 77 | fs.closeSync( logFile ); 78 | fs.writeFileSync( `${ logDir }/${ reqId }.json`, rawData ); 79 | process.stderr.write( `Log saved to:\t\tlogs/${ reqId }.log\n` ); 80 | process.stderr.write( `Raw data saved to:\tlogs/${ reqId }.json\n` ); 81 | }, 2000 ); 82 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "private": true, 3 | "dependencies": { 4 | "@humanmade/probot-util": "^0.2.0", 5 | "lodash.chunk": "^4.2.0", 6 | "module-alias": "^2.1.0", 7 | "parse-diff": "^0.4.0", 8 | "pify": "^3.0.0", 9 | "probot": "^6.1.0", 10 | "rimraf": "^2.6.2", 11 | "serialize-error": "^2.1.0", 12 | "smee-client": "^1.2.2", 13 | "tar": "^4.0.1" 14 | }, 15 | "devDependencies": { 16 | "dotenv": "^5.0.1", 17 | "eslint": "^7.31.0", 18 | "nodemon": "^2.0.4", 19 | "run.env": "^1.1.0" 20 | }, 21 | "scripts": { 22 | "build:babel-watch": "babel src --out-dir build --ignore src/linters/phpcs/vendor -D -w", 23 | "build:bin": "aws s3 sync s3://hm-linter/bin ./bin && chmod +x ./bin/*", 24 | "build:lib": "aws s3 sync s3://hm-linter/lib ./lib && chmod +x ./lib/*", 25 | "build:npm": "docker run --rm -v \"${PWD}\":/var/task lambci/lambda:build-nodejs12.x npm install", 26 | "build": "npm run build:bin && npm run build:lib && npm run build:npm", 27 | "clean:package": "( test -f lambda-function.zip && rm lambda-function.zip ) || true", 28 | "deploy:check": "test -f bin/php && test -f lib/libcrypt.so && test -f production.private-key.pem", 29 | "deploy:check-dev": "test -f bin/php && test -f lib/libcrypt.so && test -f development.private-key.pem", 30 | "deploy:package": "zip --symlinks -9 -x lambda-function.zip -r lambda-function *", 31 | "deploy:package-dev": "npm run deploy:package && cp development.private-key.pem private-key.pem && zip -9 lambda-function private-key.pem", 32 | "deploy:package-production": "npm run deploy:package && cp production.private-key.pem private-key.pem && zip -9 lambda-function private-key.pem", 33 | "deploy:push": "NODE_ENV=production run.env scripts/deploy.sh", 34 | "deploy:push-dev": "NODE_ENV=development run.env scripts/deploy.sh", 35 | "deploy": "npm run clean:package && npm run build && npm run deploy:check && npm run deploy:package-production && npm run deploy:push", 36 | "deploy:dev": "npm run clean:package && npm run build && npm run deploy:check-dev && npm run deploy:package-dev && npm run deploy:push-dev", 37 | "start": "docker run -it --rm --env-file .env.dev -e NO_UPDATE_NOTIFIER=1 -v \"$PWD\":/var/task:ro --entrypoint /var/task/node_modules/.bin/nodemon lambci/lambda:nodejs12.x /var/task/start-development.js", 38 | "test": "docker run -i --rm --env-file .env -e DOCKER_LAMBDA_USE_STDIN=1 -v \"$PWD\":/var/task:ro lambci/lambda:nodejs12.x index.probotHandler" 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /fixtures/installation_repositories.added.json: -------------------------------------------------------------------------------- 1 | { 2 | "action": "added", 3 | "installation": { 4 | "id": 61767, 5 | "account": { 6 | "login": "humanmade", 7 | "id": 644666, 8 | "avatar_url": "https://avatars1.githubusercontent.com/u/644666?v=4", 9 | "gravatar_id": "", 10 | "url": "https://api.github.com/users/humanmade", 11 | 
"html_url": "https://github.com/humanmade", 12 | "followers_url": "https://api.github.com/users/humanmade/followers", 13 | "following_url": "https://api.github.com/users/humanmade/following{/other_user}", 14 | "gists_url": "https://api.github.com/users/humanmade/gists{/gist_id}", 15 | "starred_url": "https://api.github.com/users/humanmade/starred{/owner}{/repo}", 16 | "subscriptions_url": "https://api.github.com/users/humanmade/subscriptions", 17 | "organizations_url": "https://api.github.com/users/humanmade/orgs", 18 | "repos_url": "https://api.github.com/users/humanmade/repos", 19 | "events_url": "https://api.github.com/users/humanmade/events{/privacy}", 20 | "received_events_url": "https://api.github.com/users/humanmade/received_events", 21 | "type": "Organization", 22 | "site_admin": false 23 | }, 24 | "repository_selection": "selected", 25 | "access_tokens_url": "https://api.github.com/installations/61767/access_tokens", 26 | "repositories_url": "https://api.github.com/installation/repositories", 27 | "html_url": "https://github.com/organizations/humanmade/settings/installations/61767", 28 | "app_id": 5455, 29 | "target_id": 644666, 30 | "target_type": "Organization", 31 | "permissions": { 32 | "pull_requests": "write", 33 | "statuses": "write", 34 | "contents": "read", 35 | "metadata": "read" 36 | }, 37 | "events": [ 38 | "pull_request", 39 | "push" 40 | ], 41 | "created_at": 1508697521, 42 | "updated_at": 1517926029, 43 | "single_file_name": null 44 | }, 45 | "repository_selection": "selected", 46 | "repositories_added": [ 47 | { 48 | "id": 120388807, 49 | "name": "repress", 50 | "full_name": "humanmade/repress" 51 | } 52 | ], 53 | "repositories_removed": [ 54 | 55 | ], 56 | "sender": { 57 | "login": "rmccue", 58 | "id": 21655, 59 | "avatar_url": "https://avatars3.githubusercontent.com/u/21655?v=4", 60 | "gravatar_id": "", 61 | "url": "https://api.github.com/users/rmccue", 62 | "html_url": "https://github.com/rmccue", 63 | "followers_url": "https://api.github.com/users/rmccue/followers", 64 | "following_url": "https://api.github.com/users/rmccue/following{/other_user}", 65 | "gists_url": "https://api.github.com/users/rmccue/gists{/gist_id}", 66 | "starred_url": "https://api.github.com/users/rmccue/starred{/owner}{/repo}", 67 | "subscriptions_url": "https://api.github.com/users/rmccue/subscriptions", 68 | "organizations_url": "https://api.github.com/users/rmccue/orgs", 69 | "repos_url": "https://api.github.com/users/rmccue/repos", 70 | "events_url": "https://api.github.com/users/rmccue/events{/privacy}", 71 | "received_events_url": "https://api.github.com/users/rmccue/received_events", 72 | "type": "User", 73 | "site_admin": false 74 | } 75 | } -------------------------------------------------------------------------------- /src/linters/index.js: -------------------------------------------------------------------------------- 1 | const probotUtil = require( '@humanmade/probot-util' ); 2 | const fs = require( 'fs' ); 3 | const https = require( 'https' ); 4 | const tar = require( 'tar' ); 5 | 6 | const available = { 7 | eslint: require( './eslint' ), 8 | phpcs: require( './phpcs' ), 9 | stylelint: require( './stylelint' ), 10 | }; 11 | const enabled = ( process.env.ENABLED_LINTERS || 'eslint,phpcs,stylelint' ).split( ',' ); 12 | const TEMP_DIR = process.env.TEMP_DIR || '/tmp' 13 | const STANDARDS_DIR = `${ TEMP_DIR }/hmlinter-standards`; 14 | const BASE_URL = process.env.STANDARD_URL || 'https://make.hmn.md/hmlinter/standards'; 15 | 16 | /** 17 | * Send a HTTP request. 
18 | * 19 | * Promisified version of Node's https.get 20 | * 21 | * @param args Arguments available to https.get. See https://nodejs.org/api/https.html#https_https_get_url_options_callback 22 | * @returns {Promise} 23 | */ 24 | const httpGet = ( ...args ) => { 25 | return new Promise( ( resolve, reject ) => { 26 | const req = https.get( ...args, res => { 27 | const data = []; 28 | 29 | res.on( 'data', chunk => data.push( chunk ) ); 30 | res.on( 'end', () => { 31 | resolve( { ...res, body: Buffer.concat( data ) } ); 32 | } ); 33 | } ); 34 | req.on( 'error', err => reject( err ) ); 35 | } ); 36 | }; 37 | 38 | /** 39 | * Download an external standards file. 40 | * 41 | * @param {String} url URL of the file to download. 42 | * @param {String} filename Local filename to save to. 43 | * @returns {Promise<*>} 44 | */ 45 | const downloadFile = async ( url, filename ) => { 46 | await probotUtil.file.ensureDirectory( STANDARDS_DIR ); 47 | 48 | console.log( `Fetching ${ url }` ); 49 | const res = await httpGet( url ); 50 | 51 | if ( res.statusCode !== 200 ) { 52 | throw new Error( `Could not fetch ${ url }: ${ res.statusCode } ${ res.statusReason }` ); 53 | } 54 | 55 | console.log( `Saving to ${ filename }` ); 56 | return await probotUtil.file.saveDownloadedFile( res.body, filename ); 57 | }; 58 | 59 | /** 60 | * Download and build a linter instance. 61 | * 62 | * @param {String} linter Which linter to setup. 63 | * @param {String} version Standards version to use. 64 | * @returns {Promise<*>} 65 | */ 66 | const prepareLinter = async ( linter, version ) => { 67 | const filename = `${ linter }-${ version }.tar.gz`; 68 | const url = `${ BASE_URL }/${ filename }`; 69 | const directory = `${ STANDARDS_DIR }/${ linter }-${ version }`; 70 | 71 | console.log( `Downloading ${ linter } standard from ${ url }` ); 72 | const tarball = await downloadFile( url, filename ); 73 | 74 | console.log( `Extracting standard to ${ directory }` ); 75 | 76 | await probotUtil.file.ensureDirectory( directory ); 77 | 78 | const extracted = await tar.extract( { 79 | cwd: directory, 80 | file: tarball, 81 | } ); 82 | 83 | fs.unlink( tarball, () => {} ); 84 | 85 | const buildLinter = available[ linter ]; 86 | return buildLinter( `${ directory }/` ); 87 | }; 88 | 89 | /** 90 | * Run all linters. 91 | * 92 | * @param {Promise} configPromise 93 | * @returns {Promise} 94 | */ 95 | module.exports = async configPromise => { 96 | // Ensure we actually have the config. 97 | const config = await configPromise; 98 | 99 | console.log( 'Preparing linters using config:' ); 100 | console.log( config ); 101 | 102 | const linters = Object.keys( available ).map( type => { 103 | if ( enabled.indexOf( type ) === -1 ) { 104 | console.log( `Skipping ${ type }, not enabled in ENABLED_LINTERS` ); 105 | return null; 106 | } 107 | 108 | const lintConfig = config[ type ] || {}; 109 | if ( ! lintConfig.enabled ) { 110 | return null; 111 | } 112 | 113 | const version = lintConfig.version === 'inherit' ? config.version : ( lintConfig.version || config.version ); 114 | 115 | // Download and extract the linter in the background. 116 | const linterPromise = prepareLinter( type, version ); 117 | 118 | // Ensure we don't trigger any uncaught exception errors. 119 | linterPromise.catch( err => console.log( `Error setting up ${ type }` ) ); 120 | 121 | return async ( ...args ) => { 122 | // Only await when needed. 
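			// Note: prepareLinter() above was started without being awaited, so every
			// enabled linter downloads and extracts its standard in parallel; this await
			// only blocks the first time this particular linter is actually invoked.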
123 | const linter = await linterPromise; 124 | return linter( ...args ); 125 | }; 126 | } ); 127 | 128 | return linters.filter( Boolean ); 129 | }; 130 | -------------------------------------------------------------------------------- /src/linters/stylelint/index.js: -------------------------------------------------------------------------------- 1 | const fs = require( 'fs' ); 2 | const process = require( 'process' ); 3 | const moduleAlias = require( 'module-alias' ); 4 | const path = require( 'path' ); 5 | 6 | /** 7 | * Convert an error message from the stylelint format into one acceptable for GitHub 8 | * 9 | * @param {Object} message Data about a warning from stylelint. 10 | * @returns {Object} 11 | */ 12 | const formatMessage = message => ( { 13 | line: message.line, 14 | column: message.column, 15 | severity: message.severity, 16 | message: message.text, 17 | source: message.rule, 18 | } ); 19 | 20 | /** 21 | * Fetch a count of the total errors and warnings for our response. 22 | * 23 | * @param {Object} files Formatted warnings against specific files. 24 | * @returns {{warnings: number, errors: number}} 25 | */ 26 | const getTotals = ( files ) => { 27 | const allErrors = Object.keys( files ).reduce( ( acc, key ) => [ ...acc, ...files[ key ] ], [] ); 28 | 29 | return { 30 | errors: allErrors.filter( errorData => errorData.severity === 'error' ).length, 31 | warnings: allErrors.filter( errorData => errorData.severity === 'warning' ).length, 32 | }; 33 | }; 34 | 35 | /** 36 | * Properly Format the return for GitHub. 37 | * 38 | * @param {Object} data Raw data from stylelint Node runner. 39 | * @param {String} codepath Path against which to check files. 40 | * @returns {{files: {}, totals: {warnings: number, errors: number}}} 41 | */ 42 | const formatOutput = ( data, codepath ) => { 43 | const files = {}; 44 | 45 | // There were no errors, simply bounce. 46 | if ( ! data.maxWarningsExceeded || data.maxWarningsExceeded.foundWarnings.length < 1 ) { 47 | return { 48 | totals: { 49 | errors: 0, 50 | warnings: 0, 51 | }, 52 | files: [], 53 | }; 54 | } 55 | 56 | data.results.forEach( result => { 57 | // Only parse through CSS or SCSS files. 58 | if ( ! result.source.match( /\.s?css$/ ) ) { 59 | return; 60 | } 61 | 62 | // Exclude any empty files. 63 | if ( ! result.warnings.length ) { 64 | return; 65 | } 66 | 67 | const relPath = path.relative( codepath, result.source ); 68 | files[ relPath ] = result.warnings.map( formatMessage ); 69 | } ); 70 | 71 | return { 72 | totals: getTotals( files ), 73 | files, 74 | }; 75 | }; 76 | 77 | /** 78 | * Run stylelint checks. 79 | * 80 | * @param {String} standardPath Path against which to check files. 81 | * @returns {() => Promise} 82 | */ 83 | module.exports = standardPath => codepath => { 84 | const options = { 85 | files: codepath, 86 | configBasedir: `${ standardPath }node_modules`, 87 | // Force a count of all warnings and errors from the Node return. 88 | maxWarnings: 0, 89 | }; 90 | 91 | // stylelint use `resolve-from` internally which looks at specific directories only for configs. 92 | // We need to copy the files for our standard to the node_modules directory so that stylelint 93 | // can correctly find our standard alongside the others. 94 | // 95 | // Copying the stylelint files so that stylelint can find and use our standard set alongside the others. 
96 | const actualStandardPath = `${ standardPath }/node_modules/@humanmade/stylelint-config`; 97 | fs.mkdir( actualStandardPath, { recursive: true }, () => { 98 | fs.copyFileSync( `${ standardPath }/package.json`, `${ actualStandardPath }/package.json` ); 99 | fs.copyFileSync( `${ standardPath }/.stylelintrc.json`, `${ actualStandardPath }/.stylelintrc.json` ); 100 | } ); 101 | 102 | moduleAlias.addAlias( '@runner-packages', `${ standardPath }node_modules` ); 103 | const { lint } = require( '@runner-packages/stylelint' ); 104 | moduleAlias.reset(); 105 | 106 | const oldCwd = process.cwd(); 107 | try { 108 | process.chdir( codepath ); 109 | } catch { 110 | console.log( 'Stylelint cwd directory change failed' ); 111 | } 112 | 113 | return new Promise( resolve => { 114 | let output; 115 | 116 | output = lint( options ) 117 | .then( resultObject => { 118 | process.chdir( oldCwd ); 119 | return formatOutput( resultObject, codepath ); 120 | } ) 121 | .catch( error => { 122 | // code 78 is a configuration not found, which means we can't access @humanmade/stylelint-config. 123 | // Run with our default configuration; most projects only use this anyway. 124 | if ( error.code === 78 ) { 125 | console.log( 'Running stylelint with default config on path', codepath ); 126 | 127 | const results = lint( { ...options, configFile: `${ standardPath }/.stylelintrc.json` } ) 128 | .then( resultObject => formatOutput( resultObject, codepath ) ); 129 | 130 | process.chdir( oldCwd ); 131 | 132 | return results; 133 | } else { 134 | process.chdir( oldCwd ); 135 | console.log( error ); 136 | throw error; 137 | } 138 | } ); 139 | 140 | resolve( output ); 141 | } ); 142 | }; 143 | -------------------------------------------------------------------------------- /src/linters/eslint/index.js: -------------------------------------------------------------------------------- 1 | const fs = require( 'fs' ); 2 | const Module = require( 'module' ); 3 | const path = require( 'path' ); 4 | const moduleAlias = require( 'module-alias' ); 5 | 6 | /** 7 | * Convert a ESLint error into formatOutput-style results. 8 | * 9 | * @param {Object} message Raw message data from eslint. 10 | * @returns {Object} 11 | */ 12 | const formatMessage = message => { 13 | return { 14 | line: message.line, 15 | column: message.column, 16 | severity: message.severity >= 2 ? 'error' : 'warning', 17 | message: message.message, 18 | source: message.ruleId, 19 | }; 20 | }; 21 | 22 | /** 23 | * Convert ESLint results into common output format. 24 | * 25 | * @param {Object} data Warnings and errors from eslint. 26 | * @param {String} codepath Path to the code getting linted. 27 | * @returns {{files, totals: {warnings: *, errors: *}}} 28 | */ 29 | const formatOutput = ( data, codepath ) => { 30 | const totals = { 31 | errors: data.errorCount, 32 | warnings: data.warningCount, 33 | }; 34 | const files = {}; 35 | // console.log( data ); 36 | data.results.forEach( result => { 37 | // Exclude any empty files. 38 | if ( ! result.messages.length ) { 39 | return; 40 | } 41 | 42 | const relPath = path.relative( codepath, result.filePath ); 43 | files[ relPath ] = result.messages.map( formatMessage ); 44 | } ); 45 | 46 | return { totals, files }; 47 | }; 48 | 49 | /** 50 | * Run the ESLint engine against the given path. 51 | * 52 | * Handles ignoring any errors that aren't relevant. 53 | * 54 | * @param {Object} engine ESLint engine instance. 55 | * @param {String} codepath Path to run linter on. 56 | * @return {Object} ESLint results object. 
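 *
 * Illustrative result shape (assumed from ESLint's CLIEngine report format):
 *   { errorCount, warningCount, results: [ { filePath, messages: [ { ruleId, severity, message, line, column } ] } ] }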
57 | */ 58 | const run = ( engine, codepath ) => { 59 | try { 60 | return engine.executeOnFiles( [ codepath ] ); 61 | } catch ( err ) { 62 | switch ( err.messageTemplate ) { 63 | case 'file-not-found': 64 | case 'all-files-ignored': 65 | // No JS files in the repo! Ignore this, as it's not really 66 | // an error. 67 | return { 68 | errorCount: 0, 69 | warningCount: 0, 70 | results: [], 71 | }; 72 | 73 | default: 74 | throw err; 75 | } 76 | } 77 | }; 78 | 79 | /** 80 | * ESLint standard to use when running eslint. 81 | * 82 | * This can be customized by setting the DEFAULT_STANDARD_ESLINT environment variable. 83 | * 84 | * @type {string} 85 | */ 86 | const DEFAULT_STANDARD = process.env.DEFAULT_STANDARD_ESLINT || 'eslint-config-humanmade'; 87 | 88 | /** 89 | * Alternative (new) package name for the eslint standard. 90 | * 91 | * This cannot be customized, and should be swapped with the fallback for the 92 | * DEFAULT_STANDARD_ESLINT variable once v1.0.0 is the default "latest" version. 93 | * 94 | * @type {string} 95 | */ 96 | const ALTERNATE_STANDARD = '@humanmade/eslint-config'; 97 | 98 | /** 99 | * Run eslint linting. 100 | * 101 | * @param {String} standardPath Path to custom standard set. 102 | */ 103 | module.exports = standardPath => codepath => { 104 | const options = { 105 | cwd: codepath, 106 | }; 107 | 108 | // We need to use node_modules from the standard directory, but because 109 | // we're not invoking eslint over the CLI, we can't change where `require()` 110 | // loads modules from unless we override the module loader. 111 | // 112 | // This ensures dependencies load from the standards instead, and the 113 | // standard itself is loaded from the right place. 114 | moduleAlias.addPath( `${ standardPath }/node_modules` ); 115 | moduleAlias.addAlias( DEFAULT_STANDARD, standardPath ); 116 | moduleAlias.addAlias( ALTERNATE_STANDARD, standardPath ); 117 | 118 | const actualStandardPath = require.resolve( DEFAULT_STANDARD ); 119 | const origFindPath = Module._findPath; 120 | Module._findPath = ( name, ...args ) => { 121 | const path = origFindPath( name, ...args ); 122 | if ( ! path && ( name === DEFAULT_STANDARD || name === ALTERNATE_STANDARD ) ) { 123 | return actualStandardPath; 124 | } 125 | return path; 126 | }; 127 | 128 | const { CLIEngine } = require( 'eslint' ); 129 | const engine = new CLIEngine( options ); 130 | 131 | return new Promise( ( resolve, reject ) => { 132 | let output; 133 | try { 134 | output = run( engine, codepath ); 135 | } catch ( err ) { 136 | if ( err.messageTemplate === 'no-config-found' ) { 137 | // Try with default configuration. 138 | const engine = new CLIEngine( { ...options, configFile: `${ standardPath }/index.js` } ); 139 | console.log( 'Running eslint with default config on path', codepath ); 140 | output = run( engine, codepath ); 141 | } else { 142 | console.log( err ); 143 | throw err; 144 | } 145 | } 146 | 147 | // Reset path loader. 
148 | moduleAlias.reset(); 149 | Module._findPath = origFindPath; 150 | 151 | resolve( formatOutput( output, codepath ) ); 152 | } ); 153 | }; 154 | -------------------------------------------------------------------------------- /src/linters/phpcs/index.js: -------------------------------------------------------------------------------- 1 | const child_process = require( 'child_process' ); 2 | const fs = require( 'fs' ); 3 | const path = require( 'path' ); 4 | 5 | const CONFIG_NAMES = [ 6 | '.phpcs.xml', 7 | 'phpcs.xml', 8 | '.phpcs.xml.dist', 9 | 'phpcs.xml.dist', 10 | 'phpcs.ruleset.xml', 11 | ]; 12 | 13 | /** 14 | * Convert a phpcs error into formatOutput-style results. 15 | * 16 | * @param {Object} message Raw message data from PHPCS. 17 | * @returns {Object} 18 | */ 19 | const formatMessage = message => { 20 | const details = `
<details><summary>Error details</summary>${message.source} from phpcs</details>
`; 21 | const text = `${message.message}`; 22 | 23 | return { 24 | line: message.line, 25 | column: message.column, 26 | severity: message.severity === 5 ? 'error' : 'warning', 27 | message: text, 28 | source: message.source, 29 | }; 30 | }; 31 | 32 | /** 33 | * Convert phpcs results into common output format. 34 | * 35 | * @param {Object} data Warnings and errors from PHPCS. 36 | * @param {String} codepath Path to the code getting linted. 37 | * @returns {{files, totals: {warnings: *, errors: *}}} 38 | */ 39 | const formatOutput = ( data, codepath ) => { 40 | const totals = { 41 | errors: 0, 42 | warnings: 0, 43 | }; 44 | const files = {}; 45 | Object.keys( data.files ).forEach( file => { 46 | // Ensure the path has a leading slash. 47 | const fullPath = file.replace( /^([^\/])/,'/$1' ); 48 | const relPath = path.relative( codepath, fullPath ); 49 | const messages = data.files[ file ].messages.map( formatMessage ); 50 | totals.errors = messages.reduce( ( count, message ) => { 51 | return message.severity === 'error' ? count + 1 : count; 52 | }, totals.errors ); 53 | totals.warnings = messages.reduce( ( count, message ) => { 54 | return message.severity === 'warning' ? count + 1 : count; 55 | }, totals.warnings ); 56 | files[ relPath ] = messages; 57 | } ); 58 | 59 | return { totals, files }; 60 | }; 61 | 62 | /** 63 | * Run PHPCS linting. 64 | * 65 | * @param {String} standardPath Path to custom standard set. 66 | */ 67 | module.exports = standardPath => codepath => { 68 | const phpcsPath = path.join( standardPath, 'vendor', 'bin', 'phpcs' ); 69 | 70 | // Detect a ruleset file if we can, otherwise use default. 71 | return Promise.all( CONFIG_NAMES.map( filename => { 72 | return new Promise( resolve => { 73 | const filepath = path.join( codepath, filename ); 74 | fs.access( filepath, err => { 75 | resolve( err ? null : filepath ); 76 | } ); 77 | } ); 78 | } ) ).then( rulesetFiles => { 79 | let standard; 80 | if ( process.env.FORCE_STANDARD_PHPCS ) { 81 | standard = process.env.FORCE_STANDARD_PHPCS; 82 | } else { 83 | standard = rulesetFiles.find( file => !! file ) || process.env.DEFAULT_STANDARD_PHPCS || 'vendor/humanmade/coding-standards'; 84 | } 85 | 86 | const installedPaths = [ 87 | 'vendor/fig-r/psr2r-sniffer', 88 | 'vendor/humanmade/coding-standards/HM', 89 | 'vendor/phpcompatibility/php-compatibility', 90 | 'vendor/phpcompatibility/phpcompatibility-paragonie', 91 | 'vendor/phpcompatibility/phpcompatibility-wp', 92 | 'vendor/wp-coding-standards/wpcs', 93 | ] 94 | 95 | // Only include HM-Minimum if the path exists within this version of the standards. 96 | if ( fs.existsSync( path.join( standardPath, 'vendor', 'humanmade', 'coding-standards', 'HM-Minimum' ) ) ) { 97 | installedPaths.push( 'vendor/humanmade/coding-standards/HM-Minimum' ); 98 | } 99 | 100 | // Only include the VIP WPCS if the path exists within this version of the standards. 
101 | if ( fs.existsSync( path.join( standardPath, 'vendor', 'automattic', 'vipwpcs' ) ) ) { 102 | installedPaths.push( 'vendor/automattic/vipwpcs' ); 103 | } 104 | 105 | const args = [ 106 | phpcsPath, 107 | '-d', 108 | 'memory_limit=1G', 109 | '-q', 110 | '--runtime-set', 111 | 'installed_paths', 112 | installedPaths.join( ',' ), 113 | `--standard=${standard}`, 114 | '--report=json', 115 | codepath 116 | ]; 117 | const opts = { 118 | cwd: standardPath, 119 | env: process.env, 120 | }; 121 | 122 | return new Promise( ( resolve, reject ) => { 123 | console.log( 'Spawning PHP process', args, opts ); 124 | const proc = child_process.spawn( 'php', args, opts ); 125 | let stdout = ''; 126 | let stderr = ''; 127 | proc.stdout.on( 'data', data => stdout += data ); 128 | proc.stderr.on( 'data', data => stderr += data ); 129 | proc.on( 'error', e => { console.log(e) } ); 130 | proc.on( 'close', errCode => { 131 | // 0: no errors found 132 | // 1: errors found 133 | // 2: fixable errors found 134 | // 3: processing error 135 | if ( errCode > 2 ) { 136 | return reject( stderr || stdout ); 137 | } 138 | 139 | let data; 140 | try { 141 | data = JSON.parse( stdout ); 142 | } catch ( e ) { 143 | // Couldn't decode JSON, so likely a human readable error. 144 | console.log(stdout) 145 | console.log(e) 146 | return reject( stdout ); 147 | } 148 | 149 | resolve( formatOutput( data, codepath ) ); 150 | } ); 151 | } ); 152 | } ); 153 | }; 154 | -------------------------------------------------------------------------------- /src/review.js: -------------------------------------------------------------------------------- 1 | const metadata = require( './metadata' ); 2 | const { combineLinters } = require( './util' ); 3 | 4 | /** 5 | * Fetch all previous runs against a branch or repo. 6 | * 7 | * @param {Object} github GitHub API access. 8 | * @param {String} owner Username of the code owner. 9 | * @param {String} repo Name of the repository being linted. 10 | * @param {Number} number Run number. 11 | * @returns {Promise<*>} 12 | */ 13 | async function getAll( github, owner, repo, number ) { 14 | // Paginate and exhaust. 15 | let response = await github.pullRequests.getReviews( { 16 | owner, 17 | repo, 18 | number, 19 | per_page: 100, 20 | } ) 21 | let reviews = response.data; 22 | 23 | while ( github.hasNextPage( response ) ) { 24 | response = await github.getNextPage( response ) 25 | reviews = reviews.concat( response.data ); 26 | } 27 | 28 | return reviews; 29 | } 30 | 31 | /** 32 | * Fetch data about a previous run against a branch or repo. 33 | * 34 | * @param {Object} github GitHub API access. 35 | * @param {String} owner Username of the code owner. 36 | * @param {String} repo Name of the repository being linted. 37 | * @param {Number} number Run number. 38 | * @returns {Promise} 39 | */ 40 | async function getPreviousRun( github, owner, repo, number ) { 41 | const reviews = await getAll( github, owner, repo, number ); 42 | 43 | return reviews 44 | .filter( review => review.user.type === 'Bot' && ( review.user.login === 'hm-linter' || review.user.login === 'hm-linter-development' ) ) 45 | .reverse() 46 | .map( review => metadata.parse( review.body ) ) 47 | .find( data => !! data ); 48 | } 49 | 50 | /** 51 | * Generate a unique ID string for an error. 52 | * 53 | * @param {Object} error Data about a particular error. 54 | * @returns {string} Unique error ID. 
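 *
 * Illustrative example (not from the original source):
 *   errorId( { line: 12, column: 8, source: 'no-unused-vars' } ) // => 'L12C8-no-unused-vars'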
55 | */ 56 | const errorId = error => `L${ error.line }C${ error.column || 0 }-${ error.source }`; 57 | 58 | /** 59 | * Convert an array of errors into a more meaningful keyed object. 60 | * 61 | * @param {Array} list List of errors. 62 | * @returns {Object} An organized set of errors. 63 | */ 64 | const errorsById = list => list.reduce( ( errs, error ) => { 65 | return { 66 | ...errs, 67 | [ errorId( error ) ]: error, 68 | } 69 | }, {} ); 70 | 71 | /** 72 | * Compare two sets of error data to find differences between them. 73 | * 74 | * @param {Array} left First error set. 75 | * @param {Array} right Second error set. 76 | * @returns {Object} Lists of errors unique to each set of runs. 77 | */ 78 | function diffErrors( left, right ) { 79 | const leftIds = errorsById( left ); 80 | const rightIds = errorsById( right ); 81 | 82 | return { 83 | onlyLeft: Object.keys( leftIds ).filter( id => ! rightIds[ id ] ).map( id => leftIds[ id ] ), 84 | onlyRight: Object.keys( rightIds ).filter( id => ! leftIds[ id ] ).map( id => rightIds[ id ] ), 85 | }; 86 | } 87 | 88 | /** 89 | * Count the number of errors in an error list. 90 | * 91 | * @param {Array} errors List of errors. 92 | * @returns {Number} Quantity of errors. 93 | */ 94 | const countErrors = errors => errors.reduce( ( total, err ) => err.severity === 'error' ? total + 1 : total, 0 ); 95 | 96 | /** 97 | * Count the number of warnings in an error list. 98 | * 99 | * @param {Array} errors List of errors. 100 | * @returns {Number} Quantity of warnings. 101 | */ 102 | const countWarnings = errors => errors.reduce( ( total, err ) => err.severity === 'warning' ? total + 1 : total, 0 ); 103 | 104 | /** 105 | * Compare two Linter runs against each other. 106 | * 107 | * @param {Object} previous Previous linting data. 108 | * @param {Object} current Current linting data. 109 | * @returns {Object} Organized data about the difference between two linter runs. 110 | */ 111 | function compareRuns( previous, current ) { 112 | const totals = { 113 | newErrors: 0, 114 | newWarnings: 0, 115 | fixedErrors: 0, 116 | fixedWarnings: 0, 117 | }; 118 | 119 | const newIssues = {}; 120 | const fixed = {}; 121 | 122 | const allPrevious = combineLinters( previous.results ); 123 | const allCurrent = combineLinters( current.results ); 124 | 125 | // Process all files in the previous report... 126 | Object.keys( allPrevious ).forEach( file => { 127 | const prevFile = allPrevious[ file ]; 128 | const currentFile = allCurrent[ file ]; 129 | 130 | // If the file is not in the current report, the whole file was fixed. 131 | if ( ! currentFile ) { 132 | // Skip any files which never had any errors anyway. 133 | if ( ! prevFile.length ) { 134 | return; 135 | } 136 | 137 | fixed[ file ] = prevFile; 138 | 139 | // Calculate totals. 140 | totals.fixedErrors += countErrors( prevFile ); 141 | totals.fixedWarnings += countWarnings( prevFile ); 142 | return; 143 | } 144 | 145 | const { onlyLeft, onlyRight } = diffErrors( prevFile, currentFile ); 146 | if ( onlyLeft.length > 0 ) { 147 | fixed[ file ] = onlyLeft; 148 | totals.fixedErrors += countErrors( onlyLeft ); 149 | totals.fixedWarnings += countWarnings( onlyLeft ); 150 | } 151 | 152 | if ( onlyRight.length > 0 ) { 153 | newIssues[ file ] = onlyRight; 154 | totals.newErrors += countErrors( onlyRight ); 155 | totals.newWarnings += countWarnings( onlyRight ); 156 | } 157 | } ); 158 | Object.keys( allCurrent ) 159 | .filter( file => ! 
allPrevious[ file ] ) 160 | .forEach( file => { 161 | // If the file wasn't in the previous report, the whole file is errors. 162 | newIssues[ file ] = allCurrent[ file ]; 163 | totals.newErrors += countErrors( allCurrent[ file ] ); 164 | totals.newWarnings += countWarnings( allCurrent[ file ] ); 165 | } ); 166 | 167 | const changed = Object.keys( fixed ).length > 0 || Object.keys( newIssues ).length > 0; 168 | return { 169 | changed, 170 | totals, 171 | newIssues, 172 | fixed, 173 | }; 174 | } 175 | 176 | module.exports = { 177 | compareRuns, 178 | getPreviousRun, 179 | }; 180 | -------------------------------------------------------------------------------- /fixtures/push.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref": "refs/heads/test-branch", 3 | "before": "7453cc6f6996ca0f08d9257061a887cd82050026", 4 | "after": "ffdfa08956906ade311b82e1d06f2c74941bf866", 5 | "created": false, 6 | "deleted": false, 7 | "forced": false, 8 | "base_ref": null, 9 | "compare": "https://github.com/rmccue/test-linter/compare/7453cc6f6996...ffdfa0895690", 10 | "commits": [ 11 | { 12 | "id": "ffdfa08956906ade311b82e1d06f2c74941bf866", 13 | "tree_id": "70e9e13ca0d4a6c64b2b4b08d6afc68e4e53da4b", 14 | "distinct": true, 15 | "message": "Bump version", 16 | "timestamp": "2017-09-24T19:56:30+10:00", 17 | "url": "https://github.com/rmccue/test-linter/commit/ffdfa08956906ade311b82e1d06f2c74941bf866", 18 | "author": { 19 | "name": "Ryan McCue", 20 | "email": "me@ryanmccue.info", 21 | "username": "rmccue" 22 | }, 23 | "committer": { 24 | "name": "Ryan McCue", 25 | "email": "me@ryanmccue.info", 26 | "username": "rmccue" 27 | }, 28 | "added": [ 29 | 30 | ], 31 | "removed": [ 32 | 33 | ], 34 | "modified": [ 35 | "mercator.php" 36 | ] 37 | } 38 | ], 39 | "head_commit": { 40 | "id": "ffdfa08956906ade311b82e1d06f2c74941bf866", 41 | "tree_id": "70e9e13ca0d4a6c64b2b4b08d6afc68e4e53da4b", 42 | "distinct": true, 43 | "message": "Bump version", 44 | "timestamp": "2017-09-24T19:56:30+10:00", 45 | "url": "https://github.com/rmccue/test-linter/commit/ffdfa08956906ade311b82e1d06f2c74941bf866", 46 | "author": { 47 | "name": "Ryan McCue", 48 | "email": "me@ryanmccue.info", 49 | "username": "rmccue" 50 | }, 51 | "committer": { 52 | "name": "Ryan McCue", 53 | "email": "me@ryanmccue.info", 54 | "username": "rmccue" 55 | }, 56 | "added": [ 57 | 58 | ], 59 | "removed": [ 60 | 61 | ], 62 | "modified": [ 63 | "mercator.php" 64 | ] 65 | }, 66 | "repository": { 67 | "id": 104633902, 68 | "name": "test-linter", 69 | "full_name": "rmccue/test-linter", 70 | "owner": { 71 | "name": "rmccue", 72 | "email": "me@ryanmccue.info", 73 | "login": "rmccue", 74 | "id": 21655, 75 | "avatar_url": "https://avatars3.githubusercontent.com/u/21655?v=4", 76 | "gravatar_id": "", 77 | "url": "https://api.github.com/users/rmccue", 78 | "html_url": "https://github.com/rmccue", 79 | "followers_url": "https://api.github.com/users/rmccue/followers", 80 | "following_url": "https://api.github.com/users/rmccue/following{/other_user}", 81 | "gists_url": "https://api.github.com/users/rmccue/gists{/gist_id}", 82 | "starred_url": "https://api.github.com/users/rmccue/starred{/owner}{/repo}", 83 | "subscriptions_url": "https://api.github.com/users/rmccue/subscriptions", 84 | "organizations_url": "https://api.github.com/users/rmccue/orgs", 85 | "repos_url": "https://api.github.com/users/rmccue/repos", 86 | "events_url": "https://api.github.com/users/rmccue/events{/privacy}", 87 | "received_events_url": 
"https://api.github.com/users/rmccue/received_events", 88 | "type": "User", 89 | "site_admin": false 90 | }, 91 | "private": true, 92 | "html_url": "https://github.com/rmccue/test-linter", 93 | "description": null, 94 | "fork": false, 95 | "url": "https://github.com/rmccue/test-linter", 96 | "forks_url": "https://api.github.com/repos/rmccue/test-linter/forks", 97 | "keys_url": "https://api.github.com/repos/rmccue/test-linter/keys{/key_id}", 98 | "collaborators_url": "https://api.github.com/repos/rmccue/test-linter/collaborators{/collaborator}", 99 | "teams_url": "https://api.github.com/repos/rmccue/test-linter/teams", 100 | "hooks_url": "https://api.github.com/repos/rmccue/test-linter/hooks", 101 | "issue_events_url": "https://api.github.com/repos/rmccue/test-linter/issues/events{/number}", 102 | "events_url": "https://api.github.com/repos/rmccue/test-linter/events", 103 | "assignees_url": "https://api.github.com/repos/rmccue/test-linter/assignees{/user}", 104 | "branches_url": "https://api.github.com/repos/rmccue/test-linter/branches{/branch}", 105 | "tags_url": "https://api.github.com/repos/rmccue/test-linter/tags", 106 | "blobs_url": "https://api.github.com/repos/rmccue/test-linter/git/blobs{/sha}", 107 | "git_tags_url": "https://api.github.com/repos/rmccue/test-linter/git/tags{/sha}", 108 | "git_refs_url": "https://api.github.com/repos/rmccue/test-linter/git/refs{/sha}", 109 | "trees_url": "https://api.github.com/repos/rmccue/test-linter/git/trees{/sha}", 110 | "statuses_url": "https://api.github.com/repos/rmccue/test-linter/statuses/{sha}", 111 | "languages_url": "https://api.github.com/repos/rmccue/test-linter/languages", 112 | "stargazers_url": "https://api.github.com/repos/rmccue/test-linter/stargazers", 113 | "contributors_url": "https://api.github.com/repos/rmccue/test-linter/contributors", 114 | "subscribers_url": "https://api.github.com/repos/rmccue/test-linter/subscribers", 115 | "subscription_url": "https://api.github.com/repos/rmccue/test-linter/subscription", 116 | "commits_url": "https://api.github.com/repos/rmccue/test-linter/commits{/sha}", 117 | "git_commits_url": "https://api.github.com/repos/rmccue/test-linter/git/commits{/sha}", 118 | "comments_url": "https://api.github.com/repos/rmccue/test-linter/comments{/number}", 119 | "issue_comment_url": "https://api.github.com/repos/rmccue/test-linter/issues/comments{/number}", 120 | "contents_url": "https://api.github.com/repos/rmccue/test-linter/contents/{+path}", 121 | "compare_url": "https://api.github.com/repos/rmccue/test-linter/compare/{base}...{head}", 122 | "merges_url": "https://api.github.com/repos/rmccue/test-linter/merges", 123 | "archive_url": "https://api.github.com/repos/rmccue/test-linter/{archive_format}{/ref}", 124 | "downloads_url": "https://api.github.com/repos/rmccue/test-linter/downloads", 125 | "issues_url": "https://api.github.com/repos/rmccue/test-linter/issues{/number}", 126 | "pulls_url": "https://api.github.com/repos/rmccue/test-linter/pulls{/number}", 127 | "milestones_url": "https://api.github.com/repos/rmccue/test-linter/milestones{/number}", 128 | "notifications_url": "https://api.github.com/repos/rmccue/test-linter/notifications{?since,all,participating}", 129 | "labels_url": "https://api.github.com/repos/rmccue/test-linter/labels{/name}", 130 | "releases_url": "https://api.github.com/repos/rmccue/test-linter/releases{/id}", 131 | "deployments_url": "https://api.github.com/repos/rmccue/test-linter/deployments", 132 | "created_at": 1506246519, 133 | "updated_at": "2017-09-24T09:51:51Z", 134 | 
"pushed_at": 1506247006, 135 | "git_url": "git://github.com/rmccue/test-linter.git", 136 | "ssh_url": "git@github.com:rmccue/test-linter.git", 137 | "clone_url": "https://github.com/rmccue/test-linter.git", 138 | "svn_url": "https://github.com/rmccue/test-linter", 139 | "homepage": null, 140 | "size": 0, 141 | "stargazers_count": 0, 142 | "watchers_count": 0, 143 | "language": "PHP", 144 | "has_issues": true, 145 | "has_projects": true, 146 | "has_downloads": true, 147 | "has_wiki": true, 148 | "has_pages": false, 149 | "forks_count": 0, 150 | "mirror_url": null, 151 | "open_issues_count": 1, 152 | "forks": 0, 153 | "open_issues": 1, 154 | "watchers": 0, 155 | "default_branch": "master", 156 | "stargazers": 0, 157 | "master_branch": "master" 158 | }, 159 | "pusher": { 160 | "name": "rmccue", 161 | "email": "me@ryanmccue.info" 162 | }, 163 | "sender": { 164 | "login": "rmccue", 165 | "id": 21655, 166 | "avatar_url": "https://avatars3.githubusercontent.com/u/21655?v=4", 167 | "gravatar_id": "", 168 | "url": "https://api.github.com/users/rmccue", 169 | "html_url": "https://github.com/rmccue", 170 | "followers_url": "https://api.github.com/users/rmccue/followers", 171 | "following_url": "https://api.github.com/users/rmccue/following{/other_user}", 172 | "gists_url": "https://api.github.com/users/rmccue/gists{/gist_id}", 173 | "starred_url": "https://api.github.com/users/rmccue/starred{/owner}{/repo}", 174 | "subscriptions_url": "https://api.github.com/users/rmccue/subscriptions", 175 | "organizations_url": "https://api.github.com/users/rmccue/orgs", 176 | "repos_url": "https://api.github.com/users/rmccue/repos", 177 | "events_url": "https://api.github.com/users/rmccue/events{/privacy}", 178 | "received_events_url": "https://api.github.com/users/rmccue/received_events", 179 | "type": "User", 180 | "site_admin": false 181 | }, 182 | "installation": { 183 | "id": 55050 184 | } 185 | } -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 7 | 10 | 11 | 12 | 15 | 18 | 19 |
4 | hm-linter bot
5 | Automatically run the HM Coding Standards on any repository. 6 |
8 | 🤖 9 |
13 | A Human Made project. Maintained by @rmccue. 14 | 16 | 17 |
20 |
21 | Automatically run the [HM Coding standards](https://github.com/humanmade/coding-standards) on any repository.
22 |
23 |
24 |
25 |
26 | ## Installation
27 |
28 | To enable hm-linter on any repository on GitHub, simply [head to the app page](https://github.com/apps/hm-linter) and click Install/Configure. You'll get an initial report as a new issue if you have any existing linting errors in your codebase.
29 |
30 | Every repository is different, and you might want to customise the rules that the linter runs. Good news: you can do just that. hm-linter detects custom configuration files automatically: just create a `phpcs.ruleset.xml` file for phpcs, an [`.eslintrc.*`](https://eslint.org/docs/user-guide/configuring#configuration-file-formats) file for ESLint, or a [`.stylelintrc`](https://stylelint.io/user-guide/configure) file for stylelint.
31 |
32 | See the [HM Coding standards](https://github.com/humanmade/coding-standards) documentation for how to configure specific rules.
33 |
34 | **Note:** Custom configuration can only use rules/standards which hm-linter has available. This includes the HM Coding Standards as well as any of its dependencies (such as the WP Coding Standards).
35 |
36 |
37 | ## Configuration
38 |
39 | By default, hm-linter will use the latest version of the Human Made coding standards. You can configure it to use an older or fixed version by creating a `.github/hmlinter.yml` file. This file should look like:
40 |
41 | ```yaml
42 | # GLOBAL SETTINGS
43 |
44 | # By default, the version is set to "latest". This can be set to any version
45 | # from 0.4.2 and above, but you MUST include the full version number.
46 | # If you wish to pick up security releases automatically, set the
47 | # version to 'X.Y'; otherwise use the full 'X.Y.Z'.
48 | version: latest
49 |
50 | # PER-STANDARD SETTINGS
51 | phpcs:
52 |   # Set to false to disable phpcs
53 |   enabled: true
54 |
55 |   # Set to "inherit" to use the global version, "latest" for the latest
56 |   # version, or a specific full version number.
57 |   version: inherit
58 |
59 | eslint:
60 |   enabled: true
61 |   version: inherit
62 |
63 | stylelint:
64 |   enabled: true
65 |   version: inherit
66 | ```
67 |
68 | Versions **MUST** be specified in full format (e.g. `0.5.0`). `latest` is available as a convenient shorthand for the latest published version, but note that it will change over time and may cause your code to fail future checks.
69 |
70 |
71 | ## Development
72 |
73 | hm-linter is a GitHub bot built on top of the [Probot framework](https://probot.github.io/). It runs on AWS Lambda, which runs Node 12.x.
74 |
75 | To get started on development of hm-linter:
76 |
77 | 1. Clone this repository.
78 | 2. Run `npm install` or `yarn install` to install the dependencies.
79 |
80 |
81 | ## Testing
82 |
83 | ### Live Testing
84 |
85 | The easiest and best way to test hm-linter is to run the bot in development mode. This runs the bot locally and uses a proxy to forward all webhook events from GitHub.
86 |
87 | `yarn start` will run a development copy of the linter bot inside a Lambda-like Docker container.
88 |
89 | To set this up:
90 |
91 | 1. Download "hm-linter-development Private Key" from the team 1Password Documents.
92 | 2. Save this file into the linter-bot directory as `development.private-key.pem`.
93 | 3. Download "hm-linter-development .env" from the team 1Password Documents.
94 | 4. Save this file into the linter-bot directory as `.env`.
95 | 5. Run `yarn start`.
96 |
97 | The development mode is set up only on the [linter-bot-test](https://github.com/humanmade/linter-bot-test) repository. You can add it to other repositories in the `humanmade` organisation, but **please only do this temporarily**. You should remove any repositories you add as soon as you're finished testing.
98 |
99 | Webhook events for the development bot are sent to Smee.io, which [logs all events](https://smee.io/rpFoxbfDjkw5Srji). If you visit this page, you can also replay events; you should use this while developing/testing rather than repeatedly opening new PRs.
100 |
101 | A typical development process looks like this:
102 |
103 | 1. Generate the test event you want, or find one that already exists
104 | 2. Write the first version of your code to handle it
105 | 3. `yarn start`
106 | 4. [Replay the event on Smee](https://smee.io/rpFoxbfDjkw5Srji)
107 | 5. Check that your code did what you expect
108 | 6. If your code worked, you're done 🙌
109 | 7. If your code didn't work, kill the bot
110 | 8. Repeat steps 2-7 until your code works.
111 |
112 |
113 | ### Replicating production issues
114 |
115 | The first step to replicating production issues is to understand the request being sent to hm-linter. Note that when running against these events, you are **testing against the live GitHub API**, so be careful.
116 |
117 | Access the CloudWatch Logs for hm-linter (ask the Cloud team for access) and find the request you received. If you have the AWS CLI installed, you can do this by running the `scripts/get-logs.js` command.
118 |
119 | Each check status page lists the various request IDs. The **Lambda ID** is the ID you need for pulling down the relevant logs and data.
120 |
121 | The script will write the logs to `{id}.log`, and save the raw data to `{id}.json`.
122 |
123 | ```sh
124 | # For request deadbeef-badd-ecaf-dead-beefbaddecaf:
125 | node scripts/get-logs.js deadbeef-badd-ecaf-dead-beefbaddecaf
126 |
127 | # By default, this will only check the last 48 hours; to override, set HOUR_LIMIT:
128 | HOUR_LIMIT=192 node scripts/get-logs.js deadbeef-badd-ecaf-dead-beefbaddecaf
129 | ```
130 |
131 | ```
132 | Querying for deadbeef-badd-ecaf-dead-beefbaddecaf
133 | Waiting for results…
134 | Log saved to: deadbeef-badd-ecaf-dead-beefbaddecaf.log
135 | Raw data saved to: deadbeef-badd-ecaf-dead-beefbaddecaf.json
136 | ```
137 |
138 | You can then run the handler against a simulated Lambda environment locally (using Docker):
139 |
140 | ```
141 | # Run build at least once:
142 | npm run build
143 |
144 | # Run the handler.
145 | npm run test < deadbeef-badd-ecaf-dead-beefbaddecaf.json
146 | ```
147 |
148 | **Note:** The format of the JSON data passed to `test` **must** be in API Gateway format (i.e. from the get-logs script). If you get a `Cannot read property 'x-github-event' of undefined` error, you're passing a GitHub event instead (i.e. from Smee).
149 |
150 |
151 | ### Deployment
152 |
153 | hm-linter is deployed as an AWS Lambda function. Deployment is handled via npm scripts, which you run via `npm run