├── .editorconfig ├── .github └── workflows │ ├── deploy.yml │ ├── test-evals.yml │ └── test-unit.yml ├── .gitignore ├── .husky └── pre-commit ├── .node-version ├── .npmrc ├── .nvmrc ├── .prettierignore ├── .prettierrc ├── README.md ├── bun.lock ├── docker-compose.yaml ├── env.example ├── experiments └── change_point_detection.ipynb ├── jest.config.js ├── overview.md ├── package-lock.json ├── package.json ├── prisma ├── migrations │ ├── 20241115145014_init │ │ └── migration.sql │ ├── 20241122114843_add_alert_context_model │ │ └── migration.sql │ ├── 20241213104042_create_release_model │ │ └── migration.sql │ ├── 20241213143026_add_on_delete_cascade │ │ └── migration.sql │ ├── 20250115142547_add_raw_release │ │ └── migration.sql │ ├── 20250116111332_add_dynamic_knowledge │ │ └── migration.sql │ ├── 20250116130851_add_knowledge_source │ │ └── migration.sql │ ├── 20250127145837_add_deployment │ │ └── migration.sql │ ├── 20250207115247_add_feedback_tables │ │ └── migration.sql │ └── migration_lock.toml └── schema.prisma ├── requirements.txt ├── scripts ├── import-historical-data.ts └── init-assistant.ts ├── src ├── aggregator │ ├── ContextAggregator.ts │ ├── chains.ts │ ├── checkly-aggregator.spec.ts │ ├── checkly-aggregator.ts │ ├── github-aggregator.spec.ts │ ├── github-aggregator.ts │ ├── knowledge-aggregator.ts │ └── slack-channel-aggregator.ts ├── ai │ ├── Assistant.ts │ ├── Tool.ts │ ├── openai.ts │ └── utils.ts ├── api.main.ts ├── checkly │ ├── PrometheusMetric.ts │ ├── PrometheusParser.ts │ ├── alertDTO.ts │ ├── bla.ts │ ├── checklyAlertEventDTO.ts │ ├── checklyclient.spec.ts │ ├── checklyclient.ts │ ├── client.ts │ ├── models.ts │ └── utils.ts ├── data-import │ ├── AthenaImporter.ts │ ├── DataInserter.ts │ ├── NotionImporter.ts │ └── PublicApiImporter.ts ├── data-syncer.main.ts ├── data-syncing.ts ├── db │ ├── check-groups.ts │ ├── check-results.ts │ ├── check-sync-status.ts │ ├── check.ts │ ├── checks-merged.ts │ ├── error-cluster.ts │ ├── knexfile.ts │ ├── learnings.ts │ ├── migrations │ │ ├── 20250304153255_add_pgvector.ts │ │ ├── 20250304200309_add_checks.ts │ │ ├── 20250304211520_add_check_groups.ts │ │ ├── 20250305132710_add_check_results.ts │ │ ├── 20250306083009_error_cluster.ts │ │ ├── 20250310075322_add_check_sync_status.ts │ │ ├── 20250311144357_drop_foreign_key_error_cluster_membership.ts │ │ ├── 20250321171451_improve_indexing.ts │ │ ├── 20250322154023_add_learnings.ts │ │ └── 20250325083143_improve_error_cluster_membership_indexes.ts │ └── postgres.ts ├── github │ ├── agent.spec.ts │ ├── agent.ts │ ├── github.spec.ts │ ├── github.ts │ └── slackBlock.ts ├── grafana │ ├── grafanaclient.spec.ts │ ├── grafanaclient.ts │ ├── lokiclient.spec.ts │ └── lokiclient.ts ├── instrumentation.ts ├── knowledge-base │ └── knowledgeBase.ts ├── langfuse │ ├── index.ts │ └── utils.ts ├── lib │ ├── async-utils.spec.ts │ ├── async-utils.ts │ └── init-config.ts ├── log.ts ├── notion │ └── notion.ts ├── prisma.ts ├── prompts │ ├── alerts.ts │ ├── checkly-data.spec.ts │ ├── checkly-data.ts │ ├── checkly.eval.spec.fixtures │ │ ├── heatmaps │ │ │ ├── heatmap-001.png │ │ │ ├── heatmap-002.png │ │ │ ├── heatmap-003.png │ │ │ └── heatmap-004.png │ │ └── should find similar errors for check │ │ │ ├── 005fd7bd-81f9-43e0-bcc5-4ac57002b8cd │ │ │ ├── expected.json │ │ │ └── result-summary.json │ │ │ ├── 3c4264bc-4355-4f7f-ba6c-5d79a647e0bc │ │ │ ├── expected.json │ │ │ └── result-summary.json │ │ │ ├── 5ee8e373-f204-45e4-b193-d652dba7e928 │ │ │ ├── expected.json │ │ │ └── result-summary.json │ │ │ ├── 
683f229f-48d2-4b97-9161-db029f9d9a32 │ │ │ ├── expected.json │ │ │ └── result-summary.json │ │ │ ├── 84d25fb6-a6a7-4127-9c99-64cd3d754817 │ │ │ ├── expected.json │ │ │ └── result-summary.json │ │ │ ├── e9ac8920-ee84-40aa-bd67-f3d04babc3db │ │ │ ├── expected.json │ │ │ └── result-summary.json │ │ │ └── generate-data.ts │ ├── checkly.eval.spec.ts │ ├── checkly.flywheel.script.ts │ ├── checkly.script.ts │ ├── checkly.ts │ ├── common.ts │ ├── github.eval.spec.ts │ ├── github.ts │ ├── search.eval.spec.ts │ ├── search.ts │ ├── slack.ts │ ├── sre-assistant.ts │ ├── stability.prompt.ts │ ├── summarizeCheckGoals.ts │ ├── timeframe.ts │ ├── toScoreMatcher.ts │ ├── validation.spec.ts │ └── validation.ts ├── routes │ ├── checklywebhook.ts │ ├── githubwebhook.spec.ts │ └── githubwebhook.ts ├── slack │ ├── slack.spec.ts │ └── slack.ts ├── slackbot │ ├── accountSummaryCommandHandler.ts │ ├── analysis │ │ └── analyseStability.ts │ ├── app.ts │ ├── blocks │ │ ├── __snapshots__ │ │ │ ├── accountSummaryBlock.spec.ts.snap │ │ │ ├── checkBlock.spec.ts.snap │ │ │ ├── checkResultBlock.spec.ts.snap │ │ │ ├── errorPatternBlock.spec.ts.snap │ │ │ └── failingChecksBlock.spec.ts.snap │ │ ├── accountSummaryBlock.script.ts │ │ ├── accountSummaryBlock.spec.ts │ │ ├── accountSummaryBlock.ts │ │ ├── checkBlock.spec.ts │ │ ├── checkBlock.ts │ │ ├── checkResultBlock.spec.ts │ │ ├── checkResultBlock.ts │ │ ├── errorPatternBlock.script.ts │ │ ├── errorPatternBlock.spec.ts │ │ ├── errorPatternBlock.ts │ │ ├── failingChecksBlock.script.ts │ │ ├── failingChecksBlock.spec.ts │ │ ├── failingChecksBlock.ts │ │ ├── multipleChecksAnalysisBlock.ts │ │ ├── newCheckSummaryBlock.script.ts │ │ └── newCheckSummaryBlock.ts │ ├── channel-summary.ts │ ├── check-result-slices.ts │ ├── check-results-labeled.ts │ ├── checkly-integration-utils.ts │ ├── checkly.ts │ ├── commands │ │ ├── check-summary.script.ts │ │ └── check-summary.ts │ ├── config.ts │ ├── feedback.ts │ ├── index.ts │ ├── listErrorPatternActionHandler.ts │ ├── listFailingChecksActionHandler.ts │ ├── noopActionHandler.ts │ ├── ops-channel.spec.ts │ ├── ops-channel │ │ └── analyse-alert.ts │ ├── utils.ts │ └── web-client.ts ├── sre-assistant │ ├── SreAssistant.ts │ ├── sre-assistant.spec.ts │ └── tools │ │ ├── ChecklyTool.ts │ │ ├── GitHubTool.ts │ │ ├── GithubAgentInteractionTool.ts │ │ ├── KnowledgeTool.ts │ │ ├── SearchContextTool.ts │ │ └── TimeframeTranslationTool.ts └── use-cases │ └── analyse-multiple │ ├── analyse-multiple-checks.ts │ └── find-target-checks.ts └── tsconfig.json /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | indent_style = space 5 | indent_size = 2 6 | end_of_line = lf 7 | charset = utf-8 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yml: -------------------------------------------------------------------------------- 1 | name: Deployment 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | Deploy-Render: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Install Render CLI 13 | run: | 14 | curl -L https://github.com/render-oss/cli/releases/download/v2.1.1/cli_2.1.1_linux_amd64.zip -o render.zip 15 | unzip render.zip 16 | sudo mv cli_v2.1.1 /usr/local/bin/render 17 | 18 | - name: Authenticate with Render 19 | env: 20 | RENDER_API_KEY: ${{ secrets.RENDER_API_KEY }} 21 | run: | 22 | render login --output json 23 | 24 | - name: Deploy 
and Wait for All Services in Parallel 25 | env: 26 | RENDER_API_KEY: ${{ secrets.RENDER_API_KEY }} 27 | run: | 28 | set -e 29 | 30 | deploy() { 31 | SERVICE_ID=$1 32 | echo "Starting deployment for service $SERVICE_ID" 33 | render deploys create "$SERVICE_ID" --output json --confirm --wait 34 | echo "✅ Deployment for $SERVICE_ID completed" 35 | } 36 | 37 | # Start all deployments in parallel 38 | deploy "${{ secrets.RENDER_SERVICE_ID }}" & 39 | PID1=$! 40 | 41 | deploy "${{ secrets.RENDER_DATA_SYNCER_SERVICE_ID }}" & 42 | PID2=$! 43 | 44 | deploy "${{ secrets.RENDER_SERVICE_ID_DEV }}" & 45 | PID3=$! 46 | 47 | deploy "${{ secrets.RENDER_DATA_SYNCER_SERVICE_ID_DEV }}" & 48 | PID4=$! 49 | 50 | # Wait for all to finish 51 | wait $PID1 52 | wait $PID2 53 | wait $PID3 54 | wait $PID4 55 | 56 | echo "✅ All deployments finished" 57 | -------------------------------------------------------------------------------- /.github/workflows/test-evals.yml: -------------------------------------------------------------------------------- 1 | name: Prompt Eval Tests 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | paths: 7 | - "src/prompts/**" 8 | pull_request: 9 | branches: [main] 10 | paths: 11 | - "src/prompts/**" 12 | 13 | jobs: 14 | build: 15 | runs-on: ubuntu-latest 16 | 17 | steps: 18 | - name: Checkout repository 19 | uses: actions/checkout@v4 20 | 21 | - name: Read Node.js version 22 | id: node-version 23 | run: echo "NODE_VERSION=$(cat .node-version)" >> $GITHUB_ENV 24 | 25 | - name: Set up Node.js 26 | uses: actions/setup-node@v4 27 | with: 28 | node-version: ${{ env.NODE_VERSION }} 29 | cache: "npm" 30 | 31 | - name: Set up environment variables 32 | run: | 33 | echo "CHECKLY_API_KEY=${{ secrets.CHECKLY_API_KEY }}" >> $GITHUB_ENV 34 | echo "CHECKLY_ACCOUNT_ID=${{ secrets.CHECKLY_ACCOUNT_ID }}" >> $GITHUB_ENV 35 | echo "PROMETHEUS_INTEGRATION_KEY=${{ secrets.PROMETHEUS_INTEGRATION_KEY }}" >> $GITHUB_ENV 36 | echo "CHECKLY_GITHUB_TOKEN=${{ secrets.CHECKLY_GITHUB_TOKEN }}" >> $GITHUB_ENV 37 | echo "OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}" >> $GITHUB_ENV 38 | 39 | - name: Install dependencies 40 | run: npm ci 41 | 42 | - name: Build API 43 | run: npm run build 44 | 45 | - name: Run tests 46 | run: npm run test:evals -- --ci --reporters=default --reporters=jest-junit 47 | env: 48 | JEST_JUNIT_OUTPUT_DIR: reports/ 49 | CHECKLY_API_KEY: ${{ secrets.CHECKLY_API_KEY }} 50 | CHECKLY_ACCOUNT_ID: ${{ secrets.CHECKLY_ACCOUNT_ID }} 51 | PROMETHEUS_INTEGRATION_KEY: ${{ secrets.PROMETHEUS_INTEGRATION_KEY }} 52 | CHECKLY_GITHUB_TOKEN: ${{ secrets.CHECKLY_GITHUB_TOKEN }} 53 | OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} 54 | 55 | - name: Test Report 56 | uses: dorny/test-reporter@v1 57 | if: success() || failure() # run this step even if previous step failed 58 | with: 59 | name: Prompt Eval Tests # Name of the check run which will be created 60 | path: reports/*.xml # Path to test results 61 | reporter: jest-junit # Format of test results 62 | -------------------------------------------------------------------------------- /.github/workflows/test-unit.yml: -------------------------------------------------------------------------------- 1 | name: Unit Tests 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | branches: [main] 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Checkout repository 15 | uses: actions/checkout@v4 16 | 17 | - name: Read Node.js version 18 | id: node-version 19 | run: echo "NODE_VERSION=$(cat .node-version)" >> $GITHUB_ENV 20 | 21 | - 
name: Set up Node.js 22 | uses: actions/setup-node@v4 23 | with: 24 | node-version: ${{ env.NODE_VERSION }} 25 | cache: "npm" 26 | 27 | - name: Set up environment variables 28 | run: | 29 | echo "CHECKLY_API_KEY=${{ secrets.CHECKLY_API_KEY }}" >> $GITHUB_ENV 30 | echo "CHECKLY_ACCOUNT_ID=${{ secrets.CHECKLY_ACCOUNT_ID }}" >> $GITHUB_ENV 31 | echo "PROMETHEUS_INTEGRATION_KEY=${{ secrets.PROMETHEUS_INTEGRATION_KEY }}" >> $GITHUB_ENV 32 | echo "CHECKLY_GITHUB_TOKEN=${{ secrets.CHECKLY_GITHUB_TOKEN }}" >> $GITHUB_ENV 33 | echo "OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}" >> $GITHUB_ENV 34 | 35 | - name: Install dependencies 36 | run: npm ci 37 | 38 | - name: Build API 39 | run: npm run build 40 | 41 | - name: Run tests 42 | run: npm run test:unit -- --ci --reporters=default --reporters=jest-junit 43 | env: 44 | CHECKLY_API_KEY: ${{ secrets.CHECKLY_API_KEY }} 45 | CHECKLY_ACCOUNT_ID: ${{ secrets.CHECKLY_ACCOUNT_ID }} 46 | PROMETHEUS_INTEGRATION_KEY: ${{ secrets.PROMETHEUS_INTEGRATION_KEY }} 47 | CHECKLY_GITHUB_TOKEN: ${{ secrets.CHECKLY_GITHUB_TOKEN }} 48 | OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} 49 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.zip 2 | # compiled output 3 | /dist 4 | /node_modules 5 | /build 6 | .next 7 | .run 8 | 9 | # Logs 10 | logs 11 | *.log 12 | npm-debug.log* 13 | pnpm-debug.log* 14 | yarn-debug.log* 15 | yarn-error.log* 16 | lerna-debug.log* 17 | 18 | # OS 19 | .DS_Store 20 | 21 | # Tests 22 | /coverage 23 | /.nyc_output 24 | 25 | # IDEs and editors 26 | /.idea 27 | .project 28 | .classpath 29 | .c9/ 30 | *.launch 31 | .settings/ 32 | *.sublime-workspace 33 | 34 | # IDE - VSCode 35 | .vscode/* 36 | !.vscode/settings.json 37 | !.vscode/tasks.json 38 | !.vscode/launch.json 39 | !.vscode/extensions.json 40 | 41 | # dotenv environment variable files 42 | .env 43 | .env.development.local 44 | .env.test.local 45 | .env.production.local 46 | .env.local 47 | 48 | # temp directory 49 | .temp 50 | .tmp 51 | 52 | # Runtime data 53 | pids 54 | *.pid 55 | *.seed 56 | *.pid.lock 57 | 58 | # Diagnostic reports (https://nodejs.org/api/report.html) 59 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 60 | .vercel 61 | 62 | # React Component Preview 63 | previewConfig.json 64 | 65 | reports/ 66 | junit.xml 67 | 68 | results/ 69 | 70 | -------------------------------------------------------------------------------- /.husky/pre-commit: -------------------------------------------------------------------------------- 1 | if command -v bunx &> /dev/null 2 | then 3 | echo "Using bunx to run lint-staged..." 4 | bunx lint-staged 5 | else 6 | echo "Bun not found—falling back to npm (npx lint-staged)..." 
7 | npx lint-staged 8 | npx tsc --noEmit 9 | fi 10 | -------------------------------------------------------------------------------- /.node-version: -------------------------------------------------------------------------------- 1 | v22.11.0 2 | -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | save-exact=true -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | v20 2 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | # Ignore artifacts: 2 | build 3 | coverage 4 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | {} 2 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | 3 | services: 4 | db: 5 | image: pgvector/pgvector:pg15 6 | environment: 7 | POSTGRES_USER: postgres 8 | POSTGRES_PASSWORD: postgres 9 | POSTGRES_DB: postgres 10 | ports: 11 | - "5438:5432" 12 | volumes: 13 | - postgres_data:/var/lib/postgresql/data 14 | 15 | volumes: 16 | postgres_data: 17 | -------------------------------------------------------------------------------- /env.example: -------------------------------------------------------------------------------- 1 | # Database 2 | DATABASE_URL="postgresql://postgres:postgres@localhost:5432/postgres" 3 | 4 | # OpenAI 5 | OPENAI_API_KEY=your_openai_key 6 | OPENAI_ASSISTANT_ID=your_assistant_id 7 | 8 | # Slack 9 | SLACK_SIGNING_SECRET=your_slack_signing_secret 10 | SLACK_AUTH_TOKEN=your_slack_auth_token 11 | SLACK_APP_TOKEN=your_slack_app_token 12 | SLACK_ALERT_CHANNEL_ID=your_alert_channel_id 13 | SLACK_RELEASE_CHANNEL_ID=your_release_channel_id 14 | 15 | # GitHub 16 | GITHUB_ORG=your_github_org 17 | CHECKLY_GITHUB_TOKEN=your_github_token 18 | GH_WEBHOOK_SECRET=your_github_webhook_secret 19 | 20 | # Checkly 21 | CHECKLY_API_KEY=your_checkly_api_key 22 | CHECKLY_ACCOUNT_ID=your_checkly_account_id 23 | PROMETHEUS_INTEGRATION_KEY=your_prometheus_key 24 | ATHENA_ACCESS_ENDPOINT_URL=athena_access_endpoint_url 25 | 26 | # Langfuse 27 | LANGFUSE_SECRET_KEY= 28 | LANGFUSE_PUBLIC_KEY= 29 | LANGFUSE_BASEURL= 30 | 31 | 32 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | preset: "ts-jest", 3 | testEnvironment: "node", 4 | testMatch: ["**/*.spec.ts"], 5 | }; 6 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "scripts": { 3 | "build": "npx tsc", 4 | "start": "node --max-old-space-size=512 dist/api.main.js", 5 | "start:data-syncer": "node --max-old-space-size=512 dist/data-syncer.main.js", 6 | "start:watch": "nodemon --watch 'src/**/*.ts' --exec 'ts-node' src/api.main.ts", 7 | "start:data-syncer:watch": "nodemon --watch 'src/**/*.ts' --exec 'ts-node' src/data-syncer.main.ts", 8 | "lint": "prettier --check .", 9 | "lint:fix": "prettier 
--write --ignore-unknown .", 10 | "test": "node --experimental-vm-modules ./node_modules/.bin/jest", 11 | "test:unit": "npm run test -- --testPathIgnorePatterns=src/prompts/.*.eval.spec.ts", 12 | "test:evals": "npm run test -- --testPathPattern=src/prompts/.*.eval.spec.ts --retryTimes=1", 13 | "db:migrate": "prisma migrate dev", 14 | "db:studio": "prisma studio", 15 | "db:deploy": "prisma migrate deploy", 16 | "db:generate": "prisma generate", 17 | "db:migrate:latest": "knex migrate:latest --knexfile=src/db/knexfile.ts", 18 | "db:migrate:rollback": "knex migrate:rollback --knexfile=src/db/knexfile.ts", 19 | "postinstall": "prisma generate", 20 | "smee": "npx smee-client", 21 | "prepare": "husky" 22 | }, 23 | "author": "", 24 | "license": "ISC", 25 | "dependencies": { 26 | "@ai-sdk/openai": "1.2.5", 27 | "@notionhq/client": "2.2.15", 28 | "@opentelemetry/api-logs": "0.57.1", 29 | "@opentelemetry/auto-instrumentations-node": "0.56.0", 30 | "@opentelemetry/instrumentation": "0.57.1", 31 | "@opentelemetry/sdk-logs": "0.57.1", 32 | "@opentelemetry/sdk-node": "0.57.1", 33 | "@prisma/client": "5.22.0", 34 | "@slack/bolt": "4.2.1", 35 | "ai": "4.1.61", 36 | "autoevals": "0.0.119", 37 | "axios": "1.7.9", 38 | "canvas": "3.1.0", 39 | "class-transformer": "0.5.1", 40 | "class-validator": "0.14.1", 41 | "commander": "13.1.0", 42 | "csv-parse": "5.6.0", 43 | "data-forge": "1.10.2", 44 | "data-forge-fs": "0.0.9", 45 | "date-fns": "4.1.0", 46 | "dotenv": "16.4.5", 47 | "echarts": "5.6.0", 48 | "express": "4.21.1", 49 | "gray-matter": "4.0.3", 50 | "knex": "3.1.0", 51 | "langfuse": "3.34.1", 52 | "langfuse-vercel": "3.34.1", 53 | "llamaindex": "0.8.5", 54 | "lodash": "4.17.21", 55 | "moment": "2.30.1", 56 | "octokit": "3.2.1", 57 | "pg": "8.13.3", 58 | "pgvector": "0.2.0", 59 | "pino": "9.6.0", 60 | "pino-logfmt": "0.1.1", 61 | "react": "18.3.1", 62 | "react-dom": "18.3.1", 63 | "reflect-metadata": "0.2.2", 64 | "skmeans": "0.11.3", 65 | "typescript": "5.6.3", 66 | "yaml": "2.6.0", 67 | "zod": "3.23.8" 68 | }, 69 | "devDependencies": { 70 | "@types/express": "5.0.0", 71 | "@types/jest": "29.5.14", 72 | "@types/react": "18.3.12", 73 | "concurrently": "9.1.0", 74 | "husky": "9.1.7", 75 | "jest": "29.7.0", 76 | "jest-junit": "16.0.0", 77 | "lint-staged": "15.4.3", 78 | "nodemon": "3.1.7", 79 | "prettier": "3.4.2", 80 | "prisma": "5.22.0", 81 | "ts-jest": "29.2.5", 82 | "ts-node": "10.9.2" 83 | }, 84 | "lint-staged": { 85 | "**/*": "prettier --write --ignore-unknown" 86 | }, 87 | "name": "srebot", 88 | "version": "1.0.0", 89 | "description": "bajo construcción", 90 | "main": "api.main.js" 91 | } 92 | -------------------------------------------------------------------------------- /prisma/migrations/20241115145014_init/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateTable 2 | CREATE TABLE "Alert" ( 3 | "id" TEXT NOT NULL, 4 | "data" JSONB NOT NULL, 5 | "context" TEXT NOT NULL, 6 | "summary" TEXT NOT NULL, 7 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 8 | "updatedAt" TIMESTAMP(3) NOT NULL, 9 | 10 | CONSTRAINT "Alert_pkey" PRIMARY KEY ("id") 11 | ); 12 | -------------------------------------------------------------------------------- /prisma/migrations/20241122114843_add_alert_context_model/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - You are about to drop the column `context` on the `Alert` table. All the data in the column will be lost. 
5 | 6 | */ 7 | -- CreateEnum 8 | CREATE TYPE "Source" AS ENUM ('custom', 'checkly', 'github'); 9 | 10 | -- AlterTable 11 | ALTER TABLE "Alert" DROP COLUMN "context"; 12 | 13 | -- CreateTable 14 | CREATE TABLE "AlertContext" ( 15 | "id" TEXT NOT NULL, 16 | "alertId" TEXT NOT NULL, 17 | "source" "Source" NOT NULL DEFAULT 'custom', 18 | "key" TEXT NOT NULL, 19 | "value" JSONB NOT NULL, 20 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 21 | "updatedAt" TIMESTAMP(3) NOT NULL, 22 | 23 | CONSTRAINT "AlertContext_pkey" PRIMARY KEY ("id") 24 | ); 25 | 26 | -- AddForeignKey 27 | ALTER TABLE "AlertContext" ADD CONSTRAINT "AlertContext_alertId_fkey" FOREIGN KEY ("alertId") REFERENCES "Alert"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 28 | -------------------------------------------------------------------------------- /prisma/migrations/20241213104042_create_release_model/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateTable 2 | CREATE TABLE "Release" ( 3 | "id" TEXT NOT NULL, 4 | "name" TEXT NOT NULL, 5 | "org" TEXT NOT NULL, 6 | "repo" TEXT NOT NULL, 7 | "repoUrl" TEXT NOT NULL, 8 | "tag" TEXT NOT NULL, 9 | "releaseUrl" TEXT NOT NULL, 10 | "diffUrl" TEXT NOT NULL, 11 | "publishedAt" TIMESTAMP(3) NOT NULL, 12 | "authors" TEXT[], 13 | "summary" TEXT NOT NULL, 14 | 15 | CONSTRAINT "Release_pkey" PRIMARY KEY ("id") 16 | ); 17 | -------------------------------------------------------------------------------- /prisma/migrations/20241213143026_add_on_delete_cascade/migration.sql: -------------------------------------------------------------------------------- 1 | -- DropForeignKey 2 | ALTER TABLE "AlertContext" DROP CONSTRAINT "AlertContext_alertId_fkey"; 3 | 4 | -- AddForeignKey 5 | ALTER TABLE "AlertContext" ADD CONSTRAINT "AlertContext_alertId_fkey" FOREIGN KEY ("alertId") REFERENCES "Alert"("id") ON DELETE CASCADE ON UPDATE CASCADE; 6 | -------------------------------------------------------------------------------- /prisma/migrations/20250115142547_add_raw_release/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateTable 2 | CREATE TABLE "RawRelease" ( 3 | "id" TEXT NOT NULL, 4 | "body" JSONB NOT NULL, 5 | "releaseId" TEXT NOT NULL, 6 | 7 | CONSTRAINT "RawRelease_pkey" PRIMARY KEY ("id") 8 | ); 9 | 10 | -- CreateIndex 11 | CREATE UNIQUE INDEX "RawRelease_releaseId_key" ON "RawRelease"("releaseId"); 12 | 13 | -- AddForeignKey 14 | ALTER TABLE "RawRelease" ADD CONSTRAINT "RawRelease_releaseId_fkey" FOREIGN KEY ("releaseId") REFERENCES "Release"("id") ON DELETE CASCADE ON UPDATE CASCADE; 15 | -------------------------------------------------------------------------------- /prisma/migrations/20250116111332_add_dynamic_knowledge/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateTable 2 | CREATE TABLE "DynamicKnowledge" ( 3 | "id" TEXT NOT NULL, 4 | "content" TEXT NOT NULL, 5 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 6 | "updatedAt" TIMESTAMP(3) NOT NULL, 7 | 8 | CONSTRAINT "DynamicKnowledge_pkey" PRIMARY KEY ("id") 9 | ); 10 | -------------------------------------------------------------------------------- /prisma/migrations/20250116130851_add_knowledge_source/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "Source" ADD VALUE 'knowledge'; 3 | 
-------------------------------------------------------------------------------- /prisma/migrations/20250127145837_add_deployment/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateTable 2 | CREATE TABLE "Deployment" ( 3 | "id" TEXT NOT NULL, 4 | "org" TEXT NOT NULL, 5 | "repo" TEXT NOT NULL, 6 | "repoUrl" TEXT NOT NULL, 7 | "environment" TEXT NOT NULL, 8 | "sha" TEXT NOT NULL, 9 | "deploymentUrl" TEXT NOT NULL, 10 | "diffUrl" TEXT NOT NULL, 11 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 12 | "rawEvent" JSONB NOT NULL, 13 | "summary" TEXT NOT NULL, 14 | 15 | CONSTRAINT "Deployment_pkey" PRIMARY KEY ("id") 16 | ); 17 | -------------------------------------------------------------------------------- /prisma/migrations/20250207115247_add_feedback_tables/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateTable 2 | CREATE TABLE "BotResponse" ( 3 | "id" TEXT NOT NULL, 4 | "content" TEXT NOT NULL, 5 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 6 | "slackMessageUrl" TEXT, 7 | "slackMessageTs" TEXT NOT NULL, 8 | "alertId" TEXT, 9 | "releaseId" TEXT, 10 | "deploymentId" TEXT, 11 | 12 | CONSTRAINT "BotResponse_pkey" PRIMARY KEY ("id") 13 | ); 14 | 15 | -- CreateTable 16 | CREATE TABLE "Feedback" ( 17 | "id" TEXT NOT NULL, 18 | "categories" TEXT[], 19 | "score" INTEGER NOT NULL, 20 | "botResponseId" TEXT NOT NULL, 21 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 22 | "updatedAt" TIMESTAMP(3) NOT NULL, 23 | 24 | CONSTRAINT "Feedback_pkey" PRIMARY KEY ("id") 25 | ); 26 | 27 | -- CreateIndex 28 | CREATE UNIQUE INDEX "Feedback_botResponseId_key" ON "Feedback"("botResponseId"); 29 | 30 | -- AddForeignKey 31 | ALTER TABLE "BotResponse" ADD CONSTRAINT "BotResponse_alertId_fkey" FOREIGN KEY ("alertId") REFERENCES "Alert"("id") ON DELETE CASCADE ON UPDATE CASCADE; 32 | 33 | -- AddForeignKey 34 | ALTER TABLE "BotResponse" ADD CONSTRAINT "BotResponse_releaseId_fkey" FOREIGN KEY ("releaseId") REFERENCES "Release"("id") ON DELETE CASCADE ON UPDATE CASCADE; 35 | 36 | -- AddForeignKey 37 | ALTER TABLE "BotResponse" ADD CONSTRAINT "BotResponse_deploymentId_fkey" FOREIGN KEY ("deploymentId") REFERENCES "Deployment"("id") ON DELETE SET NULL ON UPDATE CASCADE; 38 | 39 | -- AddForeignKey 40 | ALTER TABLE "Feedback" ADD CONSTRAINT "Feedback_botResponseId_fkey" FOREIGN KEY ("botResponseId") REFERENCES "BotResponse"("id") ON DELETE CASCADE ON UPDATE CASCADE; 41 | -------------------------------------------------------------------------------- /prisma/migrations/migration_lock.toml: -------------------------------------------------------------------------------- 1 | # Please do not edit this file manually 2 | # It should be added in your version-control system (i.e. 
Git) 3 | provider = "postgresql" -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | appnope==0.1.4 2 | asttokens==3.0.0 3 | comm==0.2.2 4 | contourpy==1.3.1 5 | cycler==0.12.1 6 | debugpy==1.8.13 7 | decorator==5.2.1 8 | dotenv==0.9.9 9 | executing==2.2.0 10 | fonttools==4.56.0 11 | ipykernel==6.29.5 12 | ipython==9.0.2 13 | ipython_pygments_lexers==1.1.1 14 | jedi==0.19.2 15 | jupyter_client==8.6.3 16 | jupyter_core==5.7.2 17 | kiwisolver==1.4.8 18 | load-dotenv==0.1.0 19 | matplotlib==3.10.1 20 | matplotlib-inline==0.1.7 21 | nest-asyncio==1.6.0 22 | numpy==2.2.4 23 | packaging==24.2 24 | pandas==2.2.3 25 | parso==0.8.4 26 | pexpect==4.9.0 27 | pillow==11.1.0 28 | platformdirs==4.3.6 29 | prompt_toolkit==3.0.50 30 | psutil==7.0.0 31 | psycopg2==2.9.10 32 | ptyprocess==0.7.0 33 | pure_eval==0.2.3 34 | Pygments==2.19.1 35 | pyparsing==3.2.1 36 | python-dateutil==2.9.0.post0 37 | python-dotenv==1.0.1 38 | pytz==2025.1 39 | pyzmq==26.3.0 40 | seaborn==0.13.2 41 | six==1.17.0 42 | stack-data==0.6.3 43 | tornado==6.4.2 44 | traitlets==5.14.3 45 | tzdata==2025.1 46 | tzlocal==5.3.1 47 | wcwidth==0.2.13 48 | -------------------------------------------------------------------------------- /scripts/import-historical-data.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ts-node 2 | 3 | import { Command } from "commander"; 4 | import { AthenaImporter } from "../src/data-import/AthenaImporter"; 5 | import { log } from "../src/log"; 6 | import { initConfig } from "../src/lib/init-config"; 7 | 8 | initConfig(); 9 | 10 | const program = new Command(); 11 | 12 | program 13 | .name("import-historical-data") 14 | .description("Import historical data for an account") 15 | .requiredOption("--account-id ", "The account ID to import") 16 | .option( 17 | "--hours-back ", 18 | "Number of hours back to import", 19 | (value: string) => { 20 | const num = Number(value); 21 | if (isNaN(num) || num <= 0) { 22 | throw new Error( 23 | "Invalid argument. 
Please provide a positive number of hours.", 24 | ); 25 | } 26 | return num; 27 | }, 28 | 1, 29 | ) 30 | .option( 31 | "--checkly-api-key ", 32 | "Checkly API key", 33 | process.env.CHECKLY_API_KEY, 34 | ) 35 | .option( 36 | "--athena-api-key ", 37 | "Athena API key", 38 | process.env.CHECKLY_API_KEY, 39 | ) 40 | .option( 41 | "--athena-endpoint-url ", 42 | "Athena endpoint URL", 43 | process.env.ATHENA_ACCESS_ENDPOINT_URL, 44 | ) 45 | .helpOption("--help", "Show this help message"); 46 | 47 | program.parse(process.argv); 48 | 49 | const options = program.opts(); 50 | const accountId = options.accountId; 51 | const hoursBack = options.hoursBack; // Already converted to number by our custom parser 52 | const checklyApiKey = options.checklyApiKey; 53 | const athenaApiKey = options.athenaApiKey; 54 | const athenaEndpointUrl = options.athenaEndpointUrl; 55 | 56 | const hoursAgo = (hoursBack: number): Date => 57 | new Date(Date.now() - hoursBack * 60 * 60 * 1000); 58 | 59 | const main = async () => { 60 | log.info({ hoursBack, accountId }, "Starting to import data"); 61 | 62 | const importer = new AthenaImporter({ 63 | accountId: accountId, 64 | checklyApiKey: checklyApiKey!, 65 | athenaApiKey: athenaApiKey!, 66 | athenaAccessEndpointUrl: athenaEndpointUrl!, 67 | }); 68 | 69 | const fromDate = hoursAgo(hoursBack); 70 | const toDate = new Date(); 71 | 72 | await importer.importAccountData(fromDate, toDate); 73 | 74 | log.info("Import completed."); 75 | process.exit(0); 76 | }; 77 | 78 | main().catch((err) => { 79 | console.error("Import failed:", err); 80 | process.exit(1); 81 | }); 82 | -------------------------------------------------------------------------------- /scripts/init-assistant.ts: -------------------------------------------------------------------------------- 1 | import { getOpenaiClient } from "../src/ai/openai"; 2 | import { initConfig } from "../src/lib/init-config"; 3 | 4 | initConfig(); 5 | 6 | const openai = getOpenaiClient(); 7 | 8 | function createAssistant(name: string) { 9 | return openai.beta.assistants.create({ 10 | model: "gpt-4o", 11 | name, 12 | }); 13 | } 14 | 15 | async function main() { 16 | const assistants = await openai.beta.assistants.list(); 17 | 18 | if (!assistants.data.length) { 19 | console.log("Creating sre-assistant"); 20 | await createAssistant("sre-assistant"); 21 | } else { 22 | const sreAssistant = assistants.data.find( 23 | (assistant) => assistant.name === "sre-assistant", 24 | ); 25 | 26 | if (!sreAssistant) { 27 | console.log("Creating sre-assistant"); 28 | await createAssistant("sre-assistant"); 29 | } else { 30 | console.log("sre-assistant already exists: ", sreAssistant.id); 31 | } 32 | } 33 | } 34 | 35 | main() 36 | .then(() => { 37 | console.log("Done"); 38 | }) 39 | .catch((error) => { 40 | console.error(error); 41 | }); 42 | -------------------------------------------------------------------------------- /src/aggregator/ContextAggregator.ts: -------------------------------------------------------------------------------- 1 | import { checklyAggregator } from "./checkly-aggregator"; 2 | import { WebhookAlertDto } from "../checkly/alertDTO"; 3 | import { githubAggregator } from "./github-aggregator"; 4 | import type { $Enums } from "@prisma/client"; 5 | import { slackChannelAggregator } from "./slack-channel-aggregator"; 6 | import { knowledgeAggregator } from "./knowledge-aggregator"; 7 | 8 | export enum ContextKey { 9 | ChecklyScript = "checkly.script", 10 | ChecklyAlert = "checkly.alert", 11 | ChecklyCheck = "checkly.check", 12 | 
ChecklyResults = "checkly.results", 13 | ChecklyPrometheusStatus = "checkly.prometheusStatus", 14 | ChecklyLogs = "checkly.logs", 15 | GitHubRepoChanges = "github.repoChanges.$repo", 16 | GitHubReleaseSummary = "github.releaseSummary.$repo", 17 | GitHubDeploymentSummary = "github.deploymentSummary.$repo", 18 | Knowledge = "knowledge.$documentSlug", 19 | SlackChannelSummary = "slack.channelSummary.$channel", 20 | AlertAnalysis = "alert.analysis", 21 | } 22 | 23 | export interface CheckContext { 24 | checkId: string; 25 | source: $Enums.Source; 26 | key: ContextKey; 27 | value: unknown; 28 | analysis: string; 29 | } 30 | 31 | export class CheckContextAggregator { 32 | alert: WebhookAlertDto; 33 | plugins = [ 34 | checklyAggregator, 35 | githubAggregator, 36 | slackChannelAggregator, 37 | knowledgeAggregator, 38 | ]; 39 | 40 | constructor(alert: WebhookAlertDto) { 41 | this.alert = alert; 42 | } 43 | 44 | aggregate() { 45 | return Promise.all( 46 | this.plugins.map(async (plugin) => { 47 | return plugin.fetchContext(this.alert).catch((error) => { 48 | console.error( 49 | `Error fetching context from ${plugin.name ?? "unknown plugin"}:`, 50 | error, 51 | ); 52 | return []; 53 | }); 54 | }), 55 | ).then((results) => results.flat()); 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /src/aggregator/chains.ts: -------------------------------------------------------------------------------- 1 | import { generateText } from "ai"; 2 | import { 3 | contextAnalysisEntryPrompt, 4 | contextAnalysisSummaryPrompt, 5 | } from "../prompts/checkly"; 6 | import { CheckContext } from "./ContextAggregator"; 7 | 8 | export const generateContextAnalysis = async (context: CheckContext[]) => { 9 | return await Promise.all( 10 | context.map(async (c) => { 11 | const analysis = await generateContextAnalysisForEntry(c); 12 | return { ...c, analysis }; 13 | }), 14 | ); 15 | 16 | async function generateContextAnalysisForEntry(entry: CheckContext) { 17 | const summary = await generateText( 18 | contextAnalysisEntryPrompt(entry, context), 19 | ); 20 | 21 | return summary.text; 22 | } 23 | }; 24 | 25 | export const generateContextAnalysisSummary = async ( 26 | contextAnalysis: CheckContext[], 27 | ) => { 28 | const summary = await generateText( 29 | contextAnalysisSummaryPrompt(contextAnalysis), 30 | ); 31 | 32 | return summary.text; 33 | }; 34 | -------------------------------------------------------------------------------- /src/aggregator/checkly-aggregator.spec.ts: -------------------------------------------------------------------------------- 1 | import { plainToInstance } from "class-transformer"; 2 | import "dotenv/config"; 3 | import "reflect-metadata"; 4 | import { WebhookAlertDto } from "../checkly/alertDTO"; 5 | import { checkly } from "../checkly/client"; 6 | import { checklyAggregator } from "./checkly-aggregator"; 7 | 8 | jest.setTimeout(30000); 9 | describe("ChecklyService", () => { 10 | // FIXME do not use static ids here (al) 11 | it.skip("can collect checkly context", async () => { 12 | const checks = await checkly.getChecks(); 13 | const context = await checklyAggregator.fetchContext( 14 | plainToInstance( 15 | WebhookAlertDto, 16 | { 17 | CHECK_NAME: "GET /books", 18 | CHECK_ID: "d6330bf8-1928-4953-9bc1-f4ac8d98f81f", 19 | CHECK_TYPE: "API", 20 | GROUP_NAME: "", 21 | ALERT_TITLE: "GET /books has failed", 22 | ALERT_TYPE: "ALERT_FAILURE", 23 | CHECK_RESULT_ID: "e394cc96-bbb9-4cc7-a715-9501cce87ac0", 24 | RESPONSE_TIME: 36, 25 | 
API_CHECK_RESPONSE_STATUS_CODE: 500, 26 | API_CHECK_RESPONSE_STATUS_TEXT: "Internal Server Error", 27 | RUN_LOCATION: "Ireland", 28 | RESULT_LINK: 29 | "https://app.checklyhq.com/checks/d6330bf8-1928-4953-9bc1-f4ac8d98f81f/check-sessions/e4b448a4-8909-4c89-8278-6a6494fc007f/results/e394cc96-bbb9-4cc7-a715-9501cce87ac0", 30 | SSL_DAYS_REMAINING: "", 31 | SSL_CHECK_DOMAIN: "", 32 | STARTED_AT: "2025-01-15T15:34:21.900Z", 33 | TAGS: "website,api,srebot", 34 | $RANDOM_NUMBER: 1547, 35 | $UUID: "380b94c2-2c56-4f1d-904a-a6122d96722a", 36 | moment: "January 15, 2025", 37 | }, 38 | { enableImplicitConversion: true }, 39 | ), 40 | ); 41 | 42 | expect(context).toBeDefined(); 43 | expect(context.length).toBeGreaterThan(0); 44 | }); 45 | }); 46 | -------------------------------------------------------------------------------- /src/aggregator/checkly-aggregator.ts: -------------------------------------------------------------------------------- 1 | import { CheckContext, ContextKey } from "./ContextAggregator"; 2 | import { checkly } from "../checkly/client"; 3 | import { WebhookAlertDto } from "../checkly/alertDTO"; 4 | import { 5 | mapCheckResultToContextValue, 6 | mapCheckToContextValue, 7 | } from "../checkly/utils"; 8 | 9 | export const checklyAggregator = { 10 | name: "Checkly", 11 | fetchContext: async (alert: WebhookAlertDto): Promise => { 12 | console.log("Aggregating Checkly Context..."); 13 | const [check, results] = await Promise.all([ 14 | checkly.getCheck(alert.CHECK_ID), 15 | checkly.getCheckResult(alert.CHECK_ID, alert.CHECK_RESULT_ID), 16 | ]); 17 | 18 | const makeCheckContext = (key: ContextKey, value: unknown) => { 19 | return { 20 | checkId: alert.CHECK_ID, 21 | source: "checkly", 22 | key, 23 | value, 24 | } as CheckContext; 25 | }; 26 | 27 | const logs = results.getLog(); 28 | const script = check.script; 29 | 30 | const checklyCheckContext = [ 31 | ...(script ? 
[makeCheckContext(ContextKey.ChecklyScript, script)] : []), 32 | makeCheckContext(ContextKey.ChecklyCheck, mapCheckToContextValue(check)), 33 | makeCheckContext( 34 | ContextKey.ChecklyResults, 35 | mapCheckResultToContextValue(results), 36 | ), 37 | makeCheckContext(ContextKey.ChecklyLogs, logs), 38 | ] as CheckContext[]; 39 | 40 | return checklyCheckContext; 41 | }, 42 | }; 43 | -------------------------------------------------------------------------------- /src/aggregator/github-aggregator.spec.ts: -------------------------------------------------------------------------------- 1 | import "reflect-metadata"; 2 | import { WebhookAlertDto } from "../checkly/alertDTO"; 3 | import { githubAggregator } from "./github-aggregator"; 4 | import "dotenv/config"; 5 | import { plainToInstance } from "class-transformer"; 6 | 7 | jest.setTimeout(30000); 8 | 9 | describe.skip("GithubAggregator", () => { 10 | it("can collect github context from configured repos", async () => { 11 | // Create a sample alert DTO similar to the one in checkly-aggregator.spec.ts 12 | const context = await githubAggregator 13 | .fetchContext( 14 | plainToInstance( 15 | WebhookAlertDto, 16 | { 17 | CHECK_NAME: "test-check", 18 | CHECK_ID: "test-check-id", 19 | CHECK_TYPE: "MULTI_STEP", 20 | GROUP_NAME: "", 21 | ALERT_TITLE: "Test check has failed", 22 | ALERT_TYPE: "ALERT_FAILURE", 23 | CHECK_RESULT_ID: "test-result-id", 24 | RESPONSE_TIME: 1000, 25 | API_CHECK_RESPONSE_STATUS_CODE: 0, 26 | API_CHECK_RESPONSE_STATUS_TEXT: "", 27 | RUN_LOCATION: "Frankfurt", 28 | RESULT_LINK: "https://example.com", 29 | SSL_DAYS_REMAINING: 0, 30 | SSL_CHECK_DOMAIN: "", 31 | STARTED_AT: "2024-03-15T13:39:26.259Z", 32 | TAGS: [], 33 | $RANDOM_NUMBER: 1234, 34 | $UUID: "test-uuid", 35 | moment: "March 15, 2024", 36 | }, 37 | { enableImplicitConversion: true }, 38 | ), 39 | ) 40 | .catch((error) => { 41 | console.error("Error fetching context:", error); 42 | return []; 43 | }); 44 | 45 | expect(context).toBeDefined(); 46 | expect(Array.isArray(context)).toBe(true); 47 | 48 | // If repos are configured, we should get some context 49 | if (process.env.GITHUB_REPOS || process.env.GITHUB_ORG) { 50 | expect(context.length).toBeGreaterThan(0); 51 | 52 | // Test structure of returned context 53 | context.forEach((item) => { 54 | expect(item).toHaveProperty("checkId", "test-check-id"); 55 | expect(item).toHaveProperty("source", "github"); 56 | expect(item).toHaveProperty("key"); 57 | expect(item).toHaveProperty("value"); 58 | }); 59 | } 60 | }); 61 | }); 62 | -------------------------------------------------------------------------------- /src/aggregator/knowledge-aggregator.ts: -------------------------------------------------------------------------------- 1 | import { WebhookAlertDto } from "../checkly/alertDTO"; 2 | import { CheckContext, ContextKey } from "./ContextAggregator"; 3 | import { 4 | getAllDocuments, 5 | KnowledgeDocument, 6 | } from "../knowledge-base/knowledgeBase"; 7 | 8 | const transformDocument = ( 9 | document: KnowledgeDocument, 10 | checkId: string, 11 | ): CheckContext => { 12 | return { 13 | checkId, 14 | value: document.content, 15 | source: "knowledge", 16 | key: ContextKey.Knowledge.replace("$documentSlug", document.slug), 17 | analysis: document.summary, 18 | } as CheckContext; 19 | }; 20 | 21 | export const knowledgeAggregator = { 22 | name: "Knowledge", 23 | fetchContext: async (alert: WebhookAlertDto): Promise => { 24 | console.log("Aggregating Knowledge Context..."); 25 | const documents = await getAllDocuments(); 26 | 
27 | return documents.map((doc) => transformDocument(doc, alert.CHECK_ID)); 28 | }, 29 | }; 30 | -------------------------------------------------------------------------------- /src/aggregator/slack-channel-aggregator.ts: -------------------------------------------------------------------------------- 1 | import { CheckContext, ContextKey } from "./ContextAggregator"; 2 | import { WebhookAlertDto } from "../checkly/alertDTO"; 3 | import { generateChannelSummary } from "../slackbot/channel-summary"; 4 | import { Source } from "@prisma/client"; 5 | 6 | // List of channels to monitor 7 | const RELEVANT_CHANNELS = 8 | process.env.SLACK_CHANNELS?.split(",").filter((id): id is string => !!id) ?? 9 | []; 10 | 11 | const makeChannelContext = ( 12 | channelId: string, 13 | summary: string, 14 | relevantLinks: Array<{ url: string; title: string }>, 15 | checkId: string, 16 | ): CheckContext => ({ 17 | key: ContextKey.SlackChannelSummary.replace( 18 | "$channel", 19 | channelId, 20 | ) as ContextKey, 21 | value: { 22 | summary, 23 | relevantLinks, 24 | channelId, 25 | }, 26 | checkId, 27 | source: Source.custom, 28 | analysis: summary, 29 | }); 30 | 31 | export const slackChannelAggregator = { 32 | name: "Slack Channel", 33 | fetchContext: async (alert: WebhookAlertDto): Promise => { 34 | try { 35 | // Fetch summaries for all monitored channels in parallel 36 | const channelSummaries = await Promise.all( 37 | RELEVANT_CHANNELS.map(async (channelId) => { 38 | try { 39 | const { summary, relevantLinks } = await generateChannelSummary( 40 | channelId, 41 | alert, 42 | ); 43 | 44 | return makeChannelContext( 45 | channelId, 46 | summary, 47 | relevantLinks, 48 | alert.CHECK_ID, 49 | ); 50 | } catch (error) { 51 | console.error( 52 | `Error fetching summary for channel ${channelId}:`, 53 | error, 54 | ); 55 | return null; 56 | } 57 | }), 58 | ); 59 | 60 | // Filter out any failed channel summaries 61 | return channelSummaries.filter( 62 | (context): context is CheckContext => context !== null, 63 | ); 64 | } catch (error) { 65 | console.error("Error in Slack Channel aggregator:", error); 66 | return []; 67 | } 68 | }, 69 | }; 70 | -------------------------------------------------------------------------------- /src/ai/openai.ts: -------------------------------------------------------------------------------- 1 | import { createOpenAI } from "@ai-sdk/openai"; 2 | import { OpenAI } from "openai"; 3 | import dotenv from "dotenv"; 4 | 5 | dotenv.config(); 6 | 7 | export const getOpenaiClient = () => 8 | new OpenAI({ 9 | apiKey: process.env.OPENAI_API_KEY, 10 | }); 11 | 12 | export const getOpenaiSDKClient = () => 13 | createOpenAI({ 14 | apiKey: process.env.OPENAI_API_KEY, 15 | }); 16 | -------------------------------------------------------------------------------- /src/ai/utils.ts: -------------------------------------------------------------------------------- 1 | import { 2 | Message, 3 | Run, 4 | RunSubmitToolOutputsParams, 5 | } from "openai/resources/beta/threads"; 6 | import { RunStep } from "openai/resources/beta/threads/runs/steps"; 7 | import { stringify } from "yaml"; 8 | import { getOpenaiClient } from "./openai"; 9 | 10 | const openai = getOpenaiClient(); 11 | 12 | export const requiresToolAction = (run: Run): boolean => { 13 | return ( 14 | run?.status === "requires_action" && 15 | run.required_action?.type === "submit_tool_outputs" 16 | ); 17 | }; 18 | 19 | export const isThreadLockError = (error: any): boolean => { 20 | return (error.message as string).includes("Can't add messages to 
thread_"); 21 | }; 22 | 23 | export const cancelRun = async (threadId: string): Promise => { 24 | const run = await openai.beta.threads.runs 25 | .list(threadId, { limit: 1, order: "desc" }) 26 | .then((response) => response.data[0]); 27 | 28 | if (run) { 29 | await openai.beta.threads.runs.cancel(threadId, run.id); 30 | } 31 | }; 32 | 33 | export const formatToolOutput = ( 34 | toolCallId: string, 35 | output: unknown, 36 | ): RunSubmitToolOutputsParams.ToolOutput => { 37 | return { 38 | output: JSON.stringify(output), 39 | tool_call_id: toolCallId, 40 | }; 41 | }; 42 | 43 | export const handleToolError = ( 44 | toolCallId: string, 45 | error: Error, 46 | ): RunSubmitToolOutputsParams.ToolOutput => { 47 | return { 48 | output: stringify({ error: error.message ?? "Unknown error" }), 49 | tool_call_id: toolCallId, 50 | }; 51 | }; 52 | 53 | export const getRunMessages = async ( 54 | threadId: string, 55 | runId: string, 56 | ): Promise => { 57 | const messages = await openai.beta.threads.messages.list(threadId, { 58 | run_id: runId, 59 | order: "asc", 60 | }); 61 | return messages.data; 62 | }; 63 | 64 | export const getMessageHistory = async ( 65 | threadId: string, 66 | cursor: string, 67 | limit: number = 30, 68 | ): Promise => { 69 | const messages = await openai.beta.threads.messages.list(threadId, { 70 | after: cursor, 71 | limit, 72 | order: "desc", 73 | }); 74 | return messages.data.reverse(); 75 | }; 76 | 77 | export const getRunSteps = async ( 78 | threadId: string, 79 | runId: string, 80 | ): Promise => { 81 | const steps = await openai.beta.threads.runs.steps.list(threadId, runId, { 82 | order: "asc", 83 | }); 84 | return steps.data; 85 | }; 86 | 87 | export const getThreadRuns = async ( 88 | threadId: string, 89 | limit: number = 30, 90 | ): Promise => { 91 | const runs = await openai.beta.threads.runs.list(threadId, { 92 | limit, 93 | }); 94 | return runs.data; 95 | }; 96 | 97 | export const getThreadSteps = async (threadId: string): Promise => { 98 | const runs = await getThreadRuns(threadId); 99 | const steps = await Promise.all( 100 | runs.map(async (run) => { 101 | const steps = await getRunSteps(threadId, run.id); 102 | return steps.map((step) => ({ ...step, runId: run.id })); 103 | }), 104 | ); 105 | return steps.flat(); 106 | }; 107 | -------------------------------------------------------------------------------- /src/api.main.ts: -------------------------------------------------------------------------------- 1 | import { initConfig } from "./lib/init-config"; 2 | import express, { Request, Response } from "express"; 3 | import { getOpenaiClient } from "./ai/openai"; 4 | import { getRunMessages } from "./ai/utils"; 5 | import checklyWebhookRouter from "./routes/checklywebhook"; 6 | import githubWebhookRouter from "./routes/githubwebhook"; 7 | import { app as slackApp } from "./slackbot/app"; 8 | import { SreAssistant } from "./sre-assistant/SreAssistant"; 9 | import { startLangfuseTelemetrySDK } from "./langfuse"; 10 | import { log } from "./log"; 11 | 12 | initConfig(); 13 | 14 | process 15 | .on("unhandledRejection", (reason, promise) => { 16 | console.error("Unhandled Rejection at:", promise, "reason:", reason); 17 | }) 18 | .on("uncaughtException", (error) => { 19 | console.error("Uncaught Exception thrown", error); 20 | }); 21 | 22 | // configures dotenv to work in your application 23 | const app = express(); 24 | 25 | // Start the OpenTelemetry SDK to collect traces in Langfuse 26 | if (process.env.ENABLE_LANGFUSE_TELEMETRY === "true") { 27 | 
startLangfuseTelemetrySDK(); 28 | } 29 | 30 | const PORT = process.env.PORT || 3000; 31 | 32 | // Middleware to parse JSON bodies 33 | app.use(express.json()); 34 | 35 | // Use the Checkly Webhook router 36 | app.use("/checkly-webhook", checklyWebhookRouter); 37 | app.use("/github-webhook", githubWebhookRouter); 38 | 39 | app.get("/", (request: Request, response: Response) => { 40 | response.status(200).send("Hello World"); 41 | }); 42 | 43 | app.post("/test/:alertId", async (req: Request, res: Response) => { 44 | const { alertId } = req.params; 45 | const thread = await getOpenaiClient().beta.threads.create(); 46 | const assistant = new SreAssistant(thread.id, alertId, { 47 | username: "Test User", 48 | date: new Date().toISOString(), 49 | }); 50 | const userMessage = await assistant.addMessage(req.body.message); 51 | const responseMessages = await assistant 52 | .runSync() 53 | .then((run) => getRunMessages(thread.id, run.id)); 54 | 55 | console.log("Assistant response: ", responseMessages); 56 | 57 | res.status(200).send(responseMessages); 58 | }); 59 | 60 | app 61 | .listen(PORT, () => { 62 | log.info({ port: PORT }, "HTTP Server listening for connections"); 63 | }) 64 | .on("error", (error) => { 65 | // gracefully handle error 66 | throw new Error(error.message); 67 | }); 68 | 69 | //run slack app 70 | slackApp.error(async (error) => { 71 | // Check the details of the error to handle cases where you should retry sending a message or stop the app 72 | console.error(error); 73 | }); 74 | 75 | const main = async () => { 76 | await slackApp.start(); 77 | log.info("Slack Bolt app is running!"); 78 | }; 79 | 80 | main(); 81 | -------------------------------------------------------------------------------- /src/checkly/PrometheusMetric.ts: -------------------------------------------------------------------------------- 1 | export class PrometheusMetric { 2 | metricName: string; 3 | help: string; 4 | type: string; 5 | values: PrometheusMetricValue[]; 6 | 7 | constructor(metricName: string, help: string, type: string) { 8 | this.metricName = metricName; 9 | this.help = help; 10 | this.type = type; 11 | this.values = []; 12 | } 13 | 14 | addValue(value: PrometheusMetricValue) { 15 | this.values.push(value); 16 | } 17 | } 18 | 19 | export class PrometheusMetricValue { 20 | labels: { [key: string]: string }; 21 | value: number; 22 | 23 | constructor(labels: { [key: string]: string }, value: number) { 24 | this.labels = labels; 25 | this.value = value; 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /src/checkly/PrometheusParser.ts: -------------------------------------------------------------------------------- 1 | import { PrometheusMetric, PrometheusMetricValue } from "./PrometheusMetric"; 2 | 3 | export class PrometheusParser { 4 | static parse(input: string): PrometheusMetric[] { 5 | const lines = input.split("\n"); 6 | const metrics: PrometheusMetric[] = []; 7 | let currentMetric: PrometheusMetric | null = null; 8 | 9 | for (const line of lines) { 10 | if (line.startsWith("# HELP")) { 11 | const parts = line.split(" "); 12 | const metricName = parts[2]; 13 | const help = parts.slice(3).join(" "); 14 | currentMetric = new PrometheusMetric(metricName, help, ""); 15 | metrics.push(currentMetric); 16 | } else if (line.startsWith("# TYPE")) { 17 | const parts = line.split(" "); 18 | const metricName = parts[2]; 19 | const type = parts[3]; 20 | currentMetric = 21 | metrics.find((m) => m.metricName === metricName) || null; 22 | if (currentMetric) 
{ 23 | currentMetric.type = type; 24 | } 25 | } else if (line.trim() !== "") { 26 | const lastSpaceIndex = line.lastIndexOf(" "); 27 | const valuePart = line.substring(lastSpaceIndex + 1); 28 | const metricPart = line.substring(0, lastSpaceIndex); 29 | 30 | const metricName = metricPart.split("{")[0]; 31 | const labelsPart = metricPart.split("{")[1]?.split("}")[0]; 32 | const labels = labelsPart 33 | ? Object.fromEntries( 34 | labelsPart 35 | .split(",") 36 | .map((l) => l.split("=").map((s) => s.replace(/"/g, ""))), 37 | ) 38 | : {}; 39 | const value = parseFloat(valuePart); 40 | 41 | currentMetric = 42 | metrics.find((m) => m.metricName === metricName) || null; 43 | if (currentMetric) { 44 | currentMetric.addValue(new PrometheusMetricValue(labels, value)); 45 | } 46 | } 47 | } 48 | 49 | return metrics; 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/checkly/alertDTO.ts: -------------------------------------------------------------------------------- 1 | import { Transform } from "class-transformer"; 2 | import { 3 | IsArray, 4 | IsDate, 5 | IsEnum, 6 | IsNumber, 7 | IsOptional, 8 | IsString, 9 | IsUUID, 10 | } from "class-validator"; 11 | 12 | /** 13 | * Enum representing the different alert types. 14 | * See https://www.checklyhq.com/docs/alerting-and-retries/alert-states/#alert-states--transitions 15 | * for more details. 16 | * @enum {string} 17 | */ 18 | export enum AlertType { 19 | /** 20 | * Nothing to see here, keep moving. 21 | */ 22 | NO_ALERT = "NO_ALERT", 23 | 24 | /** 25 | * Send directly, if threshold is “alert after 1 failure”. 26 | */ 27 | ALERT_DEGRADED = "ALERT_DEGRADED", 28 | 29 | /** 30 | * Send directly, if threshold is “alert after 1 failure”. 31 | */ 32 | ALERT_FAILURE = "ALERT_FAILURE", 33 | 34 | /** 35 | * i.e. when threshold is “alert after 2 failures” or “after 5 minutes”. 36 | */ 37 | ALERT_DEGRADED_REMAIN = "ALERT_DEGRADED_REMAIN", 38 | 39 | /** 40 | * Send but only if you received a degraded notification before. 41 | */ 42 | ALERT_DEGRADED_RECOVERY = "ALERT_DEGRADED_RECOVERY", 43 | 44 | /** 45 | * This is an escalation, it overrides any threshold setting. We send this even if you already received degraded notifications. 46 | */ 47 | ALERT_DEGRADED_FAILURE = "ALERT_DEGRADED_FAILURE", 48 | 49 | /** 50 | * i.e. when threshold is “alert after 2 failures” or “after 5 minutes”. 51 | */ 52 | ALERT_FAILURE_REMAIN = "ALERT_FAILURE_REMAIN", 53 | 54 | /** 55 | * This is a deescalation, it overrides any thresholds settings. We send this even if you already received failure notifications. 56 | */ 57 | ALERT_FAILURE_DEGRADED = "ALERT_FAILURE_DEGRADED", 58 | 59 | /** 60 | * Send directly. 
61 | */ 62 | ALERT_RECOVERY = "ALERT_RECOVERY", 63 | } 64 | 65 | export class WebhookAlertDto { 66 | @IsString() 67 | CHECK_NAME: string; 68 | 69 | @IsUUID() 70 | CHECK_ID: string; 71 | @IsUUID() 72 | $UUID: string; 73 | 74 | @IsString() 75 | CHECK_TYPE: string; 76 | 77 | @IsString() 78 | GROUP_NAME: string; 79 | 80 | @IsString() 81 | ALERT_TITLE: string; 82 | 83 | @IsEnum(AlertType) 84 | ALERT_TYPE: AlertType; 85 | 86 | @IsUUID() 87 | CHECK_RESULT_ID: string; 88 | 89 | @IsNumber() 90 | RESPONSE_TIME: number; 91 | 92 | @IsOptional() // This is optional because it's only for API checks 93 | @IsNumber() 94 | API_CHECK_RESPONSE_STATUS_CODE?: number; 95 | 96 | @IsOptional() // This is optional because it's only for API checks 97 | @IsString() 98 | API_CHECK_RESPONSE_STATUS_TEXT?: string; 99 | 100 | @IsString() 101 | RUN_LOCATION: string; 102 | 103 | @IsString() 104 | RESULT_LINK: string; 105 | 106 | @IsOptional() // This is only for ALERT_SSL alerts 107 | @IsNumber() 108 | SSL_DAYS_REMAINING?: number; 109 | 110 | @IsOptional() // This is only for ALERT_SSL alerts 111 | @IsString() 112 | SSL_CHECK_DOMAIN?: string; 113 | 114 | @IsDate() 115 | STARTED_AT: Date; 116 | 117 | @Transform(({ value }) => { 118 | try { 119 | if (!value) { 120 | return []; 121 | } 122 | // If the value is a valid stringified JSON array, parse it 123 | const parsed = typeof value === "string" ? JSON.parse(value) : value; 124 | 125 | // Return the value only if it's a valid array, otherwise return an empty array 126 | return Array.isArray(parsed) ? parsed : []; 127 | } catch (e) { 128 | if (e instanceof SyntaxError) { 129 | return [value]; 130 | } 131 | // If parsing fails, return an empty array 132 | console.trace(e); 133 | return []; 134 | } 135 | }) 136 | @IsArray() // Assuming TAGS is an array of strings 137 | @IsString({ each: true }) 138 | TAGS: string[]; 139 | 140 | @IsNumber() 141 | $RANDOM_NUMBER: number; 142 | 143 | @IsString() 144 | moment: string; 145 | } 146 | -------------------------------------------------------------------------------- /src/checkly/bla.ts: -------------------------------------------------------------------------------- 1 | const exampleAlert = { 2 | CHECK_NAME: "fail50", 3 | CHECK_ID: "b68422ae-6528-45a5-85a6-e85e1be9de2e", 4 | CHECK_TYPE: "MULTI_STEP", 5 | GROUP_NAME: "", 6 | ALERT_TITLE: "fail50 has failed", 7 | ALERT_TYPE: "ALERT_FAILURE", 8 | CHECK_RESULT_ID: "995b7d3c-d42a-443a-a8b3-194319436ba7", 9 | RESPONSE_TIME: "1649", 10 | API_CHECK_RESPONSE_STATUS_CODE: "", 11 | API_CHECK_RESPONSE_STATUS_TEXT: "", 12 | RUN_LOCATION: "Frankfurt", 13 | RESULT_LINK: 14 | "https://app.checklyhq.com/checks/b68422ae-6528-45a5-85a6-e85e1be9de2e/results/multi_step/995b7d3c-d42a-443a-a8b3-194319436ba7", 15 | SSL_DAYS_REMAINING: "", 16 | SSL_CHECK_DOMAIN: "", 17 | STARTED_AT: "2024-10-09T13:30:22.741Z", 18 | TAGS: "", 19 | $RANDOM_NUMBER: "271", 20 | $UUID: "94a5dc1e-9d84-42d5-8a9c-e0fd859616d9", 21 | moment: "October 09, 2024", 22 | }; 23 | -------------------------------------------------------------------------------- /src/checkly/checklyAlertEventDTO.ts: -------------------------------------------------------------------------------- 1 | export type ChecklyAlertSummary = { 2 | checkName: string; 3 | checkId: string; 4 | checkType: string; 5 | groupName: string; 6 | alertTitle: string; 7 | alertType: string; 8 | checkResultId: string; 9 | apiCheckResponseStatusCode?: number; 10 | apiCheckResponseStatusText?: string; 11 | runLocation: string; 12 | aiSummary: AnalysisResult; 13 | }; 14 | 15 | export 
type AnalysisResult = { 16 | logSummary: string; 17 | scriptAnalysis: string; 18 | rootCause: string; 19 | suggestedFix: string; 20 | }; 21 | -------------------------------------------------------------------------------- /src/checkly/client.ts: -------------------------------------------------------------------------------- 1 | import { initConfig } from "../lib/init-config"; 2 | import { ChecklyClient } from "./checklyclient"; 3 | 4 | initConfig(); 5 | 6 | export const checkly: ChecklyClient = new ChecklyClient(); 7 | -------------------------------------------------------------------------------- /src/checkly/utils.ts: -------------------------------------------------------------------------------- 1 | import { checkly } from "./client"; 2 | import { Check, CheckResult } from "./models"; 3 | 4 | export const mapCheckToContextValue = (check: Check) => { 5 | return { 6 | checkId: check.id, 7 | type: check.checkType, 8 | frequency: check.frequency, 9 | frequencyOffset: check.frequencyOffset, 10 | shouldFail: check.shouldFail, 11 | locations: check.locations, 12 | tags: check.tags, 13 | maxResponseTime: check.maxResponseTime, 14 | sslCheckDomain: check.sslCheckDomain, 15 | retryStrategy: check.retryStrategy, 16 | }; 17 | }; 18 | 19 | export const mapCheckResultToContextValue = (result: CheckResult) => { 20 | return { 21 | resultId: result.id, 22 | hasErrors: result.hasErrors, 23 | hasFailures: result.hasFailures, 24 | runLocation: result.runLocation, 25 | startedAt: result.startedAt, 26 | stoppedAt: result.stoppedAt, 27 | responseTime: result.responseTime, 28 | checkId: result.checkId, 29 | attempts: result.attempts, 30 | isDegraded: result.isDegraded, 31 | overMaxResponseTime: result.overMaxResponseTime, 32 | resultType: result.resultType, 33 | }; 34 | }; 35 | 36 | export const getLastCheckResult = async ( 37 | checkId: string, 38 | hasFailures: boolean, 39 | ) => { 40 | const results = await checkly.getCheckResults(checkId, hasFailures, 1); 41 | return results[0]; 42 | }; 43 | -------------------------------------------------------------------------------- /src/data-syncer.main.ts: -------------------------------------------------------------------------------- 1 | import { startSyncingData } from "./data-syncing"; 2 | import { log } from "./log"; 3 | 4 | const main = async () => { 5 | const signalsToHandle = ["SIGINT", "SIGTERM"]; 6 | 7 | signalsToHandle.forEach((signal) => { 8 | process.on(signal, () => process.exit(0)); 9 | }); 10 | 11 | log.info("Starting data syncing"); 12 | await startSyncingData().catch((err) => { 13 | console.error("Data syncing failed:", err); 14 | }); 15 | log.info("Finished syncing"); 16 | process.exit(0); 17 | }; 18 | 19 | main(); 20 | -------------------------------------------------------------------------------- /src/data-syncing.ts: -------------------------------------------------------------------------------- 1 | import timers from "node:timers/promises"; 2 | import { PublicApiImporter } from "./data-import/PublicApiImporter"; 3 | import { NotionImporter } from "./data-import/NotionImporter"; 4 | 5 | let shouldRun = true; 6 | 7 | const syncChecksAndGroups = async ( 8 | syncer: PublicApiImporter, 9 | targetInterval: number, 10 | ) => { 11 | while (shouldRun) { 12 | const startedAt = Date.now(); 13 | try { 14 | await syncer.syncChecks(); 15 | await syncer.syncCheckGroups(); 16 | } catch (err) { 17 | console.error(`msg="Syncing Checks or Check Groups failed" err=`, err); 18 | } finally { 19 | const durationMs = Date.now() - startedAt; 20 | if (durationMs 
< targetInterval) { 21 | await timers.setTimeout(targetInterval - durationMs); 22 | } 23 | } 24 | } 25 | }; 26 | 27 | const syncCheckResults = async ( 28 | syncer: PublicApiImporter, 29 | targetInterval: number, 30 | ) => { 31 | while (shouldRun) { 32 | const startedAt = Date.now(); 33 | try { 34 | const minutesBackToSync = 24 * 60; 35 | await syncer.syncCheckResults(minutesBackToSync); 36 | } catch (err) { 37 | console.error(`msg="Syncing Check Results failed" err=`, err); 38 | } finally { 39 | const durationMs = Date.now() - startedAt; 40 | if (durationMs < targetInterval) { 41 | await timers.setTimeout(targetInterval - durationMs); 42 | } 43 | } 44 | } 45 | }; 46 | 47 | const syncNotionData = async ( 48 | notionImporter: NotionImporter, 49 | targetInterval: number, 50 | ) => { 51 | while (shouldRun) { 52 | const startedAt = Date.now(); 53 | try { 54 | await notionImporter.importNotion(); 55 | } catch (err) { 56 | console.error(`msg="Syncing Data from Notion" err=`, err); 57 | } finally { 58 | const durationMs = Date.now() - startedAt; 59 | if (durationMs < targetInterval) { 60 | await timers.setTimeout(targetInterval - durationMs); 61 | } 62 | } 63 | } 64 | }; 65 | 66 | export const startSyncingData = async () => { 67 | const importer = new PublicApiImporter(); 68 | const notionImporter = new NotionImporter(); 69 | 70 | const checksAndGroups = syncChecksAndGroups(importer, 60_000); 71 | const checkResults = syncCheckResults(importer, 60_000); 72 | const syncNotion = syncNotionData(notionImporter, 60_000); // 5 minutes. this can be configured later 73 | 74 | const signalsToHandle = ["SIGINT", "SIGTERM"]; 75 | 76 | signalsToHandle.forEach((signal) => { 77 | process.on(signal, () => { 78 | shouldRun = false; 79 | }); 80 | }); 81 | 82 | await Promise.all([checksAndGroups, checkResults, syncNotion]); 83 | }; 84 | -------------------------------------------------------------------------------- /src/db/check-groups.ts: -------------------------------------------------------------------------------- 1 | import { CheckGroup } from "../checkly/models"; 2 | import { checkly } from "../checkly/client"; 3 | import postgres from "./postgres"; 4 | 5 | export interface CheckGroupTable { 6 | id: bigint; 7 | name: string; 8 | concurrency: number; 9 | accountId: string; 10 | apiCheckDefaults: any; 11 | alertSettings: any; 12 | environmentVariables: any[]; 13 | setupSnippetId: number | null; 14 | tearDownSnippetId: number | null; 15 | localSetupScript: string | null; 16 | localTearDownScript: string | null; 17 | activated: boolean; 18 | muted: boolean; 19 | useGlobalAlertSettings: boolean; 20 | doubleCheck: boolean; 21 | locations: string[]; 22 | tags: string[]; 23 | created_at: Date; 24 | updated_at: Date; 25 | runtimeId: string | null; 26 | retryStrategy: any; 27 | runParallel: boolean; 28 | alertChannelSubscriptions: any[]; 29 | privateLocations: string[]; 30 | fetchedAt: Date | null; 31 | } 32 | 33 | export async function readCheckGroup(id: bigint): Promise { 34 | const group = await postgres("check_groups") 35 | .where({ id }) 36 | .first(); 37 | if (!group) { 38 | throw new Error(`Check group with id ${id} not found`); 39 | } 40 | return group; 41 | } 42 | 43 | export async function readCheckGroups( 44 | ids: number[], 45 | ): Promise { 46 | return postgres("check_groups").whereIn("id", ids); 47 | } 48 | 49 | export async function insertCheckGroups(groups: CheckGroup[]) { 50 | const serializedGroups = groups.map((group) => ({ 51 | id: group.id, 52 | accountId: checkly.accountId, 53 | name: 
group.name, 54 | activated: group.activated, 55 | muted: group.muted, 56 | tags: group.tags || [], 57 | locations: group.locations || [], 58 | environmentVariables: JSON.stringify(group.environmentVariables || []), 59 | alertChannelSubscriptions: JSON.stringify( 60 | group.alertChannelSubscriptions || [], 61 | ), 62 | alertSettings: JSON.stringify(group.alertSettings || {}), 63 | useGlobalAlertSettings: group.useGlobalAlertSettings, 64 | doubleCheck: group.doubleCheck, 65 | runtimeId: group.runtimeId, 66 | apiCheckDefaults: JSON.stringify(group.apiCheckDefaults || {}), 67 | concurrency: group.concurrency, 68 | setupSnippetId: group.setupSnippetId, 69 | tearDownSnippetId: group.tearDownSnippetId, 70 | localSetupScript: group.localSetupScript, 71 | localTearDownScript: group.localTearDownScript, 72 | privateLocations: group.privateLocations || [], 73 | })); 74 | 75 | await postgres("check_groups") 76 | .insert(serializedGroups) 77 | .onConflict("id") 78 | .merge(); 79 | } 80 | 81 | export const removeAccountCheckGroups = async ( 82 | groupIdsToKeep: number[], 83 | accountId: string, 84 | ): Promise => { 85 | await postgres("check_groups") 86 | .delete() 87 | .whereNotIn("id", groupIdsToKeep) 88 | .where("accountId", accountId); 89 | }; 90 | -------------------------------------------------------------------------------- /src/db/check-sync-status.ts: -------------------------------------------------------------------------------- 1 | import { CheckSyncStatus } from "../checkly/models"; 2 | import postgres from "./postgres"; 3 | 4 | // We need to keep track of what was synced in a separate table to avoid re-syncing periods where no results are available 5 | // This is especially important for checks with lower frequency, or accounts with maintenance-windows 6 | // This also helps us to avoid re-syncing the same data over and over again 7 | export interface CheckSyncStatusTable extends CheckSyncStatus {} 8 | 9 | export const upsertCheckSyncStatus = async ( 10 | input: CheckSyncStatusTable, 11 | ): Promise => { 12 | await postgres("check_sync_status") 13 | .insert(input) 14 | .onConflict("checkId") 15 | .merge({ 16 | to: postgres.raw("GREATEST(EXCLUDED.to, check_sync_status.to)"), // Keep the latest `to` 17 | syncedAt: postgres.fn.now(), // Always update `syncedAt` 18 | }); 19 | }; 20 | 21 | export const findCheckSyncStatus = async ( 22 | checkId: string, 23 | ): Promise => { 24 | const syncStatus = await postgres("check_sync_status") 25 | .where({ checkId }) 26 | .first(); 27 | return syncStatus || null; 28 | }; 29 | -------------------------------------------------------------------------------- /src/db/checks-merged.ts: -------------------------------------------------------------------------------- 1 | import { keyBy, uniqBy } from "lodash"; 2 | import { CheckTable, readChecks } from "./check"; 3 | import { CheckGroupTable, readCheckGroups } from "./check-groups"; 4 | 5 | export type CheckTableMerged = CheckTable & { 6 | group?: CheckGroupTable; 7 | }; 8 | 9 | export const checksMerged = async ( 10 | checkIds: string[], 11 | ): Promise> => { 12 | const checks = await readChecks(checkIds); 13 | const groupIds = [ 14 | ...new Set(checks.map((check) => check.groupId).filter(Boolean)), 15 | ]; 16 | if (groupIds.length === 0) { 17 | return keyBy(checks, "id"); 18 | } 19 | 20 | const groupsById = keyBy(await readCheckGroups(groupIds as number[]), "id"); 21 | 22 | const result: Record = {}; 23 | checks.forEach((check) => { 24 | if (!check.groupId) { 25 | result[check.id] = check; 26 | return; 27 | 
} 28 | const group = groupsById[check.groupId]; 29 | const tags = [...new Set(check.tags || group.tags)]; 30 | const activated = check.activated && group.activated; 31 | const muted = check.muted || group.muted; 32 | const environmentVariables = uniqBy( 33 | [...group.environmentVariables, ...check.environmentVariables], 34 | "key", 35 | ); 36 | 37 | result[check.id] = { 38 | ...check, 39 | group, 40 | tags, 41 | activated, 42 | muted, 43 | environmentVariables, 44 | locations: group.locations, 45 | }; 46 | }); 47 | 48 | return result; 49 | }; 50 | 51 | export const readCheckMerged = async ( 52 | checkId: string, 53 | ): Promise => { 54 | const checks = await checksMerged([checkId]); 55 | return checks[checkId]; 56 | }; 57 | -------------------------------------------------------------------------------- /src/db/knexfile.ts: -------------------------------------------------------------------------------- 1 | import { initConfig } from "../lib/init-config"; 2 | import { log } from "../log"; 3 | 4 | initConfig(); 5 | 6 | const dbUrlEnv = process.env.DATABASE_URL; 7 | if (!dbUrlEnv && process.env.NODE_ENV !== "test") { 8 | throw new Error("DATABASE_URL is not set in environment variables."); 9 | } 10 | 11 | const config = { 12 | test: { client: "pg" }, // No DB in test 13 | local: { 14 | client: "pg", 15 | connection: dbUrlEnv, 16 | pool: { min: 2, max: 10 }, 17 | debug: false, 18 | migrations: { 19 | directory: "./migrations", // Directory where migration files are stored 20 | extension: "ts", // Migration files will be TypeScript 21 | }, 22 | }, 23 | dev: { 24 | client: "pg", 25 | connection: dbUrlEnv, 26 | pool: { min: 2, max: 10 }, 27 | debug: false, 28 | migrations: { 29 | directory: "./migrations", // Directory where migration files are stored 30 | extension: "ts", // Migration files will be TypeScript 31 | }, 32 | }, 33 | production: { 34 | client: "pg", 35 | connection: dbUrlEnv, 36 | pool: { min: 2, max: 20 }, 37 | migrations: { 38 | directory: "./migrations", // Directory where migration files are stored 39 | extension: "ts", // Migration files will be TypeScript 40 | }, 41 | }, 42 | }; 43 | 44 | const currentConfig = config[process.env.NODE_ENV || "local"]; 45 | 46 | log.info( 47 | { 48 | env: process.env.NODE_ENV || "local", 49 | }, 50 | "Loading Knex config", 51 | ); 52 | 53 | export default currentConfig; 54 | -------------------------------------------------------------------------------- /src/db/learnings.ts: -------------------------------------------------------------------------------- 1 | import postgres from "./postgres"; 2 | import pgvector from "pgvector/knex"; 3 | 4 | export interface LearningsTable { 5 | id: string; 6 | source: LearningSource; 7 | sourceId: string; 8 | content: string; 9 | fetchedAt: Date; 10 | embedding: number[]; // represents a vector of 1536 numbers 11 | embedding_model: string; 12 | } 13 | 14 | export enum LearningSource { 15 | NOTION = "NOTION", 16 | } 17 | 18 | export async function upsertLearnings( 19 | learnings: LearningsTable[], 20 | ): Promise { 21 | await postgres("learnings") 22 | .insert( 23 | learnings.map((learning) => ({ 24 | ...learning, 25 | embedding: pgvector.toSql(learning.embedding), 26 | })), 27 | ) 28 | .onConflict("id") 29 | .merge(); 30 | } 31 | 32 | export async function findAllLearnings(options?: { 33 | source: LearningSource; 34 | }): Promise { 35 | const queryBuilder = postgres("learnings").select("*"); 36 | 37 | if (options?.source) { 38 | queryBuilder.where("source", options.source); 39 | } 40 | 41 | return 
queryBuilder; 42 | } 43 | 44 | export async function deleteLearnings( 45 | idsToRemove: string[], 46 | ): Promise { 47 | return postgres("learnings").whereIn("id", idsToRemove).del(); 48 | } 49 | -------------------------------------------------------------------------------- /src/db/migrations/20250304153255_add_pgvector.ts: -------------------------------------------------------------------------------- 1 | import type { Knex } from "knex"; 2 | 3 | export async function up(knex: Knex): Promise { 4 | await knex.raw("CREATE EXTENSION IF NOT EXISTS vector;"); 5 | } 6 | 7 | export async function down(knex: Knex): Promise { 8 | await knex.raw("DROP EXTENSION IF EXISTS vector;"); 9 | } 10 | -------------------------------------------------------------------------------- /src/db/migrations/20250304200309_add_checks.ts: -------------------------------------------------------------------------------- 1 | import type { Knex } from "knex"; 2 | 3 | export async function up(knex: Knex): Promise { 4 | await knex.schema.createTable("checks", (table) => { 5 | table.uuid("id").primary(); // Unique Checkly ID 6 | table.uuid("accountId").notNullable().index(); // Account UUID 7 | table.string("checkType").notNullable(); // e.g., BROWSER, API 8 | table.string("name").notNullable(); 9 | table.integer("frequency").nullable(); 10 | table.integer("frequencyOffset").nullable(); 11 | table.boolean("activated").defaultTo(true); 12 | table.boolean("muted").defaultTo(false); 13 | table.boolean("shouldFail").defaultTo(false); 14 | 15 | table.specificType("locations", "TEXT[]").nullable(); 16 | table.text("script").nullable(); 17 | table.timestamp("created_at"); 18 | table.timestamp("updated_at"); 19 | 20 | table.boolean("doubleCheck").defaultTo(false); 21 | table.specificType("tags", "TEXT[]").defaultTo("{}"); 22 | table.string("sslCheckDomain").nullable(); 23 | table.integer("setupSnippetId").nullable(); 24 | table.integer("tearDownSnippetId").nullable(); 25 | table.text("localSetupScript").nullable(); 26 | table.text("localTearDownScript").nullable(); 27 | 28 | table.jsonb("alertSettings").defaultTo("{}"); 29 | table.boolean("useGlobalAlertSettings").defaultTo(true); 30 | 31 | table.integer("degradedResponseTime").nullable(); 32 | table.integer("maxResponseTime").nullable(); 33 | 34 | table.integer("groupId").nullable(); 35 | table.integer("groupOrder").defaultTo(0); 36 | table.string("heartbeat").nullable(); 37 | table.string("runtimeId").nullable(); 38 | table.string("scriptPath").nullable(); 39 | table.jsonb("retryStrategy").defaultTo("{}"); 40 | table.jsonb("request").defaultTo("{}"); 41 | table.boolean("runParallel").defaultTo(false); 42 | table.jsonb("alertChannelSubscriptions").defaultTo("[]"); 43 | table.specificType("privateLocations", "TEXT[]").defaultTo("{}"); 44 | table.jsonb("dependencies").defaultTo("[]"); 45 | table.jsonb("environmentVariables").defaultTo("[]"); 46 | 47 | // Checkly specific fields 48 | table.timestamp("fetchedAt").nullable(); // Last fetch time 49 | 50 | // Indexes for performance 51 | table.index(["groupId"]); 52 | }); 53 | } 54 | 55 | export async function down(knex: Knex): Promise { 56 | await knex.schema.dropTableIfExists("checks"); 57 | } 58 | -------------------------------------------------------------------------------- /src/db/migrations/20250304211520_add_check_groups.ts: -------------------------------------------------------------------------------- 1 | import type { Knex } from "knex"; 2 | 3 | export async function up(knex: Knex): Promise { 4 | await 
knex.schema.createTable("check_groups", (table) => { 5 | table.bigInteger("id").primary(); // Unique Checkly Group ID 6 | table.string("name").notNullable(); 7 | table.integer("concurrency").notNullable().defaultTo(1); 8 | table.uuid("accountId").notNullable(); 9 | 10 | // Default API check settings (stored as JSON) 11 | table.jsonb("apiCheckDefaults").defaultTo("{}"); 12 | 13 | // Alert settings 14 | table.jsonb("alertSettings").defaultTo("{}"); 15 | 16 | // Environment variables (list of key-value pairs) 17 | table.jsonb("environmentVariables").defaultTo("[]"); 18 | 19 | table.integer("setupSnippetId").nullable(); 20 | table.integer("tearDownSnippetId").nullable(); 21 | table.text("localSetupScript").nullable(); 22 | table.text("localTearDownScript").nullable(); 23 | 24 | table.boolean("activated").defaultTo(true); 25 | table.boolean("muted").defaultTo(false); 26 | table.boolean("useGlobalAlertSettings").defaultTo(true); 27 | table.boolean("doubleCheck").defaultTo(false); 28 | 29 | // Locations as an array 30 | table.specificType("locations", "TEXT[]").notNullable(); 31 | 32 | table.specificType("tags", "TEXT[]").defaultTo("{}"); 33 | 34 | table.timestamp("created_at").defaultTo(knex.fn.now()); 35 | table.timestamp("updated_at").defaultTo(knex.fn.now()); 36 | 37 | table.string("runtimeId").nullable(); 38 | 39 | // Retry strategy as JSON 40 | table.jsonb("retryStrategy").defaultTo("{}"); 41 | 42 | table.boolean("runParallel").defaultTo(false); 43 | 44 | // Alert channel subscriptions 45 | table.jsonb("alertChannelSubscriptions").defaultTo("[]"); 46 | 47 | table.specificType("privateLocations", "TEXT[]").defaultTo("{}"); 48 | 49 | // Fetched timestamp for cron tracking 50 | table.timestamp("fetchedAt").nullable(); 51 | }); 52 | } 53 | 54 | export async function down(knex: Knex): Promise { 55 | await knex.schema.dropTableIfExists("check_groups"); 56 | } 57 | -------------------------------------------------------------------------------- /src/db/migrations/20250305132710_add_check_results.ts: -------------------------------------------------------------------------------- 1 | import type { Knex } from "knex"; 2 | 3 | export async function up(knex: Knex): Promise { 4 | await knex.schema.createTable("check_results", (table) => { 5 | table.uuid("id").primary(); // Unique result ID 6 | table.uuid("checkId").notNullable().index(); // References the check that was run 7 | table.uuid("accountId").notNullable(); 8 | table.bigInteger("checkRunId").notNullable(); // Unique run identifier 9 | table.string("name").notNullable(); // Name of the check 10 | table.boolean("hasErrors").defaultTo(false); 11 | table.boolean("hasFailures").defaultTo(false); 12 | table.string("runLocation").notNullable(); // e.g., "eu-central-1" 13 | 14 | table.timestamp("startedAt").notNullable(); 15 | table.timestamp("stoppedAt").notNullable(); 16 | table.integer("responseTime").notNullable(); // Response time in ms 17 | 18 | table.integer("attempts").defaultTo(1); 19 | table.boolean("isDegraded").defaultTo(false); 20 | table.boolean("overMaxResponseTime").defaultTo(false); 21 | table.uuid("sequenceId").notNullable(); // Sequence of check runs 22 | table.string("resultType").notNullable(); // e.g., "FINAL" 23 | 24 | // Store full multi-step check result 25 | table.jsonb("multiStepCheckResult").defaultTo("{}"); 26 | table.jsonb("apiCheckResult").defaultTo("{}"); 27 | table.jsonb("browserCheckResult").defaultTo("{}"); 28 | 29 | table.timestamp("created_at"); 30 | 31 | table.timestamp("fetchedAt").nullable(); 32 | }); 33 | } 34 
| 35 | export async function down(knex: Knex): Promise { 36 | await knex.schema.dropTableIfExists("check_results"); 37 | } 38 | -------------------------------------------------------------------------------- /src/db/migrations/20250306083009_error_cluster.ts: -------------------------------------------------------------------------------- 1 | import type { Knex } from "knex"; 2 | 3 | export async function up(knex: Knex): Promise { 4 | await knex.schema.createTable("error_cluster", (table) => { 5 | table.uuid("id").primary(); 6 | table.uuid("account_id").notNullable(); 7 | table.text("error_message").notNullable(); 8 | table.timestamp("first_seen_at").notNullable(); 9 | table.timestamp("last_seen_at").notNullable(); 10 | table.specificType("embedding", "vector(1536)").notNullable(); 11 | table.string("embedding_model").notNullable(); 12 | 13 | table.index(["account_id"]); 14 | }); 15 | 16 | await knex.schema.raw( 17 | `CREATE INDEX idx_error_cluster_embedding ON error_cluster USING hnsw (embedding vector_l2_ops)`, 18 | ); 19 | 20 | await knex.schema.createTable("error_cluster_membership", (table) => { 21 | table 22 | .uuid("error_id") 23 | .notNullable() 24 | .references("id") 25 | .inTable("error_cluster") 26 | .onDelete("CASCADE"); 27 | table 28 | .uuid("result_check_id") 29 | .notNullable() 30 | .references("id") 31 | .inTable("check_results") 32 | .onDelete("CASCADE"); 33 | table 34 | .uuid("check_id") 35 | .notNullable() 36 | .references("id") 37 | .inTable("checks") 38 | .onDelete("CASCADE"); 39 | table.timestamp("date").notNullable(); 40 | table.specificType("embedding", "float[]").notNullable(); 41 | table.string("embedding_model").notNullable(); 42 | 43 | // Create a composite primary key 44 | table.primary(["error_id", "result_check_id"]); 45 | 46 | table.index(["error_id", "date"]); 47 | }); 48 | } 49 | 50 | export async function down(knex: Knex): Promise { 51 | await knex.schema.dropTable("error_cluster_membership"); 52 | await knex.schema.dropTable("error_cluster"); 53 | } 54 | -------------------------------------------------------------------------------- /src/db/migrations/20250310075322_add_check_sync_status.ts: -------------------------------------------------------------------------------- 1 | import type { Knex } from "knex"; 2 | 3 | export async function up(knex: Knex): Promise { 4 | await knex.schema.createTable("check_sync_status", (table) => { 5 | table.uuid("checkId").primary(); 6 | table.uuid("accountId").notNullable(); 7 | 8 | table.timestamp("from").notNullable(); // Start of the synced timeframe 9 | table.timestamp("to").notNullable(); // End of the synced timeframe 10 | 11 | table.timestamp("syncedAt").notNullable().defaultTo(knex.fn.now()); // Timestamp when it was last updated 12 | }); 13 | } 14 | 15 | export async function down(knex: Knex): Promise { 16 | await knex.schema.dropTableIfExists("check_sync_status"); 17 | } 18 | -------------------------------------------------------------------------------- /src/db/migrations/20250311144357_drop_foreign_key_error_cluster_membership.ts: -------------------------------------------------------------------------------- 1 | exports.up = async function (knex) { 2 | await knex.schema.alterTable("error_cluster_membership", (table) => { 3 | table.dropForeign("error_id"); 4 | table.dropForeign("result_check_id"); 5 | table.dropForeign("check_id"); 6 | }); 7 | }; 8 | 9 | exports.down = async function (knex) { 10 | // do nothing 11 | }; 12 | -------------------------------------------------------------------------------- 
/src/db/migrations/20250321171451_improve_indexing.ts: -------------------------------------------------------------------------------- 1 | import type { Knex } from "knex"; 2 | 3 | const INDEX_STARTED_AT_CHECK_ID_ACCOUNT_ID = 4 | "check_results_startedat_checkid_accountid_idx"; 5 | const INDEX_CHECK_RESULTS_CHECKID = "check_results_checkid_index"; 6 | const INDEX_IDX_CHECK_RESULTS_LATEST = "idx_check_results_latest"; 7 | const INDEX_CHECK_RESULTS_QUERY = "check_results_query_index"; 8 | 9 | export async function up(knex: Knex): Promise { 10 | // This is more optimal because startedAt is the first column in the index 11 | // It can be used in all types of queries 12 | await knex.schema.raw( 13 | `CREATE INDEX IF NOT EXISTS ${INDEX_STARTED_AT_CHECK_ID_ACCOUNT_ID} ON check_results ("startedAt", "checkId", "accountId")`, 14 | ); 15 | 16 | // Drop unnecessary indexes 17 | await knex.schema.raw(`DROP INDEX IF EXISTS ${INDEX_CHECK_RESULTS_CHECKID}`); 18 | await knex.schema.raw( 19 | `DROP INDEX IF EXISTS ${INDEX_IDX_CHECK_RESULTS_LATEST}`, 20 | ); 21 | await knex.schema.raw(`DROP INDEX IF EXISTS ${INDEX_CHECK_RESULTS_QUERY}`); 22 | } 23 | 24 | export async function down(knex: Knex): Promise { 25 | await knex.schema.raw( 26 | `DROP INDEX IF EXISTS ${INDEX_STARTED_AT_CHECK_ID_ACCOUNT_ID}`, 27 | ); 28 | 29 | // Recreate the dropped indexes 30 | await knex.schema.raw( 31 | `CREATE INDEX IF NOT EXISTS ${INDEX_CHECK_RESULTS_CHECKID} ON public.check_results ("checkId")`, 32 | ); 33 | await knex.schema.raw( 34 | `CREATE INDEX IF NOT EXISTS ${INDEX_IDX_CHECK_RESULTS_LATEST} ON public.check_results ("checkId" ASC, "startedAt" DESC)`, 35 | ); 36 | await knex.schema.raw( 37 | `CREATE INDEX IF NOT EXISTS ${INDEX_CHECK_RESULTS_QUERY} ON public.check_results ("accountId" ASC, "checkId" ASC, "startedAt" DESC)`, 38 | ); 39 | } 40 | -------------------------------------------------------------------------------- /src/db/migrations/20250322154023_add_learnings.ts: -------------------------------------------------------------------------------- 1 | import { Knex } from "knex"; 2 | 3 | export const up = async (knex: Knex): Promise => { 4 | await knex.schema.createTable("learnings", (table) => { 5 | table.string("id").primary(); // short string identifier 6 | table.string("source"); // short string for source 7 | table.string("sourceId"); // short string for source ID, this can notion page title 8 | table.text("content"); // long text column for content 9 | table.timestamp("fetchedAt"); // Date/timestamp column 10 | table.specificType("embedding", "vector(1536)").notNullable(); // custom vector type 11 | table.string("embedding_model").notNullable(); // string column for the embedding model 12 | 13 | table.unique(["source", "sourceId"]); 14 | }); 15 | }; 16 | 17 | export const down = async (knex: Knex): Promise => { 18 | await knex.schema.dropTableIfExists("learnings"); 19 | }; 20 | -------------------------------------------------------------------------------- /src/db/migrations/20250325083143_improve_error_cluster_membership_indexes.ts: -------------------------------------------------------------------------------- 1 | import type { Knex } from "knex"; 2 | 3 | const NEW_INDEX_NAME = "error_cluster_membership_date_error_id_check_id_idx"; 4 | 5 | export async function up(knex: Knex): Promise { 6 | await knex.schema.raw( 7 | `CREATE INDEX IF NOT EXISTS ${NEW_INDEX_NAME} ON error_cluster_membership ("date", "error_id", "check_id")`, 8 | ); 9 | } 10 | 11 | export async function down(knex: Knex): Promise { 12 | await 
knex.schema.raw(`DROP INDEX IF EXISTS ${NEW_INDEX_NAME}`); 13 | } 14 | -------------------------------------------------------------------------------- /src/db/postgres.ts: -------------------------------------------------------------------------------- 1 | import Knex from "knex"; 2 | import knexConfig from "./knexfile"; 3 | 4 | const postgres = Knex(knexConfig); 5 | 6 | export default postgres; 7 | -------------------------------------------------------------------------------- /src/github/agent.spec.ts: -------------------------------------------------------------------------------- 1 | import dotenv from "dotenv"; 2 | import { createOpenAI, OpenAIProvider } from "@ai-sdk/openai"; 3 | import { GithubAgent } from "./agent"; 4 | import GitHubAPI from "./github"; 5 | 6 | dotenv.config(); 7 | 8 | const OPENAI_API_KEY = process.env.OPENAI_API_KEY!; 9 | const CHECKLY_GITHUB_TOKEN = process.env.CHECKLY_GITHUB_TOKEN!; 10 | 11 | jest.setTimeout(120000); // Set timeout to 120 seconds 12 | 13 | describe("GithubAgent Tests", () => { 14 | let openai: OpenAIProvider; 15 | let github: GitHubAPI; 16 | 17 | beforeAll(() => { 18 | openai = createOpenAI({ apiKey: OPENAI_API_KEY }); 19 | github = new GitHubAPI(CHECKLY_GITHUB_TOKEN); 20 | }); 21 | 22 | it.skip("should summarize a single release", async () => { 23 | let agent = new GithubAgent(openai("gpt-4o"), github); 24 | let response = await agent.summarizeRelease({ 25 | org: "checkly", 26 | repo: "checkly-webapp", 27 | previousRelease: "2025-02-07-15.59.26", 28 | release: "2025-02-11-17.04.27", 29 | }); 30 | console.log(response); 31 | }); 32 | 33 | it.skip("should summarize releases by prompt", async () => { 34 | let agent = new GithubAgent(openai("gpt-4o"), github); 35 | let response = await agent.summarizeReleases( 36 | "what changed in the ui since yesterday", 37 | "checkly", 38 | ); 39 | console.log(response); 40 | }); 41 | }); 42 | -------------------------------------------------------------------------------- /src/github/github.spec.ts: -------------------------------------------------------------------------------- 1 | import "dotenv/config"; 2 | import GitHubAPI from "./github"; 3 | 4 | const CHECKLY_GITHUB_TOKEN = process.env.CHECKLY_GITHUB_TOKEN!; 5 | 6 | describe("GitHub API Tests", () => { 7 | it.skip("should print the api usage", async () => { 8 | const githubAPI = new GitHubAPI(CHECKLY_GITHUB_TOKEN); 9 | await githubAPI.checkRateLimit(); 10 | }); 11 | 12 | it.skip("should return the latest releases for checkly", async () => { 13 | const githubAPI = new GitHubAPI(CHECKLY_GITHUB_TOKEN); 14 | const org = "checkly"; 15 | const repo = "checkly-backend"; 16 | 17 | const _24h_ago = new Date(Date.now() - 24 * 60 * 60 * 1000); 18 | 19 | const releases = await githubAPI.queryLatestReleases(org, repo, _24h_ago); 20 | 21 | let diff = await githubAPI.getDiffBetweenTags( 22 | org, 23 | repo, 24 | releases[0].tag, 25 | releases[1].tag, 26 | ); 27 | expect(diff).toBeDefined(); 28 | }); 29 | 30 | it.skip("should return the latest releases with diffs for checkly", async () => { 31 | const githubAPI = new GitHubAPI(CHECKLY_GITHUB_TOKEN); 32 | const org = "checkly"; 33 | const repo = "checkly-backend"; 34 | 35 | const _24h_ago = new Date(Date.now() - 24 * 60 * 60 * 1000); 36 | const releasesWithDiffs = await githubAPI.queryLatestReleasesWithDiffs( 37 | org, 38 | repo, 39 | _24h_ago, 40 | ); 41 | expect(releasesWithDiffs).toBeDefined(); 42 | }); 43 | }); 44 | -------------------------------------------------------------------------------- 
/src/github/slackBlock.ts: -------------------------------------------------------------------------------- 1 | export const releaseHeader = { 2 | type: "section", 3 | text: { 4 | type: "mrkdwn", 5 | text: "*Release Overview*", 6 | }, 7 | }; 8 | 9 | export const divider = { type: "divider" }; 10 | 11 | export const createReleaseBlock = function ({ 12 | release, 13 | releaseUrl, 14 | diffUrl, 15 | date, 16 | repo, 17 | repoUrl, 18 | authors, 19 | summary, 20 | }: { 21 | release: string; 22 | releaseUrl: string; 23 | diffUrl: string; 24 | date: string; 25 | repo: string; 26 | repoUrl: string; 27 | authors: string[]; 28 | summary: string; 29 | }) { 30 | return { 31 | blocks: [ 32 | { 33 | type: "section", 34 | fields: [ 35 | { 36 | type: "mrkdwn", 37 | text: `:rocket: *Release*\n<${releaseUrl}|${release}> - <${diffUrl}|Diff>`, 38 | }, 39 | { 40 | type: "mrkdwn", 41 | text: `:calendar: *When*\n${date}`, 42 | }, 43 | { 44 | type: "mrkdwn", 45 | text: `:package: *Repo*\n<${repoUrl}|${repo}>`, 46 | }, 47 | { 48 | type: "mrkdwn", 49 | text: `:star: *Authors*\n${authors.join(", ")}`, 50 | }, 51 | ], 52 | }, 53 | { 54 | type: "section", 55 | text: { 56 | type: "mrkdwn", 57 | text: `*Summary*\n${summary}`, 58 | }, 59 | }, 60 | ], 61 | }; 62 | }; 63 | 64 | export const createDeploymentBlock = function ({ 65 | diffUrl, 66 | date, 67 | repo, 68 | repoUrl, 69 | authors, 70 | summary, 71 | environment, 72 | deploymentUrl, 73 | }: { 74 | diffUrl: string; 75 | date: string; 76 | repo: string; 77 | environment: string; 78 | repoUrl: string; 79 | authors: string[]; 80 | summary: string; 81 | deploymentUrl: string; 82 | }) { 83 | return { 84 | blocks: [ 85 | { 86 | type: "section", 87 | fields: [ 88 | { 89 | type: "mrkdwn", 90 | text: `:rocket: *Deployment*\n<${deploymentUrl}|${environment}> - <${diffUrl}|Diff>`, 91 | }, 92 | { 93 | type: "mrkdwn", 94 | text: `:calendar: *When*\n${date}`, 95 | }, 96 | { 97 | type: "mrkdwn", 98 | text: `:package: *Repo*\n<${repoUrl}|${repo}>`, 99 | }, 100 | { 101 | type: "mrkdwn", 102 | text: `:star: *Authors*\n${authors.join(", ")}`, 103 | }, 104 | ], 105 | }, 106 | { 107 | type: "section", 108 | text: { 109 | type: "mrkdwn", 110 | text: `*Summary*\n${summary}`, 111 | }, 112 | }, 113 | ], 114 | }; 115 | }; 116 | -------------------------------------------------------------------------------- /src/grafana/grafanaclient.spec.ts: -------------------------------------------------------------------------------- 1 | import { GrafanaClient } from "./grafanaclient"; 2 | import "dotenv/config"; 3 | 4 | const grafanaApiKey = process.env.GRAFANA_API_KEY!; 5 | const grafanaUrl = process.env.GRAFANA_INSTANCE_URL!; 6 | const isGithubActions = process.env.GITHUB_ACTIONS === "true"; 7 | const maybe = !isGithubActions ? 
describe : describe.skip; 8 | 9 | maybe("GrafanaClient", () => { 10 | let grafanaClient: GrafanaClient; 11 | 12 | beforeAll(() => { 13 | grafanaClient = new GrafanaClient(grafanaUrl, grafanaApiKey); 14 | }); 15 | 16 | it("should get dashboards", async () => { 17 | const dashboards = await grafanaClient.getDashboards(); 18 | expect(dashboards).toBeDefined(); 19 | 20 | expect(Array.isArray(dashboards)).toBe(true); 21 | 22 | const db = "Runners Overview"; 23 | const dashboard = await grafanaClient.getDashboardUrlByName(db); 24 | expect(dashboard).toBeDefined(); 25 | }); 26 | }); 27 | -------------------------------------------------------------------------------- /src/grafana/grafanaclient.ts: -------------------------------------------------------------------------------- 1 | export class GrafanaClient { 2 | private readonly grafanaUrl: string; 3 | private readonly grafanaApiKey: string; 4 | 5 | constructor(grafanaUrl: string, grafanaApiKey: string) { 6 | this.grafanaUrl = grafanaUrl; 7 | this.grafanaApiKey = grafanaApiKey; 8 | } 9 | 10 | async getDashboardUrlByName(dashboardName: string): Promise { 11 | const dashboards = await this.getDashboards(); 12 | const runners = dashboards 13 | .filter((d) => d.type === "dash-db") 14 | .filter((d) => 15 | d.title.toLowerCase().includes(dashboardName.toLowerCase()), 16 | )[0]; 17 | return runners.url; 18 | } 19 | 20 | async getDashboards(): Promise { 21 | const url = `${this.grafanaUrl}/api/search`; 22 | const response = await fetch(url, { 23 | method: "GET", 24 | headers: { 25 | "Content-Type": "application/json", 26 | Authorization: `Bearer ${this.grafanaApiKey}`, 27 | }, 28 | }); 29 | 30 | if (!response.ok) { 31 | throw new Error(`Error fetching dashboards: ${response.statusText}`); 32 | } 33 | 34 | return response.json(); 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /src/grafana/lokiclient.spec.ts: -------------------------------------------------------------------------------- 1 | import { LokiClient } from "./lokiclient"; 2 | import "dotenv/config"; 3 | const lokiApiKey = process.env.LOKI_API_KEY!; 4 | const user = process.env.LOKI_USER!; 5 | const lokiUrl = process.env.LOKI_URL!; 6 | const isGithubActions = process.env.GITHUB_ACTIONS === "true"; 7 | const maybe = !isGithubActions ? 
describe : describe.skip; 8 | jest.setTimeout(30000); 9 | maybe("LokiClient", () => { 10 | let lokiClient: LokiClient; 11 | 12 | beforeAll(() => { 13 | lokiClient = new LokiClient(lokiUrl, lokiApiKey, user, "staging"); 14 | }); 15 | 16 | it("can count logs by level for a service", async () => { 17 | const service = "checkly-api"; 18 | const rangeMinutes = 60 * 12; 19 | const data = await lokiClient.getLogCountByLevel(service, rangeMinutes); 20 | expect(data).toBeDefined(); 21 | console.log(JSON.stringify(data.data.result)); 22 | expect(data).toHaveProperty("data"); 23 | //console.log(JSON.stringify(data.data.result[0].values)); 24 | }); 25 | 26 | it("should get available services", async () => { 27 | const services = await lokiClient.getAllValuesForLabel("app"); 28 | expect(services).toBeDefined(); 29 | expect(services.length).toBeGreaterThan(0); 30 | //console.log(services); 31 | }); 32 | 33 | it("should run a query and return results", async () => { 34 | const services = lokiClient.getAllValuesForLabel("app"); 35 | const data = await lokiClient.getErrorsForService(services[1], 10); 36 | expect(data).toBeDefined(); 37 | expect(data).toHaveProperty("data"); 38 | //console.log(JSON.stringify(data.data.result[0].values)); 39 | }); 40 | }); 41 | -------------------------------------------------------------------------------- /src/grafana/lokiclient.ts: -------------------------------------------------------------------------------- 1 | export class LokiClient { 2 | private readonly lokiUrl: string; 3 | private readonly lokiApiKey: string; 4 | private readonly environment: string; 5 | user: string; 6 | 7 | constructor( 8 | lokiUrl: string, 9 | lokiApiKey: string, 10 | user: string, 11 | environment: string, 12 | ) { 13 | this.lokiUrl = lokiUrl; 14 | this.lokiApiKey = lokiApiKey; 15 | this.environment = environment; 16 | this.user = user; 17 | } 18 | 19 | queryError(service: string): string { 20 | return `{app="${service}", env="${this.environment}"} |= "error"`; 21 | } 22 | 23 | async getLogCountByLevel(app: string, rangeMinutes: number): Promise { 24 | const query = `sum by (detected_level) (count_over_time({app="${app}", env="${this.environment}"}[5m]))`; 25 | const end = new Date(); 26 | const start = new Date(end.getTime() - rangeMinutes * 60 * 1000); 27 | const data = await this.queryLoki( 28 | query, 29 | start.toISOString(), 30 | end.toISOString(), 31 | ); 32 | return data; 33 | } 34 | 35 | async getAllEnvironments(): Promise { 36 | return this.getAllValuesForLabel("env"); 37 | } 38 | 39 | async getAllApps(): Promise { 40 | return this.getAllValuesForLabel("app"); 41 | } 42 | 43 | /** 44 | * This function gets all available values for a label in Loki. 45 | * @returns 46 | */ 47 | async getAllValuesForLabel(label: string): Promise { 48 | const url = new URL(`${this.lokiUrl}/loki/api/v1/label/${label}/values`); 49 | const authHeader = "Basic " + btoa(`${this.user}:${this.lokiApiKey}`); 50 | 51 | const response = await fetch(url.toString(), { 52 | method: "GET", 53 | headers: { 54 | "Content-Type": "application/json", 55 | Authorization: authHeader, 56 | }, 57 | }); 58 | 59 | if (!response.ok) { 60 | throw new Error( 61 | `Error fetching available services: ${response.statusText}`, 62 | ); 63 | } 64 | 65 | const data = await response.json(); 66 | return data.data; // Assuming the response structure is { "status": "success", "data": ["app1", "app2", ...] 
} 67 | } 68 | 69 | async getErrorsForService(service: string, rangeMinutes: number) { 70 | // Get the current time and subtract "rangeMinutes" minutes 71 | const end = new Date(); 72 | const start = new Date(end.getTime() - rangeMinutes * 60 * 1000); 73 | 74 | // Convert to ISO string format 75 | const startISOString = start.toISOString(); 76 | const endISOString = end.toISOString(); 77 | const query = this.queryError(service); 78 | return this.queryLoki(query, startISOString, endISOString); 79 | } 80 | async queryLoki(query: string, start: string, end: string): Promise { 81 | const url = new URL(`${this.lokiUrl}/loki/api/v1/query_range`); 82 | url.searchParams.append("query", query); 83 | url.searchParams.append("start", start); 84 | url.searchParams.append("end", end); 85 | const authHeader = "Basic " + btoa(`${this.user}:${this.lokiApiKey}`); 86 | 87 | const response = await fetch(url.toString(), { 88 | method: "GET", 89 | headers: { 90 | "Content-Type": "application/json", 91 | Authorization: authHeader, 92 | }, 93 | }); 94 | 95 | if (!response.ok) { 96 | const errorText = await response.text(); 97 | throw new Error( 98 | `Error querying Loki: ${response.status} ${response.statusText} - ${errorText}`, 99 | ); 100 | } 101 | //https://grafana.com/docs/loki/latest/reference/loki-http-api/#query-logs-within-a-range-of-time 102 | return response.json(); 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /src/instrumentation.ts: -------------------------------------------------------------------------------- 1 | import "reflect-metadata"; 2 | 3 | export function register() { 4 | console.log("Registering instrumentation"); 5 | } 6 | -------------------------------------------------------------------------------- /src/knowledge-base/knowledgeBase.ts: -------------------------------------------------------------------------------- 1 | import { findAllLearnings } from "../db/learnings"; 2 | 3 | export type KnowledgeDocument = { 4 | content: string; 5 | title: string; 6 | slug: string; 7 | summary: string; 8 | }; 9 | 10 | export const getAllDocuments = async (): Promise => { 11 | const learningsInDb = await findAllLearnings(); 12 | 13 | return learningsInDb.map((learning) => ({ 14 | content: learning.content, 15 | slug: learning.id, 16 | summary: "", // TODO we should replace this with a vector search 17 | title: learning.sourceId, 18 | })); 19 | }; 20 | 21 | export const getDocumentBySlug = async ( 22 | slug: string, 23 | ): Promise => { 24 | const documents = await getAllDocuments(); 25 | return documents.find((doc) => doc.slug === slug) || null; 26 | }; 27 | -------------------------------------------------------------------------------- /src/langfuse/index.ts: -------------------------------------------------------------------------------- 1 | import { Langfuse } from "langfuse"; 2 | import { getNodeAutoInstrumentations } from "@opentelemetry/auto-instrumentations-node"; 3 | import { NodeSDK } from "@opentelemetry/sdk-node"; 4 | import { LangfuseExporter } from "langfuse-vercel"; 5 | import dotenv from "dotenv"; 6 | 7 | dotenv.config(); 8 | 9 | export const langfuse = new Langfuse({ 10 | secretKey: process.env.LANGFUSE_SECRET_KEY, 11 | publicKey: process.env.LANGFUSE_PUBLIC_KEY, 12 | baseUrl: process.env.LANGFUSE_BASEURL, 13 | }); 14 | 15 | export const telemetrySDK = new NodeSDK({ 16 | traceExporter: new LangfuseExporter(), 17 | instrumentations: [getNodeAutoInstrumentations()], 18 | }); 19 | 20 | export const startLangfuseTelemetrySDK = () 
=> { 21 | if ( 22 | process.env.LANGFUSE_SECRET_KEY && 23 | process.env.LANGFUSE_BASEURL && 24 | process.env.LANGFUSE_PUBLIC_KEY 25 | ) { 26 | telemetrySDK.start(); 27 | } else { 28 | console.warn( 29 | "LANGFUSE_SECRET_KEY, LANGFUSE_BASEURL and LANGFUSE_PUBLIC_KEY are not set. Langfuse observability will not be available.", 30 | ); 31 | } 32 | }; 33 | -------------------------------------------------------------------------------- /src/lib/async-utils.spec.ts: -------------------------------------------------------------------------------- 1 | import { promiseAllWithConcurrency } from "./async-utils"; 2 | 3 | describe("promiseAllWithConcurrency", () => { 4 | it("should resolve all tasks with the specified concurrency", async () => { 5 | const tasks = [ 6 | () => Promise.resolve(1), 7 | () => Promise.resolve(2), 8 | () => Promise.resolve(3), 9 | ]; 10 | const concurrency = 2; 11 | const results = await promiseAllWithConcurrency(tasks, concurrency); 12 | expect(results).toEqual([1, 2, 3]); 13 | }); 14 | 15 | it("should handle tasks that resolve at different times", async () => { 16 | const tasks = [ 17 | () => new Promise((resolve) => setTimeout(() => resolve(1), 100)), 18 | () => new Promise((resolve) => setTimeout(() => resolve(2), 50)), 19 | () => new Promise((resolve) => setTimeout(() => resolve(3), 150)), 20 | ]; 21 | const concurrency = 2; 22 | const results = await promiseAllWithConcurrency(tasks, concurrency); 23 | expect(results).toEqual([1, 2, 3]); 24 | }); 25 | 26 | it("should handle tasks that reject", async () => { 27 | const tasks = [ 28 | () => Promise.resolve(1), 29 | () => Promise.reject(new Error("Task failed")), 30 | () => Promise.resolve(3), 31 | ]; 32 | const concurrency = 2; 33 | await expect(promiseAllWithConcurrency(tasks, concurrency)).rejects.toThrow( 34 | "Task failed", 35 | ); 36 | }); 37 | 38 | it("should respect the concurrency limit", async () => { 39 | const tasks = [ 40 | () => new Promise((resolve) => setTimeout(() => resolve(1), 100)), 41 | () => new Promise((resolve) => setTimeout(() => resolve(2), 100)), 42 | () => new Promise((resolve) => setTimeout(() => resolve(3), 100)), 43 | () => new Promise((resolve) => setTimeout(() => resolve(4), 100)), 44 | ]; 45 | const concurrency = 2; 46 | const start = Date.now(); 47 | await promiseAllWithConcurrency(tasks, concurrency); 48 | const duration = Date.now() - start; 49 | expect(duration).toBeGreaterThanOrEqual(200); 50 | }); 51 | }); 52 | -------------------------------------------------------------------------------- /src/lib/async-utils.ts: -------------------------------------------------------------------------------- 1 | export const promiseAllWithConcurrency = async ( 2 | tasks: (() => Promise)[], 3 | concurrency: number, 4 | ): Promise => { 5 | const results: T[] = []; 6 | const executing: Promise[] = []; 7 | 8 | for (const task of tasks) { 9 | const promise = task().then((result) => { 10 | executing.splice(executing.indexOf(promise), 1); // Remove from queue 11 | return result; 12 | }); 13 | 14 | // Limit the number of concurrent promises 15 | results.push(await promise); 16 | executing.push(promise); 17 | 18 | if (executing.length >= concurrency) { 19 | await Promise.race(executing); // Wait for one to finish 20 | } 21 | } 22 | 23 | return results; 24 | }; 25 | -------------------------------------------------------------------------------- /src/lib/init-config.ts: -------------------------------------------------------------------------------- 1 | import dotenv from "dotenv"; 2 | import path from 
"path"; 3 | 4 | // Load the .env file 5 | // This allows us to run any file as an entry point from any working directory 6 | export const initConfig = () => { 7 | const envPath = path.resolve(__dirname, "../..", ".env"); 8 | dotenv.config({ path: envPath }); 9 | }; 10 | -------------------------------------------------------------------------------- /src/log.ts: -------------------------------------------------------------------------------- 1 | import { pino } from "pino"; 2 | import process from "node:process"; 3 | import { LogLevel } from "@slack/bolt"; 4 | 5 | // Allows to override log level 6 | const logLevel = 7 | process.env.LOG_LEVEL || process.env.NODE_ENV === "production" 8 | ? "info" 9 | : "debug"; 10 | 11 | export const log = pino({ 12 | level: logLevel, 13 | transport: { 14 | target: "pino-logfmt", 15 | options: { 16 | flattenNestedObjects: true, 17 | convertToSnakeCase: true, 18 | includeLevelLabel: true, 19 | formatTime: true, 20 | }, 21 | }, 22 | }); 23 | 24 | export const pinoBoltLogger = { 25 | setLevel: (level: LogLevel) => { 26 | switch (level) { 27 | case LogLevel.DEBUG: 28 | log.level = "debug"; 29 | break; 30 | case LogLevel.INFO: 31 | log.level = "info"; 32 | break; 33 | case LogLevel.WARN: 34 | log.level = "warn"; 35 | break; 36 | case LogLevel.ERROR: 37 | log.level = "error"; 38 | break; 39 | } 40 | }, 41 | getLevel: () => { 42 | switch (log.level) { 43 | case "trace": 44 | return LogLevel.DEBUG; 45 | case "debug": 46 | return LogLevel.DEBUG; 47 | case "info": 48 | return LogLevel.INFO; 49 | case "warn": 50 | return LogLevel.WARN; 51 | case "error": 52 | return LogLevel.ERROR; 53 | case "fatal": 54 | return LogLevel.ERROR; 55 | default: 56 | return LogLevel.INFO; 57 | } 58 | }, 59 | setName: () => {}, 60 | debug: (...msgs: unknown[]) => log.debug(msgs), 61 | info: (...msgs: unknown[]) => log.info(msgs), 62 | warn: (...msgs: unknown[]) => log.warn(msgs), 63 | error: (...msgs: unknown[]) => log.error(msgs), 64 | }; 65 | -------------------------------------------------------------------------------- /src/prisma.ts: -------------------------------------------------------------------------------- 1 | import { PrismaClient } from "@prisma/client"; 2 | 3 | export const prisma = new PrismaClient({ 4 | datasourceUrl: process.env.DATABASE_URL, 5 | }); 6 | -------------------------------------------------------------------------------- /src/prompts/checkly-data.ts: -------------------------------------------------------------------------------- 1 | import { ChecklyClient } from "../checkly/checklyclient"; 2 | import { CheckResult, ErrorMessage } from "../checkly/models"; 3 | 4 | export function last1h(date: Date = new Date()) { 5 | return { 6 | from: new Date(date.getTime() - 60 * 60 * 1000), 7 | to: date, 8 | }; 9 | } 10 | 11 | export const last24h = (date: Date = new Date()) => { 12 | return { 13 | from: new Date(date.getTime() - 24 * 60 * 60 * 1000), 14 | to: date, 15 | }; 16 | }; 17 | 18 | export function last30d(date: Date = new Date()) { 19 | return { 20 | from: new Date(date.getTime() - 30 * 24 * 60 * 60 * 1000), 21 | to: date, 22 | }; 23 | } 24 | 25 | export async function fetchCheckResults( 26 | checkly: ChecklyClient, 27 | { 28 | checkId, 29 | from, 30 | to, 31 | }: { 32 | checkId: string; 33 | from?: Date; 34 | to?: Date; 35 | }, 36 | ) { 37 | return await checkly.getCheckResultsByCheckId(checkId, { 38 | resultType: "FINAL", 39 | from: from ?? new Date(Date.now() - 30 * 24 * 60 * 60 * 1000), 40 | to: to ?? 
new Date(), 41 | limit: 100, 42 | }); 43 | } 44 | 45 | export function summarizeCheckResult(checkResult: CheckResult) { 46 | const error = getErrorMessageFromCheckResult(checkResult); 47 | return { 48 | id: checkResult.id, 49 | sequenceId: checkResult.sequenceId, 50 | resultType: checkResult.resultType, 51 | startedAt: checkResult.startedAt, 52 | location: checkResult.runLocation, 53 | attempts: checkResult.attempts, 54 | error: error.split("\n")[0], 55 | }; 56 | } 57 | 58 | export function getErrorMessageFromCheckResult( 59 | checkResult: CheckResult, 60 | ): string { 61 | if (checkResult.apiCheckResult) { 62 | return getErrorMessageFromApiError(checkResult); 63 | } 64 | if (checkResult.multiStepCheckResult) { 65 | return getErrorMessageFromResult(checkResult.multiStepCheckResult); 66 | } 67 | if (checkResult.browserCheckResult) { 68 | return getErrorMessageFromResult(checkResult.browserCheckResult); 69 | } 70 | 71 | throw new Error("Unsupported Check Result Type"); 72 | } 73 | 74 | export function getErrorMessageFromResult(result: { 75 | errors?: ErrorMessage[]; 76 | }): string { 77 | const errorFromMessages = result.errors 78 | ?.map((e) => { 79 | if (typeof e === "string") { 80 | return e; 81 | } 82 | return e.message; 83 | }) 84 | .find((e) => !!e); 85 | 86 | // TODO public API is not yet returning scheduling errors. We can deal with this later 87 | return errorFromMessages || "Scheduling error"; // Fallback to scheduling issues 88 | } 89 | 90 | export function getErrorMessageFromApiError(checkResult: CheckResult): string { 91 | const assertionErrors = 92 | checkResult.apiCheckResult?.assertions 93 | ?.filter((a) => (a.error ? a.error : null)) 94 | ?.map((a) => a.error) 95 | ?.join("\n") || ""; 96 | if (assertionErrors.trim()) { 97 | return assertionErrors.trim(); 98 | } 99 | 100 | const overMaxResponseTime = checkResult.overMaxResponseTime; 101 | if (overMaxResponseTime) { 102 | return "Response time over max response time"; 103 | } 104 | 105 | const requestError = checkResult.apiCheckResult?.requestError; 106 | if (requestError) { 107 | return requestError; 108 | } 109 | 110 | const setupErrors = checkResult.apiCheckResult?.jobLog?.setup?.filter( 111 | (log) => log.level === "ERROR", 112 | ); 113 | if (setupErrors?.length) { 114 | return setupErrors[setupErrors.length - 1].msg; 115 | } 116 | const teardownErrors = checkResult.apiCheckResult?.jobLog?.teardown?.filter( 117 | (log) => log.level === "ERROR", 118 | ); 119 | if (teardownErrors?.length) { 120 | return teardownErrors[teardownErrors.length - 1].msg; 121 | } 122 | 123 | return "Unable to extract error message"; 124 | } 125 | -------------------------------------------------------------------------------- /src/prompts/checkly.eval.spec.fixtures/heatmaps/heatmap-001.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/checkly/srebot/2b0d1a5616c05feb48a4b5b7ee4731dc291a5567/src/prompts/checkly.eval.spec.fixtures/heatmaps/heatmap-001.png -------------------------------------------------------------------------------- /src/prompts/checkly.eval.spec.fixtures/heatmaps/heatmap-002.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/checkly/srebot/2b0d1a5616c05feb48a4b5b7ee4731dc291a5567/src/prompts/checkly.eval.spec.fixtures/heatmaps/heatmap-002.png -------------------------------------------------------------------------------- /src/prompts/checkly.eval.spec.fixtures/heatmaps/heatmap-003.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/checkly/srebot/2b0d1a5616c05feb48a4b5b7ee4731dc291a5567/src/prompts/checkly.eval.spec.fixtures/heatmaps/heatmap-003.png -------------------------------------------------------------------------------- /src/prompts/checkly.eval.spec.fixtures/heatmaps/heatmap-004.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/checkly/srebot/2b0d1a5616c05feb48a4b5b7ee4731dc291a5567/src/prompts/checkly.eval.spec.fixtures/heatmaps/heatmap-004.png -------------------------------------------------------------------------------- /src/prompts/checkly.eval.spec.fixtures/should find similar errors for check/005fd7bd-81f9-43e0-bcc5-4ac57002b8cd/expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "groups": [ 3 | { 4 | "errorMessage": "Test timeout of 120000ms exceeded.", 5 | "checkResults": [ 6 | "90b2729a-5dff-44db-aac0-37c142e38574", 7 | "4a9f77ad-e509-4927-a344-63e188a5407e", 8 | "af51cebb-6982-4f19-84c5-4fdeb101a5c9" 9 | ] 10 | } 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /src/prompts/checkly.eval.spec.fixtures/should find similar errors for check/005fd7bd-81f9-43e0-bcc5-4ac57002b8cd/result-summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "check": "Edit browser check", 3 | "interval": { 4 | "from": 1737901683655, 5 | "to": 1740493683655 6 | }, 7 | "frequency": 10, 8 | "locations": ["eu-west-1", "us-east-1"], 9 | "results": [ 10 | { 11 | "id": "90b2729a-5dff-44db-aac0-37c142e38574", 12 | "sequenceId": "453c04d3-b348-4fc6-91f5-ac9455f68806", 13 | "resultType": "ATTEMPT", 14 | "startedAt": "2025-02-22T14:25:48.026Z", 15 | "location": "eu-west-1", 16 | "attempts": 1, 17 | "error": { 18 | "stack": "Test timeout of 120000ms exceeded.", 19 | "message": "Test timeout of 120000ms exceeded." 20 | } 21 | }, 22 | { 23 | "id": "4a9f77ad-e509-4927-a344-63e188a5407e", 24 | "sequenceId": "17e7dbc5-3f18-4996-ab54-4704902d0926", 25 | "resultType": "ATTEMPT", 26 | "startedAt": "2025-02-12T02:52:04.323Z", 27 | "location": "eu-west-1", 28 | "attempts": 1, 29 | "error": { 30 | "stack": "Test timeout of 120000ms exceeded.", 31 | "message": "Test timeout of 120000ms exceeded." 32 | } 33 | }, 34 | { 35 | "id": "af51cebb-6982-4f19-84c5-4fdeb101a5c9", 36 | "sequenceId": "e5208dd8-7cd0-4a2f-80b4-32765c721646", 37 | "resultType": "ATTEMPT", 38 | "startedAt": "2025-01-30T09:51:33.346Z", 39 | "location": "us-east-1", 40 | "attempts": 1, 41 | "error": { 42 | "stack": "Test timeout of 120000ms exceeded.", 43 | "message": "Test timeout of 120000ms exceeded." 
44 | } 45 | } 46 | ] 47 | } 48 | -------------------------------------------------------------------------------- /src/prompts/checkly.eval.spec.fixtures/should find similar errors for check/3c4264bc-4355-4f7f-ba6c-5d79a647e0bc/expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "groups": [ 3 | { 4 | "errorMessage": "Error: Timed out 30000ms waiting for expect(locator).toBeVisible()", 5 | "checkResults": ["505aa5fb-8ae1-4bc7-a2cd-b06d4fd47956"] 6 | }, 7 | { 8 | "errorMessage": "Test timeout of 120000ms exceeded.", 9 | "checkResults": [ 10 | "6df6099b-74e6-4df0-9329-66e55c377970", 11 | "864e481d-4176-4991-918e-37e06d4f00f8" 12 | ] 13 | }, 14 | { 15 | "errorMessage": "Error: Timed out 30000ms waiting for expect(locator).toHaveTitle(expected)", 16 | "checkResults": [ 17 | "ee7cb36b-e3c3-418b-aa0c-c439f6ccdd83", 18 | "fdaa8c32-5862-4a60-844f-0c41b3c11bd9", 19 | "54678ac1-2285-41db-b743-f30c6a6b3263", 20 | "46a6573b-1654-4037-b239-5382afc2869f", 21 | "1cd67c28-87b3-4fc4-8abb-200b1a9eb3cd", 22 | "84fcb2c0-1233-4dd1-abb7-16936a6b16a0", 23 | "38a47bf1-400f-4e42-bae6-94926b00ec06", 24 | "3a44b559-27e4-4076-b327-eefc2a6a6226", 25 | "764ce63e-23a5-452b-8050-4f5011725754", 26 | "120a30a6-f2e8-4965-a724-a71e62141bd8", 27 | "67449d18-90aa-4171-8f9e-8bfef228630f", 28 | "7021109c-1882-4818-b283-f0f0847a4649", 29 | "2e31c00b-1900-4db3-911d-7906767f8567", 30 | "d1315faa-1173-4931-acc5-90d04688c9e7", 31 | "7dbee5b5-00fa-4405-bdc0-277190f6ec83", 32 | "2f1b8b21-272d-430d-923a-2467e0b37aa0", 33 | "10a32f0b-2015-42a3-b855-abd255f38f66", 34 | "a922620e-b67b-41b0-aac8-66198804b2b1", 35 | "acd627aa-5fab-4d65-bcdc-266d9bb9c78e", 36 | "39a36d08-8467-461a-900c-52f2cbb1c3ec", 37 | "cba3a795-8c68-485b-bc00-03e8a15d961e", 38 | "484a38c5-d75e-43a0-a480-c7ba3f45e716", 39 | "c32d4b13-2eb9-41f0-8394-0c3a43af9581", 40 | "c70fd597-277e-4639-b09b-70b9dd562bae", 41 | "3758b511-8fd9-405d-8561-e5cb63360706", 42 | "acc1ecee-d368-4172-8c85-8d38a0b2da20", 43 | "c249e1c5-efda-4a5e-b1fd-49faea8453ca", 44 | "4712dc00-4c2f-4c21-b8be-d1201dc64013" 45 | ] 46 | } 47 | ] 48 | } 49 | -------------------------------------------------------------------------------- /src/prompts/checkly.eval.spec.fixtures/should find similar errors for check/5ee8e373-f204-45e4-b193-d652dba7e928/expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "groups": [ 3 | { 4 | "errorMessage": "Test timeout of 120000ms exceeded.", 5 | "checkResults": [ 6 | "28c5806b-efce-4429-908b-434f88e6be4e", 7 | "f013732a-c1ef-4990-ac3d-673878a8dc8f", 8 | "a2f7f995-536f-4b1e-8d75-dcce38dbab33" 9 | ] 10 | }, 11 | { 12 | "errorMessage": "Error: Timed out 30000ms waiting for expect(locator).toBeVisible()\n\nLocator: getByRole('button', { name: /switch accounts/i })\nExpected: visible\nReceived: hidden\nCall log:\n - expect.toBeVisible with timeout 30000ms\n - waiting for getByRole('button', { name: /switch accounts/i })", 13 | "checkResults": ["578495ed-4169-4dad-ade3-71c7b6804370"] 14 | }, 15 | { 16 | "errorMessage": "Error: Timed out 30000ms waiting for expect(locator).toBeVisible()\n\nLocator: getByRole('heading').filter({ hasText: 'No checks or groups matched your filters' })\nExpected: visible\nReceived: hidden\nCall log:\n - expect.toBeVisible with timeout 30000ms\n - waiting for getByRole('heading').filter({ hasText: 'No checks or groups matched your filters' })", 17 | "checkResults": [ 18 | "47a8bc46-2986-4ddd-83e3-63d94d3bab03", 19 | "9d5d04b9-f884-4f0f-b4ac-6587555de525" 
20 | ] 21 | } 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- /src/prompts/checkly.eval.spec.fixtures/should find similar errors for check/683f229f-48d2-4b97-9161-db029f9d9a32/expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "groups": [ 3 | { 4 | "errorMessage": "undefined", 5 | "checkResults": [ 6 | "13f6aa82-61b9-4aed-8e15-827041fde24b", 7 | "e37efec7-f409-4627-840d-fd28b195bb30", 8 | "8ade2538-5500-422a-9f9a-28b18613f683", 9 | "3f28e510-7d6e-4bc6-8c63-af27f9a4e8ac", 10 | "d3d12657-57e8-42b6-9c90-11c5529f40e7" 11 | ] 12 | }, 13 | { 14 | "errorMessage": "TimeoutError: locator.fill: Timeout 10000ms exceeded.\nCall log:\n - waiting for getByRole('textbox', { name: 'email' })", 15 | "checkResults": [ 16 | "b7b2fa77-298b-486b-8214-035f601d1aec", 17 | "a22e3117-d5f7-41a5-9919-16889c5e3309", 18 | "e07eff3a-5672-4e4d-aa33-9a456f9ffa0f" 19 | ] 20 | }, 21 | { 22 | "errorMessage": "TimeoutError: locator.hover: Timeout 10000ms exceeded.\nCall log:\n - waiting for locator('[data-testid=home-dashboard-table]').getByRole('row').filter({ has: locator('span').getByText('API check E2E test heo413sk', { exact: true }) }).getByRole('button').locator(':scope[aria-haspopup=menu]')", 23 | "checkResults": ["a709fff6-3fd2-4e04-ba4a-90ac1a72f447"] 24 | } 25 | ] 26 | } 27 | -------------------------------------------------------------------------------- /src/prompts/checkly.eval.spec.fixtures/should find similar errors for check/84d25fb6-a6a7-4127-9c99-64cd3d754817/expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "groups": [ 3 | { 4 | "errorMessage": "Error: Timed out 30000ms waiting for expect(locator).toBeVisible() - Locator: getByRole('button', { name: /switch accounts/i })", 5 | "checkResults": [ 6 | "67c975c0-4f8b-4567-a271-22e1878943da", 7 | "9158c7dd-ccc0-47a9-9414-ca518e4ac364" 8 | ] 9 | }, 10 | { 11 | "errorMessage": "Error: Timed out 30000ms waiting for expect(locator).toBeVisible() - Locator: getByText('Export to code')", 12 | "checkResults": [ 13 | "c568835f-7775-48c5-93b5-6f7070bf4680", 14 | "8dfdea30-8436-4366-b630-61171f3769d7" 15 | ] 16 | }, 17 | { 18 | "errorMessage": "Test timeout of 120000ms exceeded.", 19 | "checkResults": ["a2a13969-4c2f-45a5-8f98-c55d43eadb6b"] 20 | }, 21 | { 22 | "errorMessage": "undefined", 23 | "checkResults": ["1dba540f-1b7b-4e00-8652-7c874a3c18e7"] 24 | } 25 | ] 26 | } 27 | -------------------------------------------------------------------------------- /src/prompts/checkly.eval.spec.fixtures/should find similar errors for check/e9ac8920-ee84-40aa-bd67-f3d04babc3db/expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "groups": [ 3 | { 4 | "errorMessage": "Test timeout of 120000ms exceeded.", 5 | "checkResults": [ 6 | "e13cd594-6c08-4098-8f66-96f374c82b34", 7 | "5a62ba25-efab-469b-81d4-a47fe0db31ff", 8 | "17b1e8f6-fe18-4332-9aca-ee430b20e76e", 9 | "d0a9cafe-8b48-416f-b6db-463b995763c5", 10 | "1758ad06-6943-4998-9b4e-feb0ae2d21bd", 11 | "b16dafdd-84fa-4b06-8cc7-aeaa3e2ae516", 12 | "ab123006-d114-4ce0-a0f6-64a286e34f17", 13 | "dd176d51-54c6-49b1-9901-0bbae70fdfd3", 14 | "eb8c8385-2916-4836-8a1e-f8fdab5796c3", 15 | "f0b8efe2-835e-40ef-8059-8ec5744c1a77", 16 | "cb4186ca-cbfd-447e-ae04-86ed8829dfda", 17 | "5f109aca-e735-4679-8766-fa66be1c1b2d", 18 | "2cb00c30-b0d7-459f-8bce-03ce1a206f0e", 19 | "9ea1e360-bc58-4f14-b282-1d68e2d4fb67", 20 | 
"a9ace8d5-e939-467b-b832-60d9275a734d", 21 | "a6e7cd40-82bd-4ce9-a1fe-ce4b64723dd9", 22 | "524d4600-21b2-4fd1-b454-d82f496124d6", 23 | "d19c0afc-acd5-46d1-aa97-6f5b7715180e", 24 | "3d572b69-1fc0-4f91-9dfa-6a49b760f38f", 25 | "e1c97d3f-1019-4639-ac33-000c661d4b8a", 26 | "23cd993a-2cfd-4fa0-9754-7356137771dd", 27 | "95485458-f12b-461a-b14f-dc8fac13f8bb", 28 | "3f4c9e57-0ab4-4e20-a9de-77d7a94ded7f", 29 | "7d319217-e892-46af-a21d-53a3ff256111", 30 | "0a73047e-a233-4639-9d2d-eecc3acc0f78", 31 | "abae38a0-287b-4f40-8ff6-19df62dddbe3" 32 | ] 33 | }, 34 | { 35 | "errorMessage": "Error: Timed out 30000ms waiting for expect(locator).toBeVisible()\n\nLocator: getByRole('button', { name: /switch accounts/i })\nExpected: visible\nReceived: hidden\nCall log:\n - expect.toBeVisible with timeout 30000ms\n - waiting for getByRole('button', { name: /switch accounts/i })", 36 | "checkResults": [ 37 | "57e8031b-3043-4ddc-916d-b4e4d51a6a0e", 38 | "05fbe7c2-474a-417b-9498-bf222338d46d", 39 | "2c54f163-9898-465c-8254-66e0ef4baa9d" 40 | ] 41 | } 42 | ] 43 | } 44 | -------------------------------------------------------------------------------- /src/prompts/checkly.eval.spec.fixtures/should find similar errors for check/generate-data.ts: -------------------------------------------------------------------------------- 1 | import { ChecklyClient } from "../../../checkly/checklyclient"; 2 | import fs from "fs"; 3 | 4 | const groupId = 394650; 5 | 6 | async function main() { 7 | const client = new ChecklyClient(); 8 | let checks = await client.getChecksByGroup(groupId); 9 | 10 | const intervalStart = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000); 11 | const intervalEnd = new Date(); 12 | 13 | // Create base directory for this group 14 | const baseDir = `results/groups/${groupId}`; 15 | if (!fs.existsSync(baseDir)) { 16 | fs.mkdirSync(baseDir, { recursive: true }); 17 | } else { 18 | fs.rmSync(baseDir, { recursive: true, force: true }); 19 | fs.mkdirSync(baseDir, { recursive: true }); 20 | } 21 | 22 | const filteredChecks = checks; 23 | for (const check of filteredChecks) { 24 | const checkDir = `${baseDir}/checks/${check.id}`; 25 | fs.mkdirSync(checkDir, { recursive: true }); 26 | 27 | fs.writeFileSync(`${checkDir}/check.json`, JSON.stringify(check, null, 2)); 28 | 29 | // Get failed results from last 30 days 30 | const resultSummary = new Array(); 31 | const results = await client.getCheckResultsByCheckId(check.id, { 32 | hasFailures: true, 33 | resultType: "ALL", 34 | from: new Date(intervalStart), 35 | to: new Date(intervalEnd), 36 | limit: 100, 37 | }); 38 | console.log("RESULTS", results.length); 39 | if (results.length == 0) { 40 | console.log("No results found for check", check.id); 41 | continue; 42 | } 43 | 44 | // Create results directory for this check 45 | const resultsDir = `${checkDir}/results`; 46 | fs.mkdirSync(resultsDir, { recursive: true }); 47 | 48 | // Store each result in a separate file 49 | for (const result of results) { 50 | const resultPath = `${checkDir}/results/${result.id}.json`; 51 | fs.writeFileSync(resultPath, JSON.stringify(result, null, 2)); 52 | resultSummary.push({ 53 | id: result.id, 54 | sequenceId: result.sequenceId, 55 | resultType: result.resultType, 56 | startedAt: result.startedAt, 57 | location: result.runLocation, 58 | attempts: result.attempts, 59 | error: result.browserCheckResult?.errors[0], 60 | }); 61 | } 62 | 63 | fs.writeFileSync( 64 | `${checkDir}/result-summary.json`, 65 | JSON.stringify( 66 | { 67 | check: check.name, 68 | intervalStart, 69 | intervalEnd, 70 | 
frequency: check.frequency, 71 | locations: check.locations, 72 | results: resultSummary, 73 | }, 74 | null, 75 | 2, 76 | ), 77 | ); 78 | } 79 | } 80 | 81 | main(); 82 | -------------------------------------------------------------------------------- /src/prompts/checkly.script.ts: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env ts-node 2 | 3 | import * as dataForge from "data-forge"; 4 | import "data-forge-fs"; 5 | 6 | import { findCheckResultsAggregated } from "../db/check-results"; 7 | 8 | import { config } from "dotenv"; 9 | import { resolve } from "path"; 10 | import { last24h } from "./checkly-data"; 11 | import { accountSummary } from "../slackbot/accountSummaryCommandHandler"; 12 | // Load environment variables from .env file 13 | config({ path: resolve(__dirname, "../../.env") }); 14 | 15 | const accountId = process.env.CHECKLY_ACCOUNT_ID!; 16 | console.log("ACCOUNT ID", accountId); 17 | 18 | const checkId = "3fbcec6b-fba0-4ca6-bb2e-b7c64c2f1e9f"; 19 | console.log("CHECK ID", checkId); 20 | 21 | const runLocation = "us-east-1"; 22 | console.log("RUN LOCATION", runLocation); 23 | 24 | const now = new Date(); 25 | const from = new Date( 26 | now.getFullYear(), 27 | now.getMonth(), 28 | now.getDate() - 1, 29 | 0, 30 | 0, 31 | 0, 32 | ); 33 | const to = new Date(now.getFullYear(), now.getMonth(), now.getDate(), 0, 0, 0); 34 | const interval = { from, to }; 35 | console.log("INTERVAL", interval); 36 | 37 | async function main() { 38 | //todo make this a full blown checkly cli 39 | const accountSummaryResult = await accountSummary(accountId, interval); 40 | console.log( 41 | "ACCOUNT SUMMARY RESULT", 42 | JSON.stringify(accountSummaryResult, null, 2), 43 | ); 44 | 45 | // const aggregatedCheckResults = await findCheckResultsAggregated({ 46 | // accountId: accountId, 47 | // checkId: checkId, 48 | // from: interval.from, 49 | // to: interval.to, 50 | // }); 51 | 52 | // const aggregatedCheckResultsInRegion = aggregatedCheckResults.filter( 53 | // (r) => r.runLocation === runLocation, 54 | // ); 55 | 56 | // const result = await summarizeCheckResultsToLabeledCheckStatus( 57 | // aggregatedCheckResultsInRegion, 58 | // ); 59 | 60 | // const serializedResult = result.toArray().map((r) => ({ 61 | // ...r, 62 | // changePoints: JSON.stringify(r.changePoints), 63 | // })); 64 | // console.log( 65 | // "SUMMARIZED CHECK RESULTS", 66 | // JSON.stringify(serializedResult, null, 2), 67 | // ); 68 | 69 | // const { text: summary } = await generateText( 70 | // summarizeMultipleChecksStatus(result.toArray()), 71 | // ); 72 | 73 | // console.log( 74 | // result 75 | // .toArray() 76 | // .map((r) => JSON.stringify(r)) 77 | // .join("\n"), 78 | // ); 79 | // console.log(summary); 80 | } 81 | 82 | main().then(() => { 83 | process.exit(0); 84 | }); 85 | -------------------------------------------------------------------------------- /src/prompts/common.ts: -------------------------------------------------------------------------------- 1 | import { openai } from "@ai-sdk/openai"; 2 | import { CoreMessage, LanguageModel, LanguageModelV1, Message } from "ai"; 3 | import { trace } from "@opentelemetry/api"; 4 | import { z, ZodSchema } from "zod"; 5 | export const model = openai("gpt-4o"); 6 | 7 | export interface PromptConfig { 8 | model: LanguageModel; 9 | temperature?: number; 10 | maxTokens?: number; 11 | system?: string; 12 | experimental_telemetry?: { 13 | isEnabled: boolean; 14 | functionId: string; 15 | }; 16 | } 17 | 18 | export type 
PromptDefinition< 19 | T extends "array" | "object" | "enum" | "no-schema" = "object", 20 | > = PromptConfig & { 21 | prompt?: string; 22 | messages?: CoreMessage[]; 23 | schema: z.Schema; 24 | output: T; 25 | }; 26 | 27 | export type PromptDefinitionForText = Omit< 28 | PromptDefinition, 29 | "schema" | "output" 30 | >; 31 | 32 | export function promptConfig(id: string, config?: Partial) { 33 | return { 34 | model, 35 | experimental_telemetry: { 36 | isEnabled: true, 37 | functionId: id, 38 | metadata: { 39 | ...langfuseTraceIdFromOtel(), 40 | }, 41 | }, 42 | ...config, 43 | }; 44 | } 45 | 46 | export function definePrompt< 47 | T extends "array" | "object" | "enum" | "no-schema" = "object", 48 | >( 49 | id: string, 50 | prompt: string, 51 | schema: ZodSchema, 52 | config?: Partial & { output?: T }, 53 | ): PromptDefinition & { output: T } { 54 | return { 55 | output: "object" as T, // type assertion here since we know config.output will override if provided 56 | prompt, 57 | schema, 58 | ...promptConfig(id, config), 59 | }; 60 | } 61 | 62 | export function defineMessagesPrompt< 63 | T extends "array" | "object" | "enum" | "no-schema" = "object", 64 | >( 65 | id: string, 66 | messages: CoreMessage[], 67 | schema: ZodSchema, 68 | config?: Partial & { output?: T }, 69 | ): PromptDefinition & { output: T } { 70 | return { 71 | output: "object" as T, // type assertion here since we know config.output will override if provided 72 | messages, 73 | schema, 74 | ...promptConfig(id, config), 75 | }; 76 | } 77 | 78 | function langfuseTraceIdFromOtel() { 79 | const activeSpan = trace.getActiveSpan(); 80 | if (!activeSpan) return null; 81 | 82 | const context = activeSpan.spanContext(); 83 | return { 84 | langfuseTraceId: context.traceId, 85 | langfuseUpdateParent: false, // Do not update the parent trace with execution results 86 | }; 87 | } 88 | -------------------------------------------------------------------------------- /src/prompts/github.eval.spec.ts: -------------------------------------------------------------------------------- 1 | import dotenv from "dotenv"; 2 | import { createOpenAI, OpenAIProvider } from "@ai-sdk/openai"; 3 | import GitHubAPI from "../github/github"; 4 | import { startLangfuseTelemetrySDK } from "../langfuse"; 5 | import { generateReleaseSummaryPrompt } from "./github"; 6 | import { generateText } from "ai"; 7 | import { expect } from "@jest/globals"; 8 | import { Summary } from "./toScoreMatcher"; 9 | 10 | startLangfuseTelemetrySDK(); 11 | 12 | dotenv.config(); 13 | 14 | const OPENAI_API_KEY = process.env.OPENAI_API_KEY!; 15 | const CHECKLY_GITHUB_TOKEN = process.env.CHECKLY_GITHUB_TOKEN!; 16 | 17 | jest.setTimeout(120000); 18 | 19 | describe("Github Prompt Tests", () => { 20 | let openai: OpenAIProvider; 21 | let github: GitHubAPI; 22 | 23 | beforeAll(() => { 24 | openai = createOpenAI({ apiKey: OPENAI_API_KEY }); 25 | github = new GitHubAPI(CHECKLY_GITHUB_TOKEN); 26 | }); 27 | 28 | it("should summarize releases by prompt", async () => { 29 | const { baseTag, headTag } = { 30 | baseTag: "2025-02-07-15.59.26", 31 | headTag: "2025-02-11-17.04.27", 32 | }; 33 | 34 | const input = { 35 | commits: [ 36 | { 37 | sha: "9f2db4a2cbf4439fe6afd2bfd31cf4a35b1b3047", 38 | author: "Michelle Liebheit", 39 | message: "feat: update vercel onboarding flow [sc-00] (#7852)", 40 | }, 41 | { 42 | sha: "08578b470e285ed55d544b5338f3c7776e0f63aa", 43 | author: "Sergii Bezliudnyi", 44 | message: "feat: reduce response data comparison options (#7854)", 45 | }, 46 | { 47 | sha: 
"15d6190cd42d68df6b5b207c59d11765067a6b69", 48 | author: "Sergii Bezliudnyi", 49 | message: 50 | "chore: bump deps to fix nodegyp errs (#7853)\n\n* chore: bump deps to fix nodegyp errs\n\n* chore: attempt lockfile update\n\n---------\n\nCo-authored-by: Javier Pérez ", 51 | }, 52 | { 53 | sha: "356749fc14a99286d7f047f1cef00275a8de490e", 54 | author: "Javier Pérez", 55 | message: 56 | 'refactor: drop vueuse "usescripttag" to load featurebase (#7859)', 57 | }, 58 | { 59 | sha: "e9599b6706260cb707916f8a47b78a36730380d8", 60 | author: "Pilar", 61 | message: "fix: remove beta tag [sc-00] (#7860)", 62 | }, 63 | ], 64 | }; 65 | 66 | const { text: summary } = await generateText( 67 | generateReleaseSummaryPrompt(baseTag, headTag, input), 68 | ); 69 | 70 | const expected = ` 71 | feat: update vercel onboarding flow [sc-00] (#7852) 72 | 73 | Commit: 9f2db4a2cbf4439fe6afd2bfd31cf4a35b1b3047 74 | Author: Michelle Liebheit 75 | Summary: Updated the Vercel onboarding flow. 76 | feat: reduce response data comparison options (#7854) 77 | 78 | Commit: 08578b470e285ed55d544b5338f3c7776e0f63aa 79 | Author: Sergii Bezliudnyi 80 | Summary: Reduced the options for response data comparison,. 81 | chore: bump deps to fix nodegyp errs (#7853) 82 | 83 | Commit: 15d6190cd42d68df6b5b207c59d11765067a6b69 84 | Author: Sergii Bezliudnyi 85 | Summary: Bumped dependencies to resolve nodegyp errors. 86 | refactor: drop vueuse "usescripttag" to load featurebase (#7859) 87 | 88 | Commit: 356749fc14a99286d7f047f1cef00275a8de490e 89 | Author: Javier Pérez 90 | Summary: Refactored code to remove the use of vueuse "usescripttag" for loading featurebase. 91 | fix: remove beta tag [sc-00] (#7860)`; 92 | 93 | await expect(summary).toScorePerfect( 94 | Summary({ 95 | expected, 96 | input: JSON.stringify(input), 97 | }), 98 | ); 99 | }); 100 | }); 101 | -------------------------------------------------------------------------------- /src/prompts/search.ts: -------------------------------------------------------------------------------- 1 | import { JsonValue } from "@prisma/client/runtime/library"; 2 | import { PromptConfig, promptConfig } from "./common"; 3 | import { validObjectList, validString } from "./validation"; 4 | 5 | interface SearchContextRowForPrompt { 6 | key: string; 7 | value: JsonValue; 8 | } 9 | 10 | export function searchContextPrompt( 11 | query: string, 12 | contextRows: SearchContextRowForPrompt[], 13 | ): [string, PromptConfig] { 14 | validString.parse(query); 15 | validObjectList.parse(contextRows); 16 | 17 | const config = promptConfig("searchContext", { 18 | maxTokens: 1000, 19 | temperature: 0, 20 | }); 21 | 22 | return [ 23 | `You are an AI assistant tasked with searching through a given context based on a user's query. Your goal is to find and return the most relevant information from the context that relates to the query. 24 | 25 | Here is the context you will be searching through: 26 | 27 | ${contextRows.map((c) => c.key + ": " + JSON.stringify(c.value)).join("\n")} 28 | 29 | 30 | The user's query is: 31 | ${query} 32 | 33 | To complete this task, follow these steps: 34 | 35 | 1. Carefully read and analyze both the context and the query. 36 | 2. Identify key words, phrases, or concepts in the query that you should look for in the context. 37 | 3. Search through the context to find sections that are most relevant to the query. Consider both exact matches and semantically similar information. 38 | 4. 
Determine the relevance of each potential match by considering: 39 | - How closely it relates to the query 40 | - How completely it answers the query (if applicable) 41 | - The importance of the information in the context of the query 42 | 5. Select the most relevant section(s) of the context. If multiple sections are equally relevant, you may include more than one. 43 | 44 | Remember: 45 | - Stay focused on the query and only return information that is directly relevant. 46 | - Do not add any information that is not present in the given context. 47 | - If the query asks a specific question, prioritize information that directly answers that question. 48 | - Be concise in your explanations, but make sure they clearly justify the relevance of the selected text.`, 49 | config, 50 | ]; 51 | } 52 | -------------------------------------------------------------------------------- /src/prompts/slack.ts: -------------------------------------------------------------------------------- 1 | import { WebhookAlertDto } from "../checkly/alertDTO"; 2 | import { convertSlackTimestamp } from "../slackbot/utils"; 3 | import { definePrompt, PromptDefinition } from "./common"; 4 | import { validObjectList, validObject, validString } from "./validation"; 5 | import { z } from "zod"; 6 | 7 | export const slackFormatInstructions = `Format all output in Slack mrkdwn format. 8 | Generate Slack messages using the following style: *bold*, , _italics_, > quote, \`code\`, \`\`\`code block\`\`\`. 9 | It's important to use the correct syntax for the output to be rendered correctly. 10 | E.g. Important Link: .`; 11 | 12 | export interface SlackMsgForPrompt { 13 | plaintext: string; 14 | ts?: string; 15 | } 16 | 17 | /** 18 | * Formats a Slack message for inclusion in the prompt 19 | */ 20 | function formatSlackMessageForPrompt(msg: SlackMsgForPrompt): string { 21 | return `${convertSlackTimestamp(msg.ts!).toISOString()} Message: ${msg.plaintext}`; 22 | } 23 | 24 | /** 25 | * Schema for channel summary response 26 | */ 27 | const channelSummarySchema = z.object({ 28 | summary: z.string().describe("Concise summary of the channel context"), 29 | relevantLinks: z 30 | .array( 31 | z.object({ 32 | url: z.string(), 33 | title: z.string(), 34 | }), 35 | ) 36 | .describe( 37 | "Links that are relevant to the given question or channel summary", 38 | ), 39 | }); 40 | 41 | /** 42 | * Generates a prompt for analyzing Slack channel context 43 | */ 44 | export function channelSummaryPrompt( 45 | alert: WebhookAlertDto, 46 | messageHistory: SlackMsgForPrompt[], 47 | ): PromptDefinition { 48 | validObject.parse(alert); 49 | validObjectList.parse(messageHistory); 50 | 51 | const prompt = `You are a Slack channel context collector. Your task is to analyze the given message history based on a specific prompt and provide a concise summary of the relevant context. 52 | 53 | What are the recent events, discussions or relevant context related to the following alert? 54 | 55 | ${JSON.stringify({ 56 | title: alert.ALERT_TITLE, 57 | type: alert.ALERT_TYPE, 58 | name: alert.CHECK_NAME, 59 | runLocation: alert.RUN_LOCATION, 60 | responseTime: alert.RESPONSE_TIME, 61 | tags: alert.TAGS, 62 | })} 63 | 64 | Here is the message history from the Slack channel: 65 | 66 | ${messageHistory.map(formatSlackMessageForPrompt).join(" ")} 67 | 68 | 69 | To complete the task, follow these steps: 70 | 1. Carefully read through the entire message history. 71 | 2. Identify the main topics, themes, or discussions that are relevant to the prompt. 72 | 3. 
Create a concise summary of the channel's content related to the prompt, highlighting the most relevant and important information. 73 | 4. Your summary should NOT be longer than 3-5 sentences.`; 74 | 75 | return definePrompt("channelSummary", prompt, channelSummarySchema, { 76 | temperature: 0, 77 | }); 78 | } 79 | 80 | // Export these types for use in other files that may need them 81 | export type ChannelSummaryResponse = z.infer<typeof channelSummarySchema>; 82 | -------------------------------------------------------------------------------- /src/prompts/sre-assistant.ts: -------------------------------------------------------------------------------- 1 | import { slackFormatInstructions } from "./slack"; 2 | import { validString, validStringAllowEmpty } from "./validation"; 3 | 4 | export function generateSREAssistantPrompt( 5 | username: string, 6 | date: string, 7 | alertSummary: string, 8 | ): string { 9 | validString.parse(username); 10 | validString.parse(date); 11 | validStringAllowEmpty.parse(alertSummary); 12 | 13 | return `You are an AI-powered SRE Bot designed to assist in real-time incident management. Your primary goal is to reduce Mean Time To Resolution (MTTR) by automatically aggregating and analyzing contextual data, providing actionable insights, and guiding first responders effectively. 14 | 15 | CONSTITUTION: 16 | 1. Always prioritize accuracy and relevance in your insights and recommendations 17 | 2. Be concise but comprehensive in your explanations. Skip unnecessary details; deliver actionable insights suitable for experienced DevOps engineers. 18 | 3. Focus on providing actionable information that can help reduce MTTR 19 | 4. Load the context and examine it to understand the alert 20 | 5. The user is an experienced DevOps engineer. Don't overcomplicate it, focus on the context and provide actionable insights. They know what they are doing, don't worry about the details 21 | 6. Proactive Investigations: Automatically gather contextual data about the alert, such as related checks, logs, metrics, and recent system changes (e.g., releases, deployments, or config updates). Look for recent releases or changes within a relevant time window that could explain the failure. 22 | 7. Make active use of the tools (multiple times if needed) to get a holistic view of the situation 23 | 8. Generate super short, concise and insightful messages. Users are experts, skip the fluff, no yapping. 24 | 9. Context-Driven Analysis: prioritise referring to the available context, use tools for searching the context. No hallucinations, only facts. 25 | 10. Refer to the knowledge base to build a better understanding of the terminology, systems and the organisation you are working for. Assume that the users have good knowledge of the company, and do not proactively provide basic information unless explicitly asked. 26 | 27 | INTERACTION CONTEXT: 28 | Username: ${username} 29 | Date: ${date} 30 | 31 | OUTPUT FORMAT: 32 | ${slackFormatInstructions} 33 | 34 | ${alertSummary.trim().length > 0 ?
`SUMMARY:\n${alertSummary}` : ""}`; 35 | } 36 | -------------------------------------------------------------------------------- /src/prompts/summarizeCheckGoals.ts: -------------------------------------------------------------------------------- 1 | import { z } from "zod"; 2 | import { Check } from "../checkly/models"; 3 | import { CheckTable } from "../db/check"; 4 | import { formatMultipleChecks } from "./checkly"; 5 | import { promptConfig, PromptDefinition } from "./common"; 6 | 7 | const output_schema = z.object({ 8 | response: z 9 | .array( 10 | z.object({ 11 | header: z.string().describe("The header of the group, max 3 words"), 12 | description: z 13 | .string() 14 | .describe( 15 | "The detailed description of the feature monitored by the group, use up to 150 characters max", 16 | ), 17 | }), 18 | ) 19 | .describe("Groups of checks by their purpose max 5 groups"), 20 | }); 21 | 22 | export type MultipleChecksGoalResponse = z.infer<typeof output_schema>; 23 | 24 | export function summariseMultipleChecksGoal( 25 | checks: Check[] | CheckTable[], 26 | options: { maxTokens: number; extraContext?: string | null } = { 27 | maxTokens: 500, 28 | extraContext: null, 29 | }, 30 | ): PromptDefinition { 31 | const checksFormatted = formatMultipleChecks(checks); 32 | const maxTokens = options.maxTokens; 33 | 34 | let prompt = ` 35 | ### **Task** 36 | You are an expert SRE engineer. Analyze the following monitoring checks and provide a **high-level summary** of their **common goal** for another engineer. 37 | The target engineer is likely in an incident situation and needs to understand the purpose of the checks to resolve the issue. 38 | 39 | ### **Instructions** 40 | 1. Identify what user-facing feature(s) these checks are monitoring. 41 | 2. Do **not** focus on technical details (e.g., URLs, assertions, scripts). 42 | 3. Prioritize accuracy and clarity in your response. 43 | 4. Provide a concise but meaningful summary in **natural language**. 44 | 5. Take into account the url/name of the service that is being monitored. Output it if possible. 45 | 6. The obvious goal of the checks is to monitor functionality and reliability of services - do not focus on this, focus on WHAT is monitored. 46 | 7. Favor user-generated content (name, group name, tags, ADDITIONAL CONTEXT EXPLAINING CHECKLY ACCOUNT SETUP) over other input. 47 | ### **Checks Data** 48 | ${checksFormatted} 49 | 50 | ### **Expected Output** 51 | Provide a **brief summary** explaining the **common purpose** of these checks, focusing on the user impact rather than implementation details. 52 | `; 53 | 54 | if (options.extraContext) { 55 | prompt += ` 56 | ADDITIONAL CONTEXT EXPLAINING CHECKLY ACCOUNT SETUP: 57 | ${options.extraContext} 58 | `; 59 | } 60 | 61 | return { 62 | prompt, 63 | ...promptConfig("summariseMultipleChecksGoal", { 64 | temperature: 0.1, 65 | maxTokens: maxTokens, 66 | }), 67 | schema: output_schema, 68 | output: "object", 69 | }; 70 | } 71 | -------------------------------------------------------------------------------- /src/prompts/timeframe.ts: -------------------------------------------------------------------------------- 1 | import moment from "moment"; 2 | import { PromptConfig, promptConfig } from "./common"; 3 | import { validString } from "./validation"; 4 | 5 | export function parseTimeframePrompt( 6 | timeframe: string, 7 | ): [string, PromptConfig] { 8 | validString.parse(timeframe); 9 | 10 | return [ 11 | `Parse the following timeframe into a precise timestamp range with an interpretation.
12 | 13 | Input: "${timeframe}" 14 | 15 | Return a JSON object with: 16 | - start: ISO 8601 timestamp for range start 17 | - end: ISO 8601 timestamp for range end 18 | - confidence: number between 0-1 indicating parsing confidence 19 | - interpretation: explanation of how the timeframe was interpreted 20 | 21 | Consider current date: ${moment().format("YYYY-MM-DD")}`, 22 | promptConfig("parseTimeframe"), 23 | ]; 24 | } 25 | -------------------------------------------------------------------------------- /src/prompts/validation.ts: -------------------------------------------------------------------------------- 1 | import { z } from "zod"; 2 | 3 | export const validString = z.string().min(1); 4 | export const validStringAllowEmpty = z.string().min(0); 5 | export const validStringList = z.array(validString).min(1); 6 | export const validObject = z 7 | .object({}) 8 | .catchall(z.any()) 9 | .refine((obj) => Object.keys(obj).length > 0, { 10 | message: "Object must have at least one property", 11 | }); 12 | 13 | export const validObjectList = z.array(validObject).min(1); 14 | -------------------------------------------------------------------------------- /src/routes/githubwebhook.spec.ts: -------------------------------------------------------------------------------- 1 | import { PrismaClient } from "@prisma/client"; 2 | import GitHubAPI from "../github/github"; 3 | import { getOpenaiSDKClient } from "../ai/openai"; 4 | import { GithubAgent } from "../github/agent"; 5 | 6 | const prisma = new PrismaClient(); 7 | const CHECKLY_GITHUB_TOKEN = process.env.CHECKLY_GITHUB_TOKEN!; 8 | 9 | const github = new GitHubAPI(CHECKLY_GITHUB_TOKEN); 10 | 11 | let setupAgent = () => { 12 | let openai = getOpenaiSDKClient(); 13 | 14 | return new GithubAgent(openai("gpt-4o"), github); 15 | }; 16 | 17 | const githubAgent = setupAgent(); 18 | 19 | describe("Load github releases into db", () => { 20 | it.skip("should add releases to the db", async () => { 21 | const org = "checkly"; 22 | const repo = "checkly-backend"; 23 | const timeframe = "1 days"; 24 | 25 | let summary = await githubAgent.summarizeReleases( 26 | `what has changed in the ${repo} within the last ${timeframe}`, 27 | org, 28 | ); 29 | 30 | for (const release of summary.releases) { 31 | const authors = release.authors 32 | .filter((author) => author !== null) 33 | .map((author) => author.login); 34 | 35 | await prisma.release.create({ 36 | data: { 37 | name: release.id, 38 | releaseUrl: release.link, 39 | publishedAt: release.release_date, 40 | org: org, 41 | repo: summary.repo.name, 42 | repoUrl: summary.repo.link, 43 | tag: release.id, 44 | diffUrl: release.diffLink, 45 | authors, 46 | summary: release.summary, 47 | }, 48 | }); 49 | } 50 | }, 600000000); 51 | }); 52 | -------------------------------------------------------------------------------- /src/slack/slack.spec.ts: -------------------------------------------------------------------------------- 1 | import { SlackClient } from "./slack"; 2 | 3 | describe("Slack Web Api Tests", () => { 4 | let slack: SlackClient; 5 | 6 | beforeAll(() => { 7 | slack = new SlackClient(process.env.SLACK_AUTH_TOKEN || ""); 8 | }); 9 | 10 | it.skip("should summarize releases by prompt", async () => { 11 | const messages = await slack.fetchHistoricalMessages("CUZ7V5YKZ"); 12 | console.log(JSON.stringify(messages, null, 2)); 13 | 14 | expect(messages).toBeDefined(); 15 | expect(messages.length).toBeGreaterThan(0); 16 | expect(messages[0]).toHaveProperty("plaintext"); 17 | 
expect(messages[0]).toHaveProperty("username"); 18 | }); 19 | }); 20 | -------------------------------------------------------------------------------- /src/slackbot/blocks/__snapshots__/checkBlock.spec.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`checkBlock should create a message block for a check summary 1`] = ` 4 | [ 5 | { 6 | "text": { 7 | "emoji": true, 8 | "text": "Test Check - Last 24 hours", 9 | "type": "plain_text", 10 | }, 11 | "type": "header", 12 | }, 13 | { 14 | "type": "divider", 15 | }, 16 | { 17 | "fields": [ 18 | { 19 | "text": "*Type* 20 | Browser Check", 21 | "type": "mrkdwn", 22 | }, 23 | { 24 | "text": "*Frequency* 25 | every *10* minutes", 26 | "type": "mrkdwn", 27 | }, 28 | ], 29 | "type": "section", 30 | }, 31 | { 32 | "fields": [ 33 | { 34 | "text": "*Locations* 35 | \`us-east-1\`, \`eu-west-1\`", 36 | "type": "mrkdwn", 37 | }, 38 | { 39 | "text": "*Failure Rate* 40 | 2100% (42 / 2)", 41 | "type": "mrkdwn", 42 | }, 43 | ], 44 | "type": "section", 45 | }, 46 | { 47 | "text": { 48 | "emoji": true, 49 | "text": "Detected Error Patterns", 50 | "type": "plain_text", 51 | }, 52 | "type": "header", 53 | }, 54 | { 55 | "type": "divider", 56 | }, 57 | { 58 | "text": { 59 | "text": "*Pattern:* \`Test error message\`", 60 | "type": "mrkdwn", 61 | }, 62 | "type": "section", 63 | }, 64 | { 65 | "fields": [ 66 | { 67 | "text": "*Count* 68 | *2* failures", 69 | "type": "mrkdwn", 70 | }, 71 | { 72 | "text": "*Affected Locations* 73 | \`us-east-1\`, \`eu-west-1\`", 74 | "type": "mrkdwn", 75 | }, 76 | ], 77 | "type": "section", 78 | }, 79 | ] 80 | `; 81 | -------------------------------------------------------------------------------- /src/slackbot/blocks/__snapshots__/checkResultBlock.spec.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`checkResultBlock should create a message block for a check result 1`] = ` 4 | [ 5 | { 6 | "text": { 7 | "emoji": true, 8 | "text": "Check Result Details", 9 | "type": "plain_text", 10 | }, 11 | "type": "header", 12 | }, 13 | { 14 | "fields": [ 15 | { 16 | "text": "*Check Name:* 17 | ", 18 | "type": "mrkdwn", 19 | }, 20 | { 21 | "text": "*Timestamp:* 22 | 2024-01-01T00:00:00.000Z", 23 | "type": "mrkdwn", 24 | }, 25 | { 26 | "text": "*Location:* 27 | \`us-east-1\`", 28 | "type": "mrkdwn", 29 | }, 30 | { 31 | "text": "*Check Result:* 32 | ", 33 | "type": "mrkdwn", 34 | }, 35 | ], 36 | "type": "section", 37 | }, 38 | { 39 | "text": { 40 | "text": "*Error Message:* 41 | \`Test error message\` 42 | 43 | *Similar Failures:* 44 | This error occurred \`1 times\` across locations: \`us-east-1\`, \`eu-west-1\`. 
We saw \`1 Error Groups\` in total with \`1 failures\` in the same timeframe.", 45 | "type": "mrkdwn", 46 | }, 47 | "type": "section", 48 | }, 49 | ] 50 | `; 51 | -------------------------------------------------------------------------------- /src/slackbot/blocks/__snapshots__/errorPatternBlock.spec.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`errorPatternBlock renders empty error patterns 1`] = ` 4 | { 5 | "blocks": [ 6 | { 7 | "text": { 8 | "emoji": true, 9 | "text": "Error Pattern - Top 20", 10 | "type": "plain_text", 11 | }, 12 | "type": "header", 13 | }, 14 | { 15 | "type": "divider", 16 | }, 17 | { 18 | "text": { 19 | "text": "No error Pattern found", 20 | "type": "plain_text", 21 | }, 22 | "type": "section", 23 | }, 24 | ], 25 | } 26 | `; 27 | 28 | exports[`errorPatternBlock renders multiple error patterns 1`] = ` 29 | { 30 | "blocks": [ 31 | { 32 | "text": { 33 | "emoji": true, 34 | "text": "Error Pattern - Top 20", 35 | "type": "plain_text", 36 | }, 37 | "type": "header", 38 | }, 39 | { 40 | "type": "divider", 41 | }, 42 | { 43 | "text": { 44 | "text": "*(Count) Summary* 45 | *Pattern Details*", 46 | "type": "mrkdwn", 47 | }, 48 | "type": "section", 49 | }, 50 | { 51 | "text": { 52 | "text": "*(10) Error Pattern #1* 53 | _Last seen: | First seen: _ 54 | \`\`\`Error Pattern #1 55 | Details of error pattern #1\`\`\`", 56 | "type": "mrkdwn", 57 | }, 58 | "type": "section", 59 | }, 60 | { 61 | "type": "divider", 62 | }, 63 | { 64 | "text": { 65 | "text": "*(20) Error Pattern #2* 66 | _Last seen: | First seen: _ 67 | \`\`\`Error Pattern #2 68 | Details of error pattern #2\`\`\`", 69 | "type": "mrkdwn", 70 | }, 71 | "type": "section", 72 | }, 73 | { 74 | "type": "divider", 75 | }, 76 | ], 77 | } 78 | `; 79 | 80 | exports[`errorPatternBlock renders single error pattern 1`] = ` 81 | { 82 | "blocks": [ 83 | { 84 | "text": { 85 | "emoji": true, 86 | "text": "Error Pattern - Top 20", 87 | "type": "plain_text", 88 | }, 89 | "type": "header", 90 | }, 91 | { 92 | "type": "divider", 93 | }, 94 | { 95 | "text": { 96 | "text": "*(Count) Summary* 97 | *Pattern Details*", 98 | "type": "mrkdwn", 99 | }, 100 | "type": "section", 101 | }, 102 | { 103 | "text": { 104 | "text": "*(10) Error Pattern #1* 105 | _Last seen: | First seen: _ 106 | \`\`\`Error Pattern #1 107 | Details of error pattern #1\`\`\`", 108 | "type": "mrkdwn", 109 | }, 110 | "type": "section", 111 | }, 112 | { 113 | "type": "divider", 114 | }, 115 | ], 116 | } 117 | `; 118 | -------------------------------------------------------------------------------- /src/slackbot/blocks/accountSummaryBlock.script.ts: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env ts-node 2 | import { WebClient } from "@slack/web-api"; 3 | import { createAccountSummaryBlock } from "./accountSummaryBlock"; 4 | 5 | async function main() { 6 | const testCases = [ 7 | // All good 8 | createAccountSummaryBlock({ 9 | accountName: "Healthy", 10 | passingChecks: 50, 11 | degradedChecks: 0, 12 | failingChecks: 0, 13 | passingChecksDelta: 1, 14 | degradedChecksDelta: 0, 15 | failingChecksDelta: -1, 16 | hasIssues: false, 17 | issuesSummary: "No issues detected in the last 24h.", 18 | failingChecksGoals: { response: [] }, 19 | failingCheckIds: [], 20 | errorPatterns: [], 21 | }), 22 | // Some degraded 23 | createAccountSummaryBlock({ 24 | accountName: "Degraded", 25 | passingChecks: 45, 26 | degradedChecks: 5, 27 | failingChecks: 0, 28 | passingChecksDelta: 100, 29 | degradedChecksDelta: -100, 30 | failingChecksDelta: 0, 31 | hasIssues: true, 32 | issuesSummary: 33 | "New degrading or failing checks detected in the last 24h.", 34 | failingChecksGoals: { response: [] }, 35 | failingCheckIds: [], 36 | errorPatterns: [ 37 | { 38 | id: "123", 39 | description: "Error Pattern #1", 40 | count: 10, 41 | firstSeen: new Date("2025-03-24T00:00:00Z"), 42 | }, 43 | { 44 | id: "124", 45 | description: "Error Pattern #2", 46 | count: 99, 47 | firstSeen: new Date("2025-03-24T00:00:00Z"), 48 | }, 49 | { 50 | id: "125", 51 | description: "Error Pattern #3", 52 | count: 10, 53 | firstSeen: new Date("2025-03-24T00:00:00Z"), 54 | }, 55 | ], 56 | }), 57 | // Failing checks without error patterns 58 | createAccountSummaryBlock({ 59 | accountName: "FailingNoErrors", 60 | passingChecks: 42, 61 | degradedChecks: 3, 62 | failingChecks: 5, 63 | passingChecksDelta: 0, 64 | degradedChecksDelta: 0, 65 | failingChecksDelta: 0, 66 | hasIssues: true, 67 | issuesSummary: 68 | "New degrading or failing checks detected in the last 24h.", 69 | failingChecksGoals: { 70 | response: [ 71 | { 72 | header: "main feature", 73 | description: "The main feature is broken.", 74 | }, 75 | ], 76 | }, 77 | failingCheckIds: ["126", "127", "128", "129", "130"], 78 | errorPatterns: [], 79 | }), 80 | // Some failing 81 | createAccountSummaryBlock({ 82 | accountName: "Failing", 83 | passingChecks: 40, 84 | degradedChecks: 2, 85 | failingChecks: 8, 86 | passingChecksDelta: -5, 87 | degradedChecksDelta: 0, 88 | failingChecksDelta: +5, 89 | hasIssues: true, 90 | issuesSummary: 91 | "New degrading or failing checks detected in the last 24h.", 92 | failingChecksGoals: { response: [] }, 93 | failingCheckIds: ["123", "124", "125"], 94 | errorPatterns: [ 95 | { 96 | id: "123", 97 | description: "Error Pattern #1", 98 | count: 10, 99 | firstSeen: new Date("2025-03-24T00:00:00Z"), 100 | }, 101 | ], 102 | }), 103 | ]; 104 | 105 | const client = new WebClient(process.env.SLACK_AUTH_TOKEN); 106 | 107 | for (const blocks of testCases) { 108 | await client.chat.postMessage({ 109 | channel: process.env.SLACK_BOT_CHANNEL_ID!, 110 | blocks: blocks.blocks, 111 | }); 112 | } 113 | } 114 | 115 | main().catch(console.error); 116 | -------------------------------------------------------------------------------- /src/slackbot/blocks/accountSummaryBlock.spec.ts: -------------------------------------------------------------------------------- 1 | import { createAccountSummaryBlock } from "./accountSummaryBlock"; 2 | 3 | describe("accountSummaryBlock", () => { 4 | it("renders healthy account state", () => { 5 | const blocks = createAccountSummaryBlock({ 6 | accountName: "Healthy", 7 | passingChecks: 50, 8 | degradedChecks: 0, 9 | failingChecks: 
0, 10 | passingChecksDelta: 0, 11 | degradedChecksDelta: 0, 12 | failingChecksDelta: 0, 13 | hasIssues: false, 14 | issuesSummary: "No issues detected in the last 24h.", 15 | failingChecksGoals: { response: [] }, 16 | failingCheckIds: [], 17 | errorPatterns: [], 18 | }); 19 | 20 | expect(blocks).toMatchSnapshot(); 21 | }); 22 | 23 | it("renders degraded account state", () => { 24 | const blocks = createAccountSummaryBlock({ 25 | accountName: "Degraded", 26 | passingChecks: 45, 27 | degradedChecks: 5, 28 | failingChecks: 0, 29 | passingChecksDelta: 0, 30 | degradedChecksDelta: 0, 31 | failingChecksDelta: 0, 32 | hasIssues: true, 33 | issuesSummary: 34 | "New degrading or failing checks detected in the last 24h.", 35 | failingChecksGoals: { response: [] }, 36 | failingCheckIds: ["123", "124"], 37 | errorPatterns: [ 38 | { 39 | id: "123", 40 | description: "Error Pattern #1", 41 | count: 100, 42 | firstSeen: new Date("2025-03-24T00:00:00Z"), 43 | }, 44 | { 45 | id: "124", 46 | description: "Error Pattern #2", 47 | count: 100, 48 | firstSeen: new Date("2025-03-24T00:00:00Z"), 49 | }, 50 | ], 51 | }); 52 | 53 | expect(blocks).toMatchSnapshot(); 54 | }); 55 | 56 | it("renders failing account state", () => { 57 | const blocks = createAccountSummaryBlock({ 58 | accountName: "Failing", 59 | passingChecks: 40, 60 | passingChecksDelta: 0, 61 | degradedChecks: 2, 62 | degradedChecksDelta: 0, 63 | failingChecks: 8, 64 | failingChecksDelta: 0, 65 | hasIssues: true, 66 | issuesSummary: 67 | "New degrading or failing checks detected in the last 24h.", 68 | failingChecksGoals: { response: [] }, 69 | failingCheckIds: ["123", "124", "125"], 70 | errorPatterns: [ 71 | { 72 | id: "123", 73 | description: "Error Pattern #1", 74 | count: 10, 75 | firstSeen: new Date("2025-03-24T00:00:00Z"), 76 | }, 77 | ], 78 | }); 79 | 80 | expect(blocks).toMatchSnapshot(); 81 | }); 82 | }); 83 | -------------------------------------------------------------------------------- /src/slackbot/blocks/checkBlock.spec.ts: -------------------------------------------------------------------------------- 1 | import { describe, expect, it } from "@jest/globals"; 2 | import { createCheckBlock } from "./checkBlock"; 3 | import { Check, CheckResult } from "../../checkly/models"; 4 | 5 | describe("checkBlock", () => { 6 | it("should create a message block for a check summary", () => { 7 | const mockCheck: Check = { 8 | id: "test-check-id", 9 | name: "Test Check", 10 | checkType: "BROWSER", 11 | frequency: 10, 12 | locations: ["us-east-1", "eu-west-1"], 13 | } as Check; 14 | 15 | const mockCheckResults: CheckResult[] = [ 16 | { 17 | id: "test-result-id-1", 18 | runLocation: "us-east-1", 19 | } as CheckResult, 20 | { 21 | id: "test-result-id-2", 22 | runLocation: "eu-west-1", 23 | } as CheckResult, 24 | ]; 25 | 26 | const mockErrorGroups = [ 27 | { 28 | error_message: "Test error message", 29 | error_count: 2, 30 | locations: ["us-east-1", "eu-west-1"], 31 | checkResults: ["test-result-id-1", "test-result-id-2"], 32 | }, 33 | ]; 34 | 35 | const result = createCheckBlock({ 36 | check: mockCheck, 37 | failureCount: 42, 38 | errorGroups: mockErrorGroups, 39 | checkResults: mockCheckResults, 40 | frequency: 10, 41 | locations: ["us-east-1", "eu-west-1"], 42 | }); 43 | 44 | expect(result.blocks).toMatchSnapshot(); 45 | }); 46 | }); 47 | -------------------------------------------------------------------------------- /src/slackbot/blocks/checkBlock.ts: -------------------------------------------------------------------------------- 1 | import { 
Check, CheckResult } from "../../checkly/models"; 2 | import { CheckTable } from "../../db/check"; 3 | import { CheckResultTable } from "../../db/check-results"; 4 | import { ErrorClusterTable } from "../../db/error-cluster"; 5 | import { SummarizeErrorsPromptType } from "../../prompts/checkly"; 6 | 7 | export interface CheckBlockProps { 8 | check: Check | CheckTable; 9 | failureCount: number; 10 | errorGroups?: { 11 | error_message: string; 12 | error_count: number; 13 | locations: string[]; 14 | }[]; 15 | checkResults: CheckResult[] | CheckResultTable[]; 16 | frequency: number; 17 | locations: string[]; 18 | } 19 | 20 | export function createCheckBlock({ 21 | check, 22 | failureCount, 23 | errorGroups = [], 24 | checkResults, 25 | frequency, 26 | locations, 27 | }: CheckBlockProps) { 28 | return { 29 | text: `*Check Details*`, 30 | blocks: [ 31 | { 32 | type: "header", 33 | text: { 34 | type: "plain_text", 35 | text: `${check.name} - Last 24 hours`, 36 | emoji: true, 37 | }, 38 | }, 39 | { 40 | type: "divider", 41 | }, 42 | { 43 | type: "section", 44 | fields: [ 45 | { 46 | type: "mrkdwn", 47 | text: `*Type*\n${ 48 | { 49 | BROWSER: "Browser Check", 50 | API: "API Check", 51 | MULTI_STEP: "Multi-Step Check", 52 | }[check.checkType] 53 | }`, 54 | }, 55 | { 56 | type: "mrkdwn", 57 | text: `*Frequency*\nevery *${frequency}* minute${(frequency || -1) > 1 ? "s" : ""}`, 58 | }, 59 | ], 60 | }, 61 | { 62 | type: "section", 63 | fields: [ 64 | { 65 | type: "mrkdwn", 66 | text: `*Locations*\n\`${locations.join("\`, \`")}\``, 67 | }, 68 | { 69 | type: "mrkdwn", 70 | text: `*Failure Rate*\n${((failureCount / checkResults.length) * 100).toFixed(2).replace(/\.00$/, "")}% (${failureCount} / ${checkResults.length})`, 71 | }, 72 | ], 73 | }, 74 | ...(errorGroups.length === 0 75 | ? [ 76 | { 77 | type: "section", 78 | text: { 79 | type: "mrkdwn", 80 | text: `*No errors happened in the last 24 hours*`, 81 | }, 82 | }, 83 | ] 84 | : [ 85 | { 86 | type: "header", 87 | text: { 88 | type: "plain_text", 89 | text: `${errorGroups.length > 0 ? "Detected" : "No"} Error Patterns`, 90 | emoji: true, 91 | }, 92 | }, 93 | ]), 94 | ...errorGroups.flatMap((group) => [ 95 | { 96 | type: "divider", 97 | }, 98 | { 99 | type: "section", 100 | text: { 101 | type: "mrkdwn", 102 | text: `*Pattern:* \`${group.error_message}\``, 103 | }, 104 | }, 105 | { 106 | type: "section", 107 | fields: [ 108 | { 109 | type: "mrkdwn", 110 | text: `*Count*\n*${group.error_count}* failure${group.error_count > 1 ?
"s" : ""}`, 111 | }, 112 | { 113 | type: "mrkdwn", 114 | text: `*Affected Locations*\n\`${group.locations.join("\`, \`")}\``, 115 | }, 116 | ], 117 | }, 118 | ]), 119 | ], 120 | }; 121 | } 122 | -------------------------------------------------------------------------------- /src/slackbot/blocks/checkResultBlock.spec.ts: -------------------------------------------------------------------------------- 1 | import { describe, expect, it } from "@jest/globals"; 2 | import { createCheckResultBlock } from "./checkResultBlock"; 3 | import { ChecklyClient } from "../../checkly/checklyclient"; 4 | import { Check, CheckResult } from "../../checkly/models"; 5 | 6 | describe("checkResultBlock", () => { 7 | it("should create a message block for a check result", () => { 8 | const mockCheck: Check = { 9 | id: "test-check-id", 10 | name: "Test Check", 11 | locations: ["us-east-1", "eu-west-1"], 12 | } as Check; 13 | 14 | const mockCheckResult: CheckResult = { 15 | id: "test-result-id", 16 | runLocation: "us-east-1", 17 | } as CheckResult; 18 | 19 | const mockErrorGroups = { 20 | groups: [ 21 | { 22 | errorMessage: "Test error message", 23 | checkResults: ["test-result-id"], 24 | }, 25 | ], 26 | }; 27 | 28 | const result = createCheckResultBlock({ 29 | check: mockCheck, 30 | checkAppUrl: "https://app.checklyhq.com/checks/test-check-id", 31 | checkResult: mockCheckResult, 32 | checkResultAppUrl: 33 | "https://app.checklyhq.com/checks/test-check-id/results/test-result-id", 34 | errorGroups: mockErrorGroups, 35 | failingCheckResults: [mockCheckResult], 36 | intervalStart: new Date("2024-01-01T00:00:00.000Z"), 37 | }); 38 | 39 | expect(result.blocks).toMatchSnapshot(); 40 | }); 41 | }); 42 | -------------------------------------------------------------------------------- /src/slackbot/blocks/checkResultBlock.ts: -------------------------------------------------------------------------------- 1 | import { Check, CheckResult } from "../../checkly/models"; 2 | import { SummarizeErrorsPromptType } from "../../prompts/checkly"; 3 | 4 | export interface CheckResultBlockProps { 5 | check: Check; 6 | checkAppUrl: string; 7 | checkResult: CheckResult; 8 | checkResultAppUrl: string; 9 | errorGroups: SummarizeErrorsPromptType; 10 | failingCheckResults: CheckResult[]; 11 | intervalStart: Date; 12 | } 13 | 14 | export function createCheckResultBlock({ 15 | check, 16 | checkAppUrl, 17 | checkResult, 18 | checkResultAppUrl, 19 | errorGroups, 20 | failingCheckResults, 21 | intervalStart, 22 | }: CheckResultBlockProps) { 23 | const errorGroup = errorGroups.groups.find( 24 | (g) => g.checkResults.indexOf(checkResult.id) > -1, 25 | ); 26 | 27 | return { 28 | text: `*Check Result Details*`, 29 | blocks: [ 30 | { 31 | type: "header", 32 | text: { 33 | type: "plain_text", 34 | text: "Check Result Details", 35 | emoji: true, 36 | }, 37 | }, 38 | { 39 | type: "section", 40 | fields: [ 41 | { 42 | type: "mrkdwn", 43 | text: `*Check Name:*\n<${checkAppUrl}|${check.name}>`, 44 | }, 45 | { 46 | type: "mrkdwn", 47 | text: `*Timestamp:*\n${intervalStart.toISOString()}`, 48 | }, 49 | { 50 | type: "mrkdwn", 51 | text: `*Location:*\n\`${checkResult.runLocation}\``, 52 | }, 53 | { 54 | type: "mrkdwn", 55 | text: `*Check Result:*\n<${checkResultAppUrl}|Link>`, 56 | }, 57 | ], 58 | }, 59 | { 60 | type: "section", 61 | text: { 62 | type: "mrkdwn", 63 | text: `*Error Message:*\n\`${errorGroup?.errorMessage}\`\n\n*Similar Failures:*\nThis error occurred \`${errorGroup?.checkResults.length} times\` across locations: \`${check.locations.join("\`, 
\`")}\`. We saw \`${errorGroups.groups.length} Error Groups\` in total with \`${failingCheckResults.length} failures\` in the same timeframe.`, 64 | }, 65 | }, 66 | ], 67 | }; 68 | } 69 | -------------------------------------------------------------------------------- /src/slackbot/blocks/errorPatternBlock.script.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ts-node 2 | 3 | import { WebClient } from "@slack/web-api"; 4 | import { createErrorPatternsBlock } from "./errorPatternBlock"; 5 | 6 | async function main() { 7 | console.log("Sending empty error patterns..."); 8 | const messages = [ 9 | createErrorPatternsBlock([]), 10 | createErrorPatternsBlock([ 11 | { 12 | id: "123", 13 | error_message: "Error Pattern #1\nDetails of error pattern #1", 14 | count: 10, 15 | first_seen_at: new Date(), 16 | last_seen_at: new Date(), 17 | embedding: [], 18 | embedding_model: "model", 19 | account_id: "account1", 20 | }, 21 | ]), 22 | createErrorPatternsBlock([ 23 | { 24 | id: "123", 25 | error_message: "Error Pattern #1\nDetails of error pattern #1", 26 | count: 10, 27 | first_seen_at: new Date(), 28 | last_seen_at: new Date(), 29 | embedding: [], 30 | embedding_model: "model", 31 | account_id: "account1", 32 | }, 33 | { 34 | id: "124", 35 | error_message: "Error Pattern #2\nDetails of error pattern #2", 36 | count: 20, 37 | first_seen_at: new Date(), 38 | last_seen_at: new Date(), 39 | embedding: [], 40 | embedding_model: "model", 41 | account_id: "account2", 42 | }, 43 | ]), 44 | ]; 45 | 46 | const client = new WebClient(process.env.SLACK_AUTH_TOKEN); 47 | 48 | for (const blocks of messages) { 49 | await client.chat.postMessage({ 50 | channel: process.env.SLACK_BOT_CHANNEL_ID!, 51 | blocks: blocks.blocks, 52 | }); 53 | } 54 | } 55 | 56 | main(); 57 | -------------------------------------------------------------------------------- /src/slackbot/blocks/errorPatternBlock.spec.ts: -------------------------------------------------------------------------------- 1 | import { createErrorPatternsBlock } from "./errorPatternBlock"; 2 | 3 | describe("errorPatternBlock", () => { 4 | it("renders empty error patterns", () => { 5 | const blocks = createErrorPatternsBlock([]); 6 | expect(blocks).toMatchSnapshot(); 7 | }); 8 | 9 | it("renders single error pattern", () => { 10 | const blocks = createErrorPatternsBlock([ 11 | { 12 | id: "123", 13 | error_message: "Error Pattern #1\nDetails of error pattern #1", 14 | count: 10, 15 | first_seen_at: new Date(), 16 | last_seen_at: new Date(), 17 | embedding: [], 18 | embedding_model: "model", 19 | account_id: "account1", 20 | }, 21 | ]); 22 | expect(blocks).toMatchSnapshot(); 23 | }); 24 | 25 | it("renders multiple error patterns", () => { 26 | const blocks = createErrorPatternsBlock([ 27 | { 28 | id: "123", 29 | error_message: "Error Pattern #1\nDetails of error pattern #1", 30 | count: 10, 31 | first_seen_at: new Date(), 32 | last_seen_at: new Date(), 33 | embedding: [], 34 | embedding_model: "model", 35 | account_id: "account1", 36 | }, 37 | { 38 | id: "124", 39 | error_message: "Error Pattern #2\nDetails of error pattern #2", 40 | count: 20, 41 | first_seen_at: new Date(), 42 | last_seen_at: new Date(), 43 | embedding: [], 44 | embedding_model: "model", 45 | account_id: "account2", 46 | }, 47 | ]); 48 | expect(blocks).toMatchSnapshot(); 49 | }); 50 | }); 51 | -------------------------------------------------------------------------------- /src/slackbot/blocks/errorPatternBlock.ts: 
-------------------------------------------------------------------------------- 1 | import { ErrorClusterWithCount } from "../../db/error-cluster"; 2 | 3 | export const LIST_ERROR_PATTERNS_ACTION_ID = "list_error_patterns"; 4 | 5 | export function createErrorPatternsBlock( 6 | errorPatterns: ErrorClusterWithCount[], 7 | ) { 8 | return { 9 | text: "Error Patterns - Top 20", 10 | blocks: [ 11 | { 12 | text: { 13 | emoji: true, 14 | text: "Error Patterns - Top 20", 15 | type: "plain_text", 16 | }, 17 | type: "header", 18 | }, 19 | { 20 | type: "divider", 21 | }, 22 | ...(errorPatterns.length > 0 23 | ? [ 24 | { 25 | text: { 26 | text: "*(Count) Summary*\n*Pattern Details*", 27 | type: "mrkdwn", 28 | }, 29 | type: "section", 30 | }, 31 | ...errorPatterns.slice(0, 20).flatMap((errorPattern) => [ 32 | { 33 | text: { 34 | text: `*(${errorPattern.count}) ${errorPattern.error_message.split("\n")[0]}* 35 | _Last seen: | First seen: _ 36 | \`\`\`${errorPattern.error_message.replaceAll('"', "")}\`\`\``, 37 | type: "mrkdwn", 38 | }, 39 | type: "section", 40 | }, 41 | { 42 | type: "divider", 43 | }, 44 | ]), 45 | ] 46 | : [ 47 | { 48 | text: { 49 | text: "No error Pattern found", 50 | type: "plain_text", 51 | }, 52 | type: "section", 53 | }, 54 | ]), 55 | ], 56 | }; 57 | } 58 | -------------------------------------------------------------------------------- /src/slackbot/blocks/failingChecksBlock.script.ts: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env ts-node 2 | 3 | import { WebClient } from "@slack/web-api"; 4 | import { Check, renderFailingChecksBlock } from "./failingChecksBlock"; 5 | 6 | const channelId = process.env.SLACK_BOT_CHANNEL_ID!; 7 | 8 | async function sendFailingChecksMessage() { 9 | const checksListSamples = [ 10 | [ 11 | { 12 | checkId: "123", 13 | checkState: "FAILED", 14 | name: "Check 1", 15 | failures: { total: 1, timeframe: "24h" }, 16 | lastFailure: { 17 | checkResultId: "123", 18 | timestamp: new Date(), 19 | }, 20 | } as Check, 21 | { 22 | checkId: "124", 23 | checkState: "PASSED", 24 | name: "Check 2", 25 | failures: { total: 0, timeframe: "24h" }, 26 | group: "Group 2", 27 | } as Check, 28 | { 29 | checkId: "125", 30 | checkState: "DEGRADED", 31 | name: "Check 3", 32 | failures: { total: 2, timeframe: "24h" }, 33 | group: "Group 3", 34 | lastFailure: { 35 | checkResultId: "125", 36 | timestamp: new Date(), 37 | }, 38 | } as Check, 39 | { 40 | checkId: "126", 41 | checkState: "PASSED", 42 | name: "Check 4", 43 | failures: { total: 0, timeframe: "24h" }, 44 | group: "Group 4", 45 | } as Check, 46 | ], 47 | ]; 48 | 49 | const client = new WebClient(process.env.SLACK_AUTH_TOKEN); 50 | 51 | await Promise.all( 52 | checksListSamples.map(async (checks) => { 53 | const message = renderFailingChecksBlock(checks); 54 | 55 | await client.chat.postMessage({ 56 | channel: channelId, 57 | text: "Failing Checks Summary", 58 | blocks: message.blocks, 59 | }); 60 | }), 61 | ); 62 | } 63 | 64 | sendFailingChecksMessage().catch(console.error); 65 | -------------------------------------------------------------------------------- /src/slackbot/blocks/failingChecksBlock.spec.ts: -------------------------------------------------------------------------------- 1 | import { Check, renderFailingChecksBlock } from "./failingChecksBlock"; 2 | 3 | describe("failingChecksBlock", () => { 4 | const now = new Date("2023-01-01T00:00:00Z"); 5 | 6 | it("renders checks with mixed states", () => { 7 | const checks = [ 8 | { 9 | checkId: "123", 10 | 
checkState: "FAILED", 11 | name: "Check 1", 12 | failures: { total: 1, timeframe: "24h" }, 13 | lastFailure: { 14 | checkResultId: "123", 15 | timestamp: now, 16 | }, 17 | }, 18 | { 19 | checkId: "124", 20 | checkState: "PASSED", 21 | name: "Check 2", 22 | failures: { total: 0, timeframe: "24h" }, 23 | group: "Group 2", 24 | }, 25 | { 26 | checkId: "125", 27 | checkState: "DEGRADED", 28 | name: "Check 3", 29 | failures: { total: 2, timeframe: "24h" }, 30 | group: "Group 3", 31 | lastFailure: { 32 | checkResultId: "125", 33 | timestamp: now, 34 | }, 35 | }, 36 | { 37 | checkId: "126", 38 | checkState: "PASSED", 39 | name: "Check 4", 40 | failures: { total: 0, timeframe: "24h" }, 41 | group: "Group 4", 42 | }, 43 | ] as Check[]; 44 | 45 | const blocks = renderFailingChecksBlock(checks); 46 | expect(blocks).toMatchSnapshot(); 47 | }); 48 | 49 | it("renders all passing checks", () => { 50 | const checks = [ 51 | { 52 | checkId: "124", 53 | checkState: "PASSED", 54 | name: "Check 2", 55 | failures: { total: 0, timeframe: "24h" }, 56 | group: "Group 2", 57 | }, 58 | { 59 | checkId: "126", 60 | checkState: "PASSED", 61 | name: "Check 4", 62 | failures: { total: 0, timeframe: "24h" }, 63 | group: "Group 4", 64 | }, 65 | ] as Check[]; 66 | 67 | const blocks = renderFailingChecksBlock(checks); 68 | expect(blocks).toMatchSnapshot(); 69 | }); 70 | 71 | it("renders all failing checks", () => { 72 | const checks = [ 73 | { 74 | checkId: "123", 75 | checkState: "FAILED", 76 | name: "Check 1", 77 | failures: { total: 1, timeframe: "24h" }, 78 | lastFailure: { 79 | checkResultId: "123", 80 | timestamp: now, 81 | }, 82 | }, 83 | { 84 | checkId: "125", 85 | checkState: "DEGRADED", 86 | name: "Check 3", 87 | failures: { total: 2, timeframe: "24h" }, 88 | group: "Group 3", 89 | lastFailure: { 90 | checkResultId: "125", 91 | timestamp: now, 92 | }, 93 | }, 94 | ] as Check[]; 95 | 96 | const blocks = renderFailingChecksBlock(checks); 97 | expect(blocks).toMatchSnapshot(); 98 | }); 99 | }); 100 | -------------------------------------------------------------------------------- /src/slackbot/blocks/failingChecksBlock.ts: -------------------------------------------------------------------------------- 1 | import { checkly } from "../../checkly/client"; 2 | 3 | export const LIST_FAILING_CHECKS_ACTION_ID = "list_failing_checks"; 4 | 5 | export type Check = { 6 | checkId: string; 7 | checkState: "FAILED" | "DEGRADED" | "PASSED"; 8 | name: string; 9 | failures: { 10 | total: number; 11 | timeframe: string; 12 | }; 13 | group: string | null; 14 | lastFailure: { 15 | checkResultId: string; 16 | timestamp: Date; 17 | } | null; 18 | }; 19 | 20 | export function renderFailingChecksBlock(checks: Check[]) { 21 | const statusIcon = (checkState: Check["checkState"]) => { 22 | switch (checkState) { 23 | case "FAILED": 24 | return "❌"; 25 | case "DEGRADED": 26 | return "⚠️"; 27 | case "PASSED": 28 | return "✅"; 29 | } 30 | }; 31 | 32 | const checkUrl = (checkId: string) => checkly.getCheckAppUrl(checkId); 33 | const checkResultUrl = (checkId: string, checkResultId: string) => 34 | checkly.getCheckResultAppUrl(checkId, checkResultId); 35 | return { 36 | text: "Failing Checks", 37 | blocks: [ 38 | { 39 | type: "header", 40 | text: { 41 | type: "plain_text", 42 | text: "Failing Checks", 43 | emoji: true, 44 | }, 45 | }, 46 | { 47 | type: "divider", 48 | }, 49 | { 50 | type: "section", 51 | fields: [ 52 | { 53 | type: "mrkdwn", 54 | text: "*Check Name*", 55 | }, 56 | { 57 | type: "mrkdwn", 58 | text: "*Failure rate*", 59 | }, 60 | { 
61 | type: "mrkdwn", 62 | text: "*Group*", 63 | }, 64 | { 65 | type: "mrkdwn", 66 | text: "*Last failed*", 67 | }, 68 | ], 69 | }, 70 | ...checks.map((check) => ({ 71 | type: "section", 72 | fields: [ 73 | { 74 | type: "mrkdwn", 75 | text: `<${checkUrl(check.checkId)}|${statusIcon(check.checkState)} ${check.name}>`, 76 | }, 77 | { 78 | type: "mrkdwn", 79 | text: `${check.failures.total} failure${ 80 | check.failures.total === 1 ? "" : "s" 81 | } (last ${check.failures.timeframe})`, 82 | }, 83 | { 84 | type: "mrkdwn", 85 | text: check.group || " ", 86 | }, 87 | { 88 | type: "mrkdwn", 89 | text: check.lastFailure 90 | ? ` <${checkResultUrl(check.checkId, check.lastFailure.checkResultId)}|Link>` 91 | : "N/A", 92 | }, 93 | ], 94 | })), 95 | ], 96 | }; 97 | } 98 | -------------------------------------------------------------------------------- /src/slackbot/blocks/multipleChecksAnalysisBlock.ts: -------------------------------------------------------------------------------- 1 | import { MultipleCheckAnalysisResult } from "../../use-cases/analyse-multiple/analyse-multiple-checks"; 2 | 3 | export function createMultipleCheckAnalysisBlock( 4 | analysisResult: MultipleCheckAnalysisResult, 5 | ) { 6 | return { 7 | text: `*Multiple Check Analysis*`, 8 | blocks: [ 9 | { 10 | type: "section", 11 | text: { 12 | type: "mrkdwn", 13 | text: `Impact Analysis:\n* ${analysisResult.allAnalysedChecks.length} checks analysed\n* ${analysisResult.goalSummary.response.map((group, index) => `${index + 1}. **${group.header}**: ${group.description}`).join("\n")}`, 14 | }, 15 | }, 16 | ], 17 | }; 18 | } 19 | -------------------------------------------------------------------------------- /src/slackbot/channel-summary.ts: -------------------------------------------------------------------------------- 1 | import { generateObject } from "ai"; 2 | import { WebhookAlertDto } from "../checkly/alertDTO"; 3 | import { channelSummaryPrompt, ChannelSummaryResponse } from "../prompts/slack"; 4 | import { SlackClient } from "../slack/slack"; 5 | 6 | const slackClient = new SlackClient(process.env.SLACK_AUTH_TOKEN || ""); 7 | 8 | export const generateChannelSummary = async ( 9 | channelId: string, 10 | alert: WebhookAlertDto, 11 | fromTimestamp?: string, 12 | ): Promise => { 13 | const fromDate = fromTimestamp 14 | ? 
new Date(fromTimestamp) 15 | : new Date(Date.now() - 1000 * 60 * 60 * 24); 16 | 17 | const messages = await slackClient.fetchHistoricalMessages( 18 | channelId, 19 | 100, 20 | fromDate, 21 | ); 22 | 23 | const promptDefinition = channelSummaryPrompt(alert, messages); 24 | 25 | const { object } = await generateObject(promptDefinition); 26 | 27 | return object; 28 | }; 29 | -------------------------------------------------------------------------------- /src/slackbot/check-result-slices.ts: -------------------------------------------------------------------------------- 1 | import { 2 | addMinutes, 3 | isAfter, 4 | isBefore, 5 | isEqual, 6 | startOfMinute, 7 | } from "date-fns"; 8 | import { CheckResult } from "../checkly/models"; 9 | import { CheckResultTable } from "../db/check-results"; 10 | import { log } from "../log"; 11 | 12 | export interface CheckResultsTimeSlice { 13 | checkId: string; 14 | location: string; 15 | start: Date; 16 | end: Date; 17 | data: { 18 | passingChecks: number; 19 | degradedChecks: number; 20 | failingChecks: number; 21 | }; 22 | } 23 | 24 | function createTimeSlices( 25 | checkResultKeys: { checkId: string; location: string }[], 26 | from: Date, 27 | to: Date, 28 | sliceMinutes: number = 30, 29 | ): CheckResultsTimeSlice[] { 30 | const slices: CheckResultsTimeSlice[] = []; 31 | let sliceStart = startOfMinute(from); 32 | 33 | while (isBefore(sliceStart, to)) { 34 | const sliceEnd = addMinutes(sliceStart, sliceMinutes); 35 | 36 | for (const { checkId, location } of checkResultKeys) { 37 | slices.push({ 38 | checkId, 39 | start: sliceStart, 40 | end: sliceEnd, 41 | location, 42 | data: { 43 | passingChecks: 0, 44 | degradedChecks: 0, 45 | failingChecks: 0, 46 | }, 47 | }); 48 | } 49 | 50 | sliceStart = sliceEnd; 51 | } 52 | 53 | return slices; 54 | } 55 | 56 | export function aggregateCheckResults( 57 | results: CheckResultTable[], 58 | from: Date, 59 | to: Date, 60 | sliceMinutes: number = 30, 61 | ) { 62 | const checkResultKeys = results.reduce((acc, result) => { 63 | const key = `${result.checkId}:${result.runLocation}`; 64 | if (!acc.has(key)) { 65 | acc.set(key, { checkId: result.checkId, location: result.runLocation }); 66 | } 67 | return acc; 68 | }, new Map()); 69 | 70 | const slices = createTimeSlices( 71 | Array.from(checkResultKeys.values()), 72 | from, 73 | to, 74 | sliceMinutes, 75 | ); 76 | 77 | results.forEach((result) => { 78 | const slice = slices.find( 79 | (s) => 80 | (isBefore(s.start, result.startedAt) || 81 | isEqual(s.start, result.startedAt)) && 82 | isAfter(s.end, result.startedAt) && 83 | s.location === result.runLocation, 84 | ); 85 | 86 | if (slice) { 87 | if (result.hasFailures || result.hasErrors) { 88 | slice.data.failingChecks++; 89 | } else if (result.isDegraded) { 90 | slice.data.degradedChecks++; 91 | } else { 92 | slice.data.passingChecks++; 93 | } 94 | } else { 95 | console.error( 96 | `Slice not found for result ${result.id} ${result.startedAt}`, 97 | ); 98 | } 99 | }); 100 | 101 | return slices; 102 | } 103 | -------------------------------------------------------------------------------- /src/slackbot/checkly-integration-utils.ts: -------------------------------------------------------------------------------- 1 | import { getDocumentBySlug } from "../knowledge-base/knowledgeBase"; 2 | import { log } from "../log"; 3 | 4 | // It's a document that where we can store extra context for the account setup 5 | // Explain here what tags, components, and names mean in the account 6 | const CHECKLY_ACCOUNT_SETUP_DOCUMENT_SLUG = 7 | 
process.env.CHECKLY_ACCOUNT_SETUP_DOCUMENT_SLUG; 8 | 9 | export const getExtraAccountSetupContext = async (): Promise => { 10 | if (!CHECKLY_ACCOUNT_SETUP_DOCUMENT_SLUG) { 11 | return null; 12 | } 13 | 14 | const document = await getDocumentBySlug(CHECKLY_ACCOUNT_SETUP_DOCUMENT_SLUG); 15 | if (!document) { 16 | log.warn( 17 | "Could not find document with slug", 18 | CHECKLY_ACCOUNT_SETUP_DOCUMENT_SLUG, 19 | ); 20 | return null; 21 | } 22 | 23 | log.debug("Found extra context document"); 24 | 25 | return document.content; 26 | }; 27 | -------------------------------------------------------------------------------- /src/slackbot/checkly.ts: -------------------------------------------------------------------------------- 1 | import { checkly } from "../checkly/client"; 2 | import { last24h } from "../prompts/checkly-data"; 3 | import { log } from "../log"; 4 | import { App, StringIndexed } from "@slack/bolt"; 5 | import { analyseMultipleChecks } from "../use-cases/analyse-multiple/analyse-multiple-checks"; 6 | import { createMultipleCheckAnalysisBlock } from "./blocks/multipleChecksAnalysisBlock"; 7 | import { accountSummary } from "./accountSummaryCommandHandler"; 8 | import { checkSummary } from "./commands/check-summary"; 9 | 10 | // Allow overriding the command name for local dev 11 | export const CHECKLY_COMMAND_NAME = 12 | process.env.CHECKLY_COMMAND_NAME_OVERRIDE || "/checkly"; 13 | 14 | const getIsUUID = (str: string): boolean => { 15 | return /^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test( 16 | str, 17 | ); 18 | }; 19 | 20 | export const checklyCommandHandler = (app: App) => { 21 | return async ({ ack, respond, command }) => { 22 | await ack(); 23 | const args = command.text.split(" "); 24 | if (args.length == 1 && args[0].trim() === "") { 25 | const accountId = process.env.CHECKLY_ACCOUNT_ID!; 26 | const account = await checkly.getAccount(accountId); 27 | const interval = last24h(new Date()); 28 | 29 | const response = await app.client.chat.postMessage({ 30 | channel: command.channel_id, 31 | text: `Analysing account \`${account.name}\`... ⏳`, 32 | }); 33 | 34 | try { 35 | const { message } = await accountSummary(accountId, interval); 36 | 37 | await app.client.chat.update({ 38 | channel: command.channel_id, 39 | ts: response.ts, 40 | ...message, 41 | } as any); 42 | } catch (err) { 43 | // Ensure we have a proper Error object 44 | const error = err instanceof Error ? err : new Error(String(err)); 45 | 46 | log.error( 47 | { 48 | err: error, 49 | accountId, 50 | }, 51 | "Error fetching account summary", 52 | ); 53 | 54 | await respond({ 55 | replace_original: true, 56 | text: `:x: Error fetching account summary: ${error.message}`, 57 | }); 58 | } 59 | } else if (args.length === 1 && !!args[0] && getIsUUID(args[0])) { 60 | const checkId = args[0]; 61 | try { 62 | // It is not possible to remove ephemeral messages or update them 63 | const response = await app.client.chat.postMessage({ 64 | channel: command.channel_id, 65 | text: `Analyzing check \`${checkId}\`... ⏳`, 66 | }); 67 | 68 | const { message } = await checkSummary(checkId); 69 | 70 | await app.client.chat.update({ 71 | channel: command.channel_id, 72 | ts: response.ts, 73 | ...message, 74 | } as any); 75 | } catch (err) { 76 | const error = err instanceof Error ? 
err : new Error(String(err)); 77 | log.error( 78 | { 79 | err: error, 80 | checkId, 81 | }, 82 | "Error preparing check summary", 83 | ); 84 | 85 | await respond({ 86 | replace_original: true, 87 | text: `:x: Error analysing check summary: ${error.message}`, 88 | }); 89 | } 90 | } else { 91 | await respond({ 92 | text: "Please provide either a valid check id or no arguments for Account wide analysis", 93 | }); 94 | } 95 | }; 96 | }; 97 | -------------------------------------------------------------------------------- /src/slackbot/commands/check-summary.script.ts: -------------------------------------------------------------------------------- 1 | import * as fs from "node:fs"; 2 | 3 | import { initConfig } from "../../lib/init-config"; 4 | import { checkSummary } from "./check-summary"; 5 | 6 | initConfig(); 7 | 8 | const main = async () => { 9 | const checkId = process.argv[2] || "50396dfc-22c2-4ee4-9613-a8fb16bdde49"; 10 | 11 | console.log("Fetching check summary for", checkId); 12 | const result = await checkSummary(checkId); 13 | console.log(JSON.stringify(result.message, null, 2)); 14 | process.exit(0); 15 | }; 16 | 17 | main(); 18 | -------------------------------------------------------------------------------- /src/slackbot/config.ts: -------------------------------------------------------------------------------- 1 | import { pinoBoltLogger } from "../log"; 2 | import { Logger, LogLevel } from "@slack/bolt"; 3 | import process from "node:process"; 4 | 5 | interface SlackConfig { 6 | signingSecret: string; 7 | token: string; 8 | appToken: string; 9 | socketMode: boolean; 10 | logLevel: LogLevel; 11 | logger: Logger; 12 | } 13 | 14 | export const getSlackConfig = (): SlackConfig => ({ 15 | signingSecret: process.env.SLACK_SIGNING_SECRET!, 16 | token: process.env.SLACK_AUTH_TOKEN!, 17 | appToken: process.env.SLACK_APP_TOKEN!, 18 | socketMode: true, 19 | logger: pinoBoltLogger, 20 | logLevel: LogLevel.INFO, 21 | }); 22 | 23 | export const validateConfig = (): void => { 24 | const requiredEnvVars = [ 25 | "SLACK_SIGNING_SECRET", 26 | "SLACK_AUTH_TOKEN", 27 | "SLACK_APP_TOKEN", 28 | ]; 29 | 30 | const missingVars = requiredEnvVars.filter( 31 | (varName) => !process.env[varName], 32 | ); 33 | 34 | if (missingVars.length > 0) { 35 | throw new Error( 36 | `Missing required environment variables: ${missingVars.join(", ")}`, 37 | ); 38 | } 39 | }; 40 | -------------------------------------------------------------------------------- /src/slackbot/feedback.ts: -------------------------------------------------------------------------------- 1 | import { prisma } from "../prisma"; 2 | import { generateSlackMessageLink } from "./utils"; 3 | import { app } from "./app"; 4 | import { BotResponse, Feedback } from "@prisma/client"; 5 | import type { ChatPostMessageResponse } from "@slack/web-api/dist/types/response"; 6 | import { getMessageText } from "../slack/slack"; 7 | 8 | type BotResponseWhereClause = { 9 | alertId?: string; 10 | releaseId?: string; 11 | deploymentId?: string; 12 | }; 13 | 14 | const getWhereClause = (metadata: any): BotResponseWhereClause => { 15 | const whereClause: BotResponseWhereClause = {}; 16 | if (metadata.event_type === "alert") { 17 | whereClause.alertId = metadata.event_payload?.alertId; 18 | } 19 | if (metadata.event_type === "release") { 20 | whereClause.releaseId = metadata.event_payload?.releaseId; 21 | } 22 | if (metadata.event_type === "deployment") { 23 | whereClause.deploymentId = metadata.event_payload?.deploymentId; 24 | } 25 | return whereClause; 26 | }; 
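// Illustrative example (hypothetical values): for Slack message metadata shaped like
// { event_type: "alert", event_payload: { alertId: "a1" } }, getWhereClause returns
// { alertId: "a1" }. findBotResponse below reuses that clause to look up the most
// recent BotResponse row recorded for the same alert, release, or deployment:
//   const where = getWhereClause({ event_type: "alert", event_payload: { alertId: "a1" } });
//   // => { alertId: "a1" }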
27 | 28 | async function findBotResponse(metadata: any): Promise { 29 | const whereClause = getWhereClause(metadata); 30 | 31 | return prisma.botResponse.findFirst({ 32 | where: whereClause, 33 | orderBy: { 34 | createdAt: "desc", 35 | }, 36 | }); 37 | } 38 | 39 | export const saveResponseAndAskForFeedback = async ( 40 | postMessageResponse: ChatPostMessageResponse, 41 | ) => { 42 | const message = postMessageResponse.message!; 43 | const whereClause = getWhereClause(message.metadata); 44 | const channel = postMessageResponse.channel!; 45 | const threadTs = message.thread_ts || postMessageResponse.ts; 46 | const messageText = getMessageText(message); 47 | 48 | await prisma.botResponse.create({ 49 | data: { 50 | ...whereClause, 51 | content: messageText, 52 | slackMessageUrl: generateSlackMessageLink(channel!, threadTs!), 53 | slackMessageTs: threadTs!, 54 | }, 55 | }); 56 | 57 | // Post another message with feedback buttons in the same thread 58 | // The message will be replaced with feedback result when a user submits it 59 | await app.client.chat.postMessage({ 60 | channel, 61 | text: "Was this helpful?", 62 | blocks: [ 63 | { 64 | type: "section", 65 | text: { 66 | type: "mrkdwn", 67 | text: "Was this helpful?", 68 | }, 69 | }, 70 | { 71 | type: "actions", 72 | elements: [ 73 | { 74 | type: "button", 75 | action_id: "feedback_thumbs_up", 76 | text: { 77 | type: "plain_text", 78 | text: "👍", 79 | }, 80 | style: "primary", 81 | }, 82 | { 83 | type: "button", 84 | action_id: "feedback_thumbs_down", 85 | text: { 86 | type: "plain_text", 87 | text: "👎", 88 | }, 89 | style: "danger", 90 | }, 91 | ], 92 | }, 93 | ], 94 | 95 | thread_ts: threadTs, // Replies in the same thread 96 | metadata: message.metadata! as any, 97 | }); 98 | }; 99 | 100 | export enum FeedbackScore { 101 | thumbsUp = 1, 102 | thumbsDown = 0, 103 | } 104 | 105 | export const saveResponseFeedback = async ( 106 | metadata: any, 107 | score: FeedbackScore, 108 | categories: string[] = [], 109 | ): Promise => { 110 | const responseRecord = await findBotResponse(metadata); 111 | if (!responseRecord) { 112 | return null; 113 | } 114 | const botResponseId = responseRecord.id; 115 | 116 | return prisma.feedback.upsert({ 117 | where: { 118 | botResponseId, 119 | }, 120 | create: { 121 | botResponseId, 122 | score, 123 | categories, 124 | }, 125 | update: { 126 | score, 127 | categories, 128 | }, 129 | }); 130 | }; 131 | -------------------------------------------------------------------------------- /src/slackbot/index.ts: -------------------------------------------------------------------------------- 1 | import "dotenv/config"; 2 | import { app } from "./app"; 3 | 4 | app.error(async (error) => { 5 | // Check the details of the error to handle cases where you should retry sending a message or stop the app 6 | console.error(error); 7 | }); 8 | 9 | (async () => { 10 | await app.start(); 11 | console.log("⚡️ Bolt app is running!"); 12 | })(); 13 | -------------------------------------------------------------------------------- /src/slackbot/listErrorPatternActionHandler.ts: -------------------------------------------------------------------------------- 1 | import { findErrorClusters } from "../db/error-cluster"; 2 | import { last24h } from "../prompts/checkly-data"; 3 | import { createErrorPatternsBlock } from "./blocks/errorPatternBlock"; 4 | 5 | export const listErrorPatternActionHandler = (app) => { 6 | return async ({ ack, body }) => { 7 | await ack(); 8 | const errorPatternIds = body.actions[0].value.split(","); 9 | 10 | const 
errorPatterns = await findErrorClusters(errorPatternIds, last24h()); 11 | 12 | const message = createErrorPatternsBlock(errorPatterns); 13 | 14 | await app.client.chat.postMessage({ 15 | channel: body.channel.id, 16 | thread_ts: body.message.ts, 17 | ...message, 18 | }); 19 | }; 20 | }; 21 | -------------------------------------------------------------------------------- /src/slackbot/listFailingChecksActionHandler.ts: -------------------------------------------------------------------------------- 1 | import { readChecksWithGroupNames } from "../db/check"; 2 | import { findCheckResults } from "../db/check-results"; 3 | import { last24h } from "../prompts/checkly-data"; 4 | import * as dataForge from "data-forge"; 5 | import { renderFailingChecksBlock } from "./blocks/failingChecksBlock"; 6 | 7 | export const listFailingChecksActionHandler = (app) => { 8 | return async ({ ack, body }) => { 9 | await ack(); 10 | const interval = last24h(new Date()); 11 | const checkIds = (body.actions[0].value as string).split(","); 12 | const groupNamesForCheckIds = ( 13 | await readChecksWithGroupNames(checkIds) 14 | ).reduce( 15 | (acc, check) => { 16 | acc[check.id] = check.groupName; 17 | return acc; 18 | }, 19 | {} as Record, 20 | ); 21 | 22 | const checkResults = await findCheckResults( 23 | checkIds, 24 | interval.from, 25 | interval.to, 26 | ); 27 | 28 | const checkResultsDF = new dataForge.DataFrame(checkResults); 29 | 30 | const failedChecks = checkResultsDF 31 | .groupBy((cr) => cr.checkId) 32 | .map((group) => ({ 33 | checkId: group.first().checkId, 34 | checkState: (group.first().hasFailures || group.first().hasErrors 35 | ? "FAILED" 36 | : group.first().isDegraded 37 | ? "DEGRADED" 38 | : "PASSED") as "FAILED" | "DEGRADED" | "PASSED", 39 | name: group.first().name, 40 | failures: { 41 | total: group 42 | .deflate((cr) => (cr.hasFailures || cr.hasErrors ? 1 : 0)) 43 | .sum(), 44 | timeframe: "24h", 45 | }, 46 | group: groupNamesForCheckIds[group.first().checkId], 47 | lastFailure: (() => { 48 | const orderedFailures = group 49 | .where((cr) => cr.hasFailures || cr.hasErrors || cr.isDegraded) 50 | .orderBy((cr) => cr.startedAt); 51 | if (orderedFailures.none()) { 52 | return null; 53 | } 54 | const lastFailure = orderedFailures.last(); 55 | return lastFailure 56 | ? 
{ 57 | checkResultId: lastFailure.id, 58 | timestamp: lastFailure.startedAt, 59 | } 60 | : null; 61 | })(), 62 | })) 63 | .toArray(); 64 | 65 | const message = renderFailingChecksBlock(failedChecks); 66 | await app.client.chat.postMessage({ 67 | thread_ts: body.message.ts, 68 | channel: body.channel.id, 69 | ...message, 70 | }); 71 | }; 72 | }; 73 | -------------------------------------------------------------------------------- /src/slackbot/noopActionHandler.ts: -------------------------------------------------------------------------------- 1 | export const NOOP_ACTION_ID = "noop-action"; 2 | 3 | export const noopActionHandler = () => { 4 | return async ({ ack }) => { 5 | await ack(); 6 | }; 7 | }; 8 | -------------------------------------------------------------------------------- /src/slackbot/ops-channel.spec.ts: -------------------------------------------------------------------------------- 1 | import { openai } from "@ai-sdk/openai"; 2 | import { generateText } from "ai"; 3 | import dotenv from "dotenv"; 4 | import "dotenv/config"; 5 | import "reflect-metadata"; 6 | import { convertSlackTimestamp } from "./utils"; 7 | import { SlackClient } from "../slack/slack"; 8 | 9 | const slackClient = new SlackClient(process.env.SLACK_AUTH_TOKEN || ""); 10 | 11 | dotenv.config(); 12 | jest.setTimeout(30000); 13 | 14 | // Playground for testing the channel summary 15 | 16 | describe.skip("fetchHistoricalMessages", () => { 17 | it("should fetch historical messages", async () => { 18 | const messages = await slackClient.fetchHistoricalMessages("CUZ7V5YKZ"); 19 | }); 20 | 21 | it("should generate a summary", async () => { 22 | const opsMessages = 23 | (await slackClient.fetchHistoricalMessages("CUZ7V5YKZ", 100)) ?? []; 24 | const deploymentMessages = 25 | (await slackClient.fetchHistoricalMessages("C046EHXJCFM", 100)) ?? []; 26 | 27 | const messages = [...opsMessages, ...deploymentMessages].sort((a, b) => { 28 | return ( 29 | convertSlackTimestamp(b.ts!).getTime() - 30 | convertSlackTimestamp(a.ts!).getTime() 31 | ); 32 | }); 33 | 34 | const messageHistory = messages 35 | ?.map( 36 | (m) => 37 | `${convertSlackTimestamp(m.ts!).toISOString()} Message: ${ 38 | m.plaintext 39 | }`, 40 | ) 41 | .join("\n"); 42 | 43 | const { text } = await generateText({ 44 | model: openai("o1-preview"), 45 | prompt: `You are a ops channel bot that is part of the incident response team. Given the message history in the channel, your job is to respond to the trigger message with a concise breakdown. 46 | 47 | Objectives: 48 | - Check if the issue has happened before 49 | - Review and analyze the message history to determine if there are any relations to the trigger message 50 | - Generate a helpful response that helps first time responders 51 | - Keep it highly relevant 52 | - Be very concise and to the point 53 | - A valid response should be 1-2 sentences 54 | - Be upfront and direct. If the message does not need any intervention, just say so 55 | - If there is no issue, just say so 56 | - Respond only to the trigger message 57 | - Only propose actions if there is a real issue 58 | 59 | Message History:\n${messageHistory} 60 | 61 | Question: is anything related to the recent adhocrun-eu-west-1 check failing?`, 62 | }); 63 | 64 | console.log(text); 65 | }); 66 | /*it("should generateChannelSummary", async () => { 67 | const summary = await generateChannelSummary( 68 | "CUZ7V5YKZ", 69 | "Create a concise summary recent alerts and notifications." 
70 | ); 71 | console.log(summary); 72 | });*/ 73 | }); 74 | -------------------------------------------------------------------------------- /src/slackbot/web-client.ts: -------------------------------------------------------------------------------- 1 | import { WebClient } from "@slack/web-api"; 2 | import dotenv from "dotenv"; 3 | 4 | dotenv.config(); 5 | 6 | export const web = new WebClient(process.env.SLACK_AUTH_TOKEN); 7 | -------------------------------------------------------------------------------- /src/sre-assistant/SreAssistant.ts: -------------------------------------------------------------------------------- 1 | import { BaseAssistant } from "../ai/Assistant"; 2 | import { Tool } from "../ai/Tool"; 3 | import type { RunCreateParams } from "openai/resources/beta/threads"; 4 | import { SearchContextTool } from "./tools/SearchContextTool"; 5 | import { ChecklyTool } from "./tools/ChecklyTool"; 6 | import { GitHubTool } from "./tools/GitHubTool"; 7 | import { prisma } from "../prisma"; 8 | import { KnowledgeTool } from "./tools/KnowledgeTool"; 9 | import { TimeframeTranslationTool } from "./tools/TimeframeTranslationTool"; 10 | import { generateSREAssistantPrompt } from "../prompts/sre-assistant"; 11 | 12 | export class SreAssistant extends BaseAssistant { 13 | alertId: string | undefined; 14 | interactionContext: { 15 | username: string; 16 | date: string; 17 | }; 18 | 19 | constructor( 20 | threadId: string, 21 | alertId: string | undefined = undefined, 22 | interactionContext: { 23 | username: string; 24 | date: string; 25 | }, 26 | config?: Partial, 27 | ) { 28 | super(threadId, { 29 | assistant_id: process.env.OPENAI_ASSISTANT_ID as string, 30 | temperature: 1, 31 | parallel_tool_calls: true, 32 | max_completion_tokens: 800, 33 | ...config, 34 | }); 35 | 36 | this.interactionContext = interactionContext; 37 | this.alertId = alertId; 38 | } 39 | 40 | protected async getInstructions(): Promise { 41 | let alertSummary = ""; 42 | if (this.alertId) { 43 | const alert = await prisma.alert.findUniqueOrThrow({ 44 | where: { 45 | id: this.alertId, 46 | }, 47 | select: { 48 | summary: true, 49 | }, 50 | }); 51 | 52 | alertSummary = alert.summary; 53 | } 54 | 55 | return generateSREAssistantPrompt( 56 | this.interactionContext["username"], 57 | this.interactionContext["date"], 58 | alertSummary, 59 | ); 60 | } 61 | 62 | protected async getTools(): Promise { 63 | if (!this.alertId) { 64 | return [ 65 | new ChecklyTool(this), 66 | new GitHubTool(this), 67 | new KnowledgeTool(this), 68 | new TimeframeTranslationTool(this), 69 | ]; 70 | } 71 | 72 | const searchContextTool = new SearchContextTool(this); 73 | await searchContextTool.init(); 74 | return [ 75 | searchContextTool, 76 | new ChecklyTool(this), 77 | new GitHubTool(this), 78 | new KnowledgeTool(this), 79 | ]; 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /src/sre-assistant/sre-assistant.spec.ts: -------------------------------------------------------------------------------- 1 | import dotenv from "dotenv"; 2 | import { SreAssistant } from "./SreAssistant"; 3 | import { getRunMessages } from "../ai/utils"; 4 | import OpenAI from "openai"; 5 | 6 | dotenv.config(); 7 | 8 | const OPENAI_API_KEY = process.env.OPENAI_API_KEY!; 9 | 10 | jest.setTimeout(120000); // Set timeout to 120 seconds 11 | 12 | describe.skip("SreAssistant Tests", () => { 13 | let openai; 14 | 15 | beforeAll(() => { 16 | console.log("OPENAI_API_KEY", OPENAI_API_KEY); 17 | 18 | openai = new OpenAI({ 19 | apiKey: 
OPENAI_API_KEY, 20 | }); 21 | }); 22 | 23 | it("should handle a user message and respond", async () => { 24 | const alertId = "test"; 25 | const thread = await openai.beta.threads.create(); 26 | const assistant = new SreAssistant(thread.id, alertId, { 27 | username: "Test User", 28 | date: new Date().toISOString(), 29 | }); 30 | const userMessage = await assistant.addMessage("Hi"); 31 | const responseMessages = await assistant 32 | .runSync() 33 | .then((run) => getRunMessages(thread.id, run.id)); 34 | 35 | console.log("Assistant response: ", responseMessages); 36 | 37 | expect(responseMessages.length).toBeGreaterThan(0); 38 | }); 39 | }); 40 | -------------------------------------------------------------------------------- /src/sre-assistant/tools/ChecklyTool.ts: -------------------------------------------------------------------------------- 1 | import { generateObject } from "ai"; 2 | import { stringify } from "yaml"; 3 | import { z } from "zod"; 4 | import { createToolOutput, createToolParameters, Tool } from "../../ai/Tool"; 5 | import { checkly } from "../../checkly/client"; 6 | import { 7 | mapCheckResultToContextValue, 8 | mapCheckToContextValue, 9 | } from "../../checkly/utils"; 10 | import { checklyToolPrompt } from "../../prompts/checkly"; 11 | import { SreAssistant } from "../SreAssistant"; 12 | 13 | const parameters = createToolParameters( 14 | z.object({ 15 | action: z 16 | .enum([ 17 | "getCheck", 18 | "getCheckResult", 19 | "getAllFailingChecks", 20 | "searchCheck", 21 | ]) 22 | .describe("The action to perform on the Checkly API"), 23 | checkId: z 24 | .string() 25 | .describe( 26 | "The ID of the Check to get information about. Omit this field for the 'getChecksStatus' action. Required for the 'getCheck' and 'getCheckResult' actions.", 27 | ) 28 | .optional(), 29 | query: z 30 | .string() 31 | .describe( 32 | "A query to search for checks. 
Use this field only for the 'searchCheck' action.", 33 | ) 34 | .optional(), 35 | }), 36 | ); 37 | 38 | const outputSchema = createToolOutput( 39 | z.string().describe("The response from the Checkly API"), 40 | ); 41 | 42 | export class ChecklyTool extends Tool< 43 | typeof parameters, 44 | typeof outputSchema, 45 | SreAssistant 46 | > { 47 | static parameters = parameters; 48 | static outputSchema = outputSchema; 49 | 50 | constructor(agent: SreAssistant) { 51 | super({ 52 | name: "ChecklyAPI", 53 | description: 54 | "Interact with the Checkly API to retrieve relevant context about checks and check results.", 55 | parameters, 56 | agent, 57 | }); 58 | } 59 | 60 | async execute(input: z.infer) { 61 | if (input.action === "getCheck") { 62 | if (!input.checkId) { 63 | return "Check ID is required"; 64 | } 65 | 66 | const check = await checkly.getCheck(input.checkId!); 67 | return stringify({ 68 | ...mapCheckToContextValue(check), 69 | script: check.script, 70 | }); 71 | } else if (input.action === "getCheckResult") { 72 | if (!input.checkId) { 73 | return "Check ID is required"; 74 | } 75 | 76 | const results = await checkly 77 | .getCheckResults(input.checkId!, undefined, 1) 78 | .then((result) => { 79 | return result[0]; 80 | }); 81 | 82 | if (!results) { 83 | return "No results found"; 84 | } 85 | 86 | return stringify(mapCheckResultToContextValue(results)); 87 | } else if (input.action === "getAllFailingChecks") { 88 | const status = await checkly.getPrometheusCheckStatus(); 89 | return stringify(status.failing); 90 | } else if (input.action === "searchCheck") { 91 | const checks = await checkly.getChecks(); 92 | const search = await generateObject( 93 | checklyToolPrompt(checks, input.query), 94 | ); 95 | 96 | const relevantCheck = checks.find((c) => c.id === search.object.checkId); 97 | 98 | if (!relevantCheck) { 99 | return "No relevant check found"; 100 | } 101 | 102 | return stringify({ 103 | ...mapCheckToContextValue(relevantCheck), 104 | script: relevantCheck.script, 105 | }); 106 | } 107 | 108 | return "Invalid action"; 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /src/sre-assistant/tools/GitHubTool.ts: -------------------------------------------------------------------------------- 1 | import { z } from "zod"; 2 | import { createToolOutput, createToolParameters, Tool } from "../../ai/Tool"; 3 | import { SreAssistant } from "../SreAssistant"; 4 | import { stringify } from "yaml"; 5 | import GitHubAPI from "../../github/github"; 6 | 7 | const githubApi = new GitHubAPI(process.env.CHECKLY_GITHUB_TOKEN || ""); 8 | const MAX_RESPONSE_LENGTH = 90000; 9 | const parameters = createToolParameters( 10 | z.object({ 11 | action: z 12 | .enum(["getCommitHistory", "listRepositories"]) 13 | .describe("The action to perform on the GitHub API"), 14 | repo: z 15 | .string() 16 | .describe( 17 | "The full_name of the repository to get information about (e.g. 
'checkly/checkly-cli')", 18 | ) 19 | .optional(), 20 | }), 21 | ); 22 | 23 | const outputSchema = createToolOutput( 24 | z.string().describe("The response from the GitHub API"), 25 | ); 26 | 27 | export class GitHubTool extends Tool< 28 | typeof parameters, 29 | typeof outputSchema, 30 | SreAssistant 31 | > { 32 | static parameters = parameters; 33 | static outputSchema = outputSchema; 34 | 35 | constructor(agent: SreAssistant) { 36 | super({ 37 | name: "GitHubAPI", 38 | description: 39 | "Interact with the GitHub API to retrieve relevant context about repositories and commits.", 40 | parameters, 41 | agent, 42 | }); 43 | } 44 | 45 | async execute(input: z.infer) { 46 | if (input.action === "getCommitHistory") { 47 | const [owner, repo] = input.repo!.split("/"); 48 | const commits = await githubApi.getCommits(owner, repo); 49 | return stringify( 50 | commits.map((c) => ({ 51 | sha: c.sha, 52 | message: c.commit.message, 53 | author: c.commit.author, 54 | url: c.html_url, 55 | files: c.files?.map((f) => ({ 56 | filename: f.filename, 57 | status: f.status, 58 | patch: f.patch, 59 | url: f.blob_url, 60 | })), 61 | })), 62 | ).slice(0, MAX_RESPONSE_LENGTH); 63 | } else if (input.action === "listRepositories") { 64 | const repos = await githubApi.queryRepositories( 65 | process.env.GITHUB_ORG as string, 66 | ); 67 | return stringify( 68 | repos.map((r) => ({ 69 | full_name: r.full_name, 70 | description: r.description, 71 | last_pushed: r.pushed_at, 72 | url: r.html_url, 73 | })), 74 | ); 75 | } 76 | 77 | return "Invalid action"; 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /src/sre-assistant/tools/GithubAgentInteractionTool.ts: -------------------------------------------------------------------------------- 1 | import { z } from "zod"; 2 | import { createToolOutput, createToolParameters, Tool } from "../../ai/Tool"; 3 | import { SreAssistant } from "../SreAssistant"; 4 | import { getOpenaiSDKClient } from "../../ai/openai"; 5 | import GitHubAPI from "../../github/github"; 6 | import dotenv from "dotenv"; 7 | import { GithubAgent } from "../../github/agent"; 8 | import { stringify } from "yaml"; 9 | 10 | dotenv.config(); 11 | 12 | const CHECKLY_GITHUB_TOKEN = process.env.CHECKLY_GITHUB_TOKEN!; 13 | 14 | const parameters = createToolParameters( 15 | z.object({ 16 | request: z 17 | .string() 18 | .describe( 19 | "A request for the GitHub NLP agent. For example: what changed in the ui since yesterday", 20 | ), 21 | }), 22 | ); 23 | 24 | const outputSchema = createToolOutput(z.string()); 25 | 26 | export class GithubAgentInteractionTool extends Tool< 27 | typeof parameters, 28 | typeof outputSchema, 29 | SreAssistant 30 | > { 31 | static parameters = parameters; 32 | static outputSchema = outputSchema; 33 | 34 | constructor(agent: SreAssistant) { 35 | super({ 36 | name: "GithubAgentInteraction", 37 | description: 38 | "Interact with the GitHub NLP agent to retrieve relevant context for a given request. 
You can use this tool gather information from the context of a GitHub repository.", 39 | parameters, 40 | agent, 41 | }); 42 | } 43 | 44 | async execute(input: z.infer) { 45 | const github = new GitHubAPI(CHECKLY_GITHUB_TOKEN); 46 | let agent = new GithubAgent(getOpenaiSDKClient()("gpt-4o"), github); 47 | let response = await agent.summarizeReleases(input.request, "checkly"); 48 | 49 | return stringify(response); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/sre-assistant/tools/KnowledgeTool.ts: -------------------------------------------------------------------------------- 1 | import { z } from "zod"; 2 | import { createToolOutput, createToolParameters, Tool } from "../../ai/Tool"; 3 | import { SreAssistant } from "../SreAssistant"; 4 | import { getAllDocuments } from "../../knowledge-base/knowledgeBase"; 5 | 6 | const parameters = createToolParameters( 7 | z.object({ 8 | action: z 9 | .enum(["listDocuments", "getOneDocument"]) 10 | .describe("The action to perform on the Knowledge Base"), 11 | documentSlug: z 12 | .string() 13 | .describe( 14 | "The slug of the Document to get information about. Omit this field for the 'listDocuments' action. Required for the 'getOneDocument'", 15 | ) 16 | .optional(), 17 | }), 18 | ); 19 | 20 | const outputSchema = createToolOutput( 21 | z.string().describe("The response from the Knowledge Base"), 22 | ); 23 | 24 | export class KnowledgeTool extends Tool< 25 | typeof parameters, 26 | typeof outputSchema, 27 | SreAssistant 28 | > { 29 | static parameters = parameters; 30 | static outputSchema = outputSchema; 31 | 32 | constructor(agent: SreAssistant) { 33 | super({ 34 | name: "KnowledgeBase", 35 | description: 36 | "Interact with the Knowledge Base to retrieve relevant context about the organisation structure, projects and terminology.", 37 | parameters, 38 | agent, 39 | }); 40 | } 41 | 42 | async execute(input: z.infer) { 43 | if (input.action === "listDocuments") { 44 | const documents = await getAllDocuments(); 45 | 46 | return JSON.stringify( 47 | documents.map((doc) => ({ 48 | slug: doc.slug, 49 | title: doc.title, 50 | summary: doc.summary, 51 | })), 52 | ); 53 | } else if (input.action === "getOneDocument") { 54 | if (!input.documentSlug) { 55 | return "Document slug is required"; 56 | } 57 | 58 | const document = await getAllDocuments().then((docs) => 59 | docs.find((doc) => doc.slug === input.documentSlug), 60 | ); 61 | 62 | if (!document) { 63 | return `Document for slug: ${input.documentSlug} not found`; 64 | } 65 | 66 | return JSON.stringify(document); 67 | } 68 | 69 | return "Invalid action"; 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /src/sre-assistant/tools/SearchContextTool.ts: -------------------------------------------------------------------------------- 1 | import { generateObject } from "ai"; 2 | import { z } from "zod"; 3 | import { ContextKey } from "../../aggregator/ContextAggregator"; 4 | import { createToolOutput, createToolParameters, Tool } from "../../ai/Tool"; 5 | import { prisma } from "../../prisma"; 6 | import { searchContextPrompt } from "../../prompts/search"; 7 | import { SreAssistant } from "../SreAssistant"; 8 | 9 | const parameters = createToolParameters( 10 | z.object({ 11 | query: z 12 | .string() 13 | .describe( 14 | "A concise and specific search query or request for information in natural language.", 15 | ), 16 | contextKey: z 17 | .enum(Object.values(ContextKey) as [string, ...string[]]) 18 | 
.optional() 19 | .describe("A specific context key to filter the search results."), 20 | }), 21 | ); 22 | 23 | const outputSchema = createToolOutput( 24 | z.array( 25 | z.object({ 26 | relevance: z.number(), 27 | context: z.string(), 28 | }), 29 | ), 30 | ); 31 | 32 | export class SearchContextTool extends Tool< 33 | typeof parameters, 34 | typeof outputSchema, 35 | SreAssistant 36 | > { 37 | static parameters = parameters; 38 | static outputSchema = outputSchema; 39 | contextKeys: string[] = Object.values(ContextKey); 40 | 41 | constructor(agent: SreAssistant) { 42 | super({ 43 | name: "SearchContextTool", 44 | description: 45 | "Search for relevant context based on the given query. Extract the most relevant information from the context that relates to the query.", 46 | parameters, 47 | agent, 48 | }); 49 | } 50 | 51 | async init() { 52 | const alertId = this.agent.alertId; 53 | if (!alertId) { 54 | throw new Error("Alert ID not found"); 55 | } 56 | 57 | const contextKeysData = await prisma.alert.findUniqueOrThrow({ 58 | where: { 59 | id: alertId, 60 | }, 61 | select: { 62 | context: { 63 | select: { 64 | key: true, 65 | }, 66 | }, 67 | }, 68 | }); 69 | 70 | if (!contextKeysData.context) { 71 | throw new Error("Alert not found"); 72 | } 73 | 74 | const contextKeys = contextKeysData.context.map((c) => c.key); 75 | this.contextKeys = contextKeys; 76 | 77 | this.description = `Search for relevant context based on the given query. Extract the most relevant information from the context that relates to the query. Available context keys: ${contextKeys.join( 78 | ", ", 79 | )}`; 80 | this.parameters = createToolParameters( 81 | z.object({ 82 | query: z.string().describe("The query to search for in the context"), 83 | contextKey: z 84 | .enum(contextKeys.map((c) => c) as [string, ...string[]]) 85 | .optional() 86 | .describe("The context key to search in."), 87 | }), 88 | ); 89 | } 90 | 91 | async execute(input: z.infer) { 92 | const contextData = await prisma.alertContext.findMany({ 93 | where: { 94 | key: { 95 | in: this.contextKeys, 96 | }, 97 | alertId: this.agent.alertId, 98 | }, 99 | select: { 100 | key: true, 101 | value: true, 102 | }, 103 | }); 104 | 105 | if (!contextData.length) { 106 | throw new Error("No context data found"); 107 | } 108 | 109 | const [prompt, config] = searchContextPrompt(input.query, contextData); 110 | 111 | const relevantContext = await generateObject({ 112 | output: "array", 113 | schema: z.object({ 114 | relevance: z.number(), 115 | context: z.string(), 116 | }), 117 | ...config, 118 | prompt, 119 | }); 120 | 121 | return relevantContext.object 122 | .sort((a, b) => b.relevance - a.relevance) 123 | .slice(0, 30); 124 | } 125 | } 126 | -------------------------------------------------------------------------------- /src/use-cases/analyse-multiple/analyse-multiple-checks.ts: -------------------------------------------------------------------------------- 1 | import { findTargetChecks } from "./find-target-checks"; 2 | import { generateObject } from "ai"; 3 | import { summariseMultipleChecksGoal } from "../../prompts/summarizeCheckGoals"; 4 | import { Check } from "../../checkly/models"; 5 | import { MultipleChecksGoalResponse } from "../../prompts/summarizeCheckGoals"; 6 | 7 | export type MultipleCheckAnalysisResult = { 8 | goalSummary: MultipleChecksGoalResponse; 9 | allAnalysedChecks: Check[]; 10 | }; 11 | 12 | export const analyseMultipleChecks = async ( 13 | arg?: string, 14 | ): Promise => { 15 | const targetChecks = await findTargetChecks(arg); 16 | 17 | 
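// The target checks selected above are handed to summariseMultipleChecksGoal (from
// ../../prompts/summarizeCheckGoals), which presumably builds the structured-output prompt
// for them; generateObject runs that prompt and its parsed object becomes the goalSummary,
// with the model response capped by the maxTokens: 500 option.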
const output = await generateObject( 18 | summariseMultipleChecksGoal(targetChecks, { maxTokens: 500 }), 19 | ); 20 | 21 | return { 22 | goalSummary: output.object, 23 | allAnalysedChecks: targetChecks, 24 | }; 25 | }; 26 | -------------------------------------------------------------------------------- /src/use-cases/analyse-multiple/find-target-checks.ts: -------------------------------------------------------------------------------- 1 | import { checkly } from "../../checkly/client"; 2 | import { keyBy } from "lodash"; 3 | import { Check, CheckGroup } from "../../checkly/models"; 4 | 5 | const parseAsGroupId = (str: string): number | null => { 6 | return /^\d+$/i.test(str) && !Number.isNaN(parseInt(str)) 7 | ? parseInt(str) 8 | : null; 9 | }; 10 | 11 | const applyGroupSettings = ( 12 | checks: Check[], 13 | groupsById: Record, 14 | ): Check[] => { 15 | return checks.map((check: Check) => { 16 | const checkGroup = check.groupId ? groupsById[check.groupId] : null; 17 | if (!checkGroup) { 18 | return check; 19 | } 20 | const checkTags = new Set([...check.tags, ...checkGroup.tags]); 21 | return { 22 | ...check, 23 | tags: Array.from(checkTags), 24 | locations: checkGroup.locations, 25 | group: checkGroup, 26 | }; 27 | }); 28 | }; 29 | 30 | const filterTargetCheck = ( 31 | arg: string | undefined, 32 | checks: Check[], 33 | groupsById: Record, 34 | ): Check[] => { 35 | const shouldTargetAllChecks = !arg; 36 | if (shouldTargetAllChecks) { 37 | return checks; 38 | } 39 | const groupId = parseAsGroupId(arg); 40 | const shouldTargetCheckByGroup = groupId && groupsById[groupId]; 41 | if (shouldTargetCheckByGroup) { 42 | return checks.filter((check) => check.groupId === groupId); 43 | } 44 | 45 | const tag = arg; 46 | return checks.filter((check) => check.tags.includes(tag)); 47 | }; 48 | 49 | const enrichChecks = async ( 50 | checks: Check[], 51 | groupsById: Record, 52 | ): Promise => { 53 | const checksWithDependencies = await Promise.all( 54 | checks.map((check) => 55 | checkly.getCheck(check.id, { includeDependencies: true }), 56 | ), 57 | ); 58 | 59 | // Apply group settings again 60 | return applyGroupSettings(checksWithDependencies, groupsById); 61 | }; 62 | 63 | export const findTargetChecks = async (arg?: string): Promise => { 64 | const checksWithoutGroupSettings = await checkly.getChecks(); 65 | // TODO adapt this to work with more check types 66 | const filteredChecksWithoutGroupSettings = checksWithoutGroupSettings.filter( 67 | (check) => ["BROWSER", "API", "MULTI_STEP"].includes(check.checkType), 68 | ); 69 | const allGroups = await checkly.getCheckGroups(); 70 | const groupsById: Record = keyBy(allGroups, "id"); 71 | const checks = applyGroupSettings( 72 | filteredChecksWithoutGroupSettings, 73 | groupsById, 74 | ); 75 | 76 | const filteredChecks = filterTargetCheck(arg, checks, groupsById); 77 | 78 | return enrichChecks(filteredChecks, groupsById); 79 | }; 80 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "commonjs", 4 | "declaration": true, 5 | "removeComments": true, 6 | "emitDecoratorMetadata": true, 7 | "experimentalDecorators": true, 8 | "allowSyntheticDefaultImports": true, 9 | "target": "ES2021", 10 | "sourceMap": true, 11 | "outDir": "./dist", 12 | "incremental": true, 13 | "skipLibCheck": true, 14 | "strictNullChecks": true, 15 | "noImplicitAny": false, 16 | "strictBindCallApply": false, 17 | 
"forceConsistentCasingInFileNames": false, 18 | "noFallthroughCasesInSwitch": false, 19 | "lib": ["dom", "dom.iterable", "esnext"], 20 | "allowJs": true, 21 | "strict": false, 22 | //"noEmit": true, 23 | "esModuleInterop": true, 24 | "moduleResolution": "node", 25 | "resolveJsonModule": true, 26 | "isolatedModules": true, 27 | "jsx": "preserve", 28 | "plugins": [ 29 | { 30 | "name": "next" 31 | } 32 | ] 33 | }, 34 | "include": ["src/**/*.ts", "tests/**/*.ts", ".next/types/**/*.ts"], 35 | "exclude": ["node_modules"] 36 | } 37 | --------------------------------------------------------------------------------