├── .eslintignore
├── .cursorindexingignore
├── .lsmcp
└── config.json
├── tsconfig.build.json
├── .cursor
├── mcp.json
└── rules
│ ├── cursorignore
│ ├── ripersigma-mcp.mdc
│ ├── mcp-playwright.mdc
│ ├── background-agents.mdc
│ ├── mcp-lsmcp.mdc
│ ├── codeprotection.mdc
│ └── ripersigma105.mdc
├── .dockerignore
├── src
├── logger.ts
├── united-domains.service.ts
├── types.ts
├── dns-auth-hook.ts
├── update-dns.ts
├── utils.ts
├── cert-maintenance.ts
└── npm.service.ts
├── cleanup-hook.sh
├── tsconfig.eslint.json
├── auth-hook.sh
├── .vscode
└── settings.json
├── tsconfig.json
├── LICENSE
├── package.json
├── Dockerfile
├── .github
└── workflows
│ └── docker-publish.yml
├── .gitignore
├── README.md
└── .eslintrc.js
/.eslintignore:
--------------------------------------------------------------------------------
1 | coverage
2 | **/templates
3 | **/generators
4 |
--------------------------------------------------------------------------------
/.cursorindexingignore:
--------------------------------------------------------------------------------
1 |
2 | # Don't index SpecStory auto-save files, but allow explicit context inclusion via @ references
3 | .specstory/**
4 |
--------------------------------------------------------------------------------
/.lsmcp/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "../node_modules/@mizchi/lsmcp/lsmcp.schema.json",
3 | "preset": "typescript",
4 | "settings": {
5 | "autoIndex": true,
6 | "indexConcurrency": 5
7 | }
8 | }
--------------------------------------------------------------------------------
/tsconfig.build.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "./tsconfig.json",
3 | "exclude": [
4 | "node_modules",
5 | "test",
6 | "dist",
7 | "**/*spec.ts",
8 | "test-tools",
9 | "jest.config.ts"
10 | ]
11 | }
12 |
--------------------------------------------------------------------------------
/.cursor/mcp.json:
--------------------------------------------------------------------------------
1 | {
2 | "mcpServers": {
3 | "playwright": {
4 | "command": "npx",
5 | "args": [
6 | "@playwright/mcp@latest"
7 | ]
8 | },
9 | "lsmcp": {
10 | "command": "npx",
11 | "args": ["-y", "@mizchi/lsmcp", "-p", "typescript"],
12 | "workingDirectory": ".",
13 | "env": { "NODE_OPTIONS": "--max-old-space-size=4096" }
14 | }
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | # Dependencies
2 | node_modules
3 | npm-debug.log
4 |
5 | # Build output
6 | dist
7 |
8 | # Git
9 | .git
10 | .gitignore
11 |
12 | # IDE
13 | .vscode
14 | .idea
15 |
16 | # Environment
17 | .env
18 | .env.*
19 |
20 | # Certificates directory (will be mounted)
21 | kb-certs
22 |
23 | # Docker
24 | Dockerfile
25 | .dockerignore
26 |
27 | # Other
28 | *.log
29 | # Ignore markdown files except README.md — in .dockerignore the LAST
30 | # matching pattern wins, so the negation must come after the broad pattern
31 | *.md
32 | !README.md
32 |
--------------------------------------------------------------------------------
/src/logger.ts:
--------------------------------------------------------------------------------
1 | import { color } from './utils';
2 |
3 | export function logStep(msg: string) {
4 | console.log(color.bold(color.cyan(`→ ${ msg }`)));
5 | }
6 |
7 | export function logInfo(msg: string) {
8 | console.log(color.cyan(msg));
9 | }
10 |
11 | export function logWarn(msg: string) {
12 | console.log(color.yellow(msg));
13 | }
14 |
15 | export function logOk(msg: string) {
16 | console.log(color.green(msg));
17 | }
18 |
19 | export function logErr(msg: string) {
20 | console.error(color.red(msg));
21 | }
22 |
--------------------------------------------------------------------------------
/cleanup-hook.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # This script is called by certbot after the challenge is complete
3 | # Environment variables provided by certbot:
4 | # - CERTBOT_DOMAIN: The domain being authenticated
5 | # - CERTBOT_VALIDATION: The validation string
6 | # - CERTBOT_TOKEN: Resource name part of the HTTP-01 challenge
7 |
8 | # Log to stderr for certbot to capture
9 | echo "Cleanup hook called for $CERTBOT_DOMAIN" >&2
10 |
11 | # Remove the challenge file
12 | rm -f /tmp/certbot-dns-challenge
13 |
14 | # Exit successfully
15 | exit 0
16 |
17 |
18 |
--------------------------------------------------------------------------------
/tsconfig.eslint.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "module": "commonjs",
4 | "declaration": true,
5 | "declarationMap": true,
6 | "removeComments": true,
7 | "emitDecoratorMetadata": true,
8 | "experimentalDecorators": true,
9 | "allowSyntheticDefaultImports": true,
10 | "target": "es2017",
11 | "sourceMap": true,
12 | "esModuleInterop": true,
13 | "baseUrl": "./",
14 | "paths": {
15 | },
16 | "incremental": true,
17 | "skipLibCheck": true
18 | },
19 | "exclude": [ "node_modules", "dist", "lib", "generators" ]
20 | }
21 |
--------------------------------------------------------------------------------
/auth-hook.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
# Certbot --manual-auth-hook: publishes the DNS-01 challenge TXT record
# by delegating to the TypeScript dns-auth-hook entry point.
set -euo pipefail

# Provided by certbot:
#   $CERTBOT_DOMAIN      e.g. example.com or home.example.com
#   $CERTBOT_VALIDATION  the TXT value to set

echo "Auth hook called for $CERTBOT_DOMAIN" >&2
echo "Validation string: $CERTBOT_VALIDATION" >&2

# Resolve the repo root so the hook works regardless of certbot's cwd
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Prefer compiled JS if present; otherwise fall back to ts-node
if [[ -f "$SCRIPT_DIR/dist/dns-auth-hook.js" ]]; then
  node "$SCRIPT_DIR/dist/dns-auth-hook.js" "$CERTBOT_DOMAIN" "$CERTBOT_VALIDATION"
else
  npx ts-node "$SCRIPT_DIR/src/dns-auth-hook.ts" "$CERTBOT_DOMAIN" "$CERTBOT_VALIDATION"
fi
19 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "editor.rulers": [
3 | 80,
4 | 120
5 | ],
6 | "workbench.colorCustomizations": {
7 | "activityBar.activeBackground": "#3e138a",
8 | "activityBar.background": "#3e138a",
9 | "activityBar.foreground": "#e7e7e7",
10 | "activityBar.inactiveForeground": "#e7e7e799",
11 | "activityBarBadge.background": "#a84c17",
12 | "activityBarBadge.foreground": "#e7e7e7",
13 | "commandCenter.border": "#e7e7e799",
14 | "sash.hoverBorder": "#3e138a",
15 | "statusBar.background": "#2a0d5d",
16 | "statusBar.foreground": "#e7e7e7",
17 | "statusBarItem.hoverBackground": "#3e138a",
18 | "statusBarItem.remoteBackground": "#2a0d5d",
19 | "statusBarItem.remoteForeground": "#e7e7e7",
20 | "titleBar.activeBackground": "#2a0d5d",
21 | "titleBar.activeForeground": "#e7e7e7",
22 | "titleBar.inactiveBackground": "#2a0d5d99",
23 | "titleBar.inactiveForeground": "#e7e7e799"
24 | }
25 | }
--------------------------------------------------------------------------------
/src/united-domains.service.ts:
--------------------------------------------------------------------------------
1 | import { logErr, logOk, logStep } from './logger';
2 | import { updateDNSChallenge } from './update-dns';
3 |
4 | export async function updateUnitedDomainsDNS(
5 | domain: string,
6 | challenge: string
7 | ): Promise {
8 | const username = process.env.UD_USERNAME;
9 | const password = process.env.UD_PASSWORD;
10 |
11 | if (!username || !password) {
12 | throw new Error('UD_USERNAME and UD_PASSWORD environment variables must be set');
13 | }
14 |
15 | try {
16 | logStep('Updating DNS record using Playwright...');
17 | const entry = `_acme-challenge.${ domain }`;
18 | await updateDNSChallenge({
19 | username,
20 | password,
21 | entry,
22 | challengeString: challenge,
23 | domain
24 | });
25 |
26 | logOk('DNS record updated successfully');
27 | return true;
28 | } catch (error) {
29 | logErr(`Failed to update DNS: ${ error.message }`);
30 | if (error.stack) {
31 | logErr(`Stack trace: ${ error.stack }`);
32 | }
33 | return false;
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "module": "commonjs",
4 | "declaration": true,
5 | "removeComments": true,
6 | "emitDecoratorMetadata": true,
7 | "experimentalDecorators": true,
8 | "allowSyntheticDefaultImports": true,
9 | "target": "es2017",
10 | "sourceMap": true,
11 | "outDir": "./dist",
12 | "baseUrl": "./",
13 | "incremental": true,
14 | "skipLibCheck": true,
15 | "strictNullChecks": false,
16 | "noImplicitAny": false,
17 | "strictBindCallApply": false,
18 | "forceConsistentCasingInFileNames": false,
19 | "noFallthroughCasesInSwitch": false,
20 | "types": [
21 | "node",
22 | "jest"
23 | ],
24 | "paths": {
25 | "@gs-utils": [ "./src/utils/index" ],
26 | "@gs-logger": [ "src/logger/index" ],
27 | "@gs-commands": [ "src/commands/index" ],
28 | "@gs-abstracts": [ "src/abstracts/index" ]
29 | }
30 | },
31 | "exclude": [
32 | "test-tools",
33 | "node_modules",
34 | "dist",
35 | "jest.config.ts",
36 | "**/*.spec.ts"
37 | ]
38 | }
39 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2025 thatkookooguy
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/.cursor/rules/cursorignore:
--------------------------------------------------------------------------------
1 | # CursorRIPER Framework ignore patterns
2 | # This file helps control which files are processed by Cursor's AI features
3 |
4 | # Temporary files
5 | *.tmp
6 | *.temp
7 | *.swp
8 | *~
9 |
10 | # Build artifacts
11 | build/
12 | dist/
13 | out/
14 | .next/
15 | .nuxt/
16 | .output/
17 | .cache/
18 | .parcel-cache/
19 | .webpack/
20 | .rollup.cache/
21 |
22 | # Dependency directories
23 | node_modules/
24 | bower_components/
25 | jspm_packages/
26 | vendor/
27 | .pnp/
28 | .pnp.js
29 |
30 | # Log files
31 | logs/
32 | *.log
33 | npm-debug.log*
34 | yarn-debug.log*
35 | yarn-error.log*
36 | lerna-debug.log*
37 |
38 | # Environment files (may contain secrets)
39 | .env
40 | .env.local
41 | .env.development.local
42 | .env.test.local
43 | .env.production.local
44 | .env*.local
45 |
46 | # Debug files
47 | .nyc_output/
48 | coverage/
49 | .coverage/
50 | .coverage.*
51 | htmlcov/
52 | .hypothesis/
53 | .pytest_cache/
54 | nosetests.xml
55 | coverage.xml
56 |
57 | # IDE & editor directories
58 | .idea/
59 | .vscode/
60 | .vs/
61 | *.sublime-project
62 | *.sublime-workspace
63 | .project
64 | .classpath
65 | .c9/
66 | *.launch
67 | .settings/
68 | .vim/
69 | .DS_Store
70 |
71 | # Framework specific (uncomment as needed)
72 | # .cursor/rules/archive/
73 | # memory-bank/backups/
74 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "cert-management",
3 | "version": "1.0.0",
4 | "description": "",
5 | "main": "index.js",
6 | "scripts": {
7 | "start": "node dist/cert-maintenance.js",
8 | "start:dev": "ts-node src/cert-maintenance.ts",
9 | "local-prepare": "npm run lsmcp:init && npm run lsmcp:index && npm run playwright:install",
10 | "build": "tsc",
11 | "playwright:install": "npx playwright install --with-deps chromium",
12 | "lsmcp:init": "npx @mizchi/lsmcp init -p typescript",
13 | "lsmcp:index": "npx @mizchi/lsmcp index -p typescript",
14 | "test": "echo \"Error: no test specified\" && exit 1",
15 | "lint": "eslint \"{src,apps,libs,test}/**/*.ts\"",
16 | "lint:fix": "eslint \"{src,apps,libs,test}/**/*.ts\" --fix"
17 | },
18 | "author": "",
19 | "license": "MIT",
20 | "devDependencies": {
21 | "@typescript-eslint/eslint-plugin": "^6.19.1",
22 | "@typescript-eslint/parser": "^6.19.1",
23 | "eslint": "^8.56.0",
24 | "eslint-plugin-import": "^2.29.1",
25 | "eslint-plugin-simple-import-sort": "^10.0.0",
26 | "eslint-plugin-unused-imports": "^3.0.0",
27 | "jest": "^30.1.1",
28 | "ts-node": "^10.9.2",
29 | "typescript": "^5.9.2"
30 | },
31 | "dependencies": {
32 | "axios": "^1.11.0",
33 | "form-data": "^4.0.4",
34 | "playwright": "^1.55.0"
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # Use the official Playwright image as base
2 | FROM mcr.microsoft.com/playwright:v1.55.0-jammy
3 |
4 | # Set working directory
5 | WORKDIR /work
6 |
7 | # Set timezone environment variable
8 | ENV TZ=Asia/Jerusalem
9 | ENV DEBIAN_FRONTEND=noninteractive
10 |
11 | # Install required packages and clean up
12 | RUN apt-get update && \
13 | ln -fs /usr/share/zoneinfo/$TZ /etc/localtime && \
14 | apt-get install -y --no-install-recommends \
15 | tzdata \
16 | certbot \
17 | dnsutils && \
18 | dpkg-reconfigure -f noninteractive tzdata && \
19 | rm -rf /var/lib/apt/lists/*
20 |
21 | # Copy package files
22 | COPY package*.json ./
23 |
24 | # Install dependencies
25 | RUN npm ci || npm install
26 |
27 | # Copy source code
28 | COPY . .
29 |
30 | # Build the application
31 | RUN npm run build
32 |
33 | # Define non-sensitive environment variables with defaults
34 | ENV NPM_BASE_URL=""
35 | ENV WILDCARDS=""
36 |
37 | # Required environment variables that should be passed at runtime:
38 | # - NPM_IDENTITY (Nginx Proxy Manager email)
39 | # - NPM_SECRET (Nginx Proxy Manager password)
40 | # - UD_USERNAME (United Domains username)
41 | # - UD_PASSWORD (United Domains password)
42 |
43 | # Create directory for certificates
44 | RUN mkdir -p /root/kb-certs
45 |
46 | # Set the command to run the application
47 | CMD ["npm", "start"]
48 |
--------------------------------------------------------------------------------
/.github/workflows/docker-publish.yml:
--------------------------------------------------------------------------------
1 | name: Docker Image CI/CD
2 |
3 | on:
4 | push:
5 | branches: [ "main" ]
6 | # Publish semver tags as releases.
7 | tags: [ 'v*.*.*' ]
8 | pull_request:
9 | branches: [ "main" ]
10 |
11 | env:
12 | # Use kibibitopensrc organization
13 | REGISTRY: docker.io
14 | IMAGE_NAME: kibibitopensrc/cert-management
15 |
16 | jobs:
17 | build:
18 | runs-on: ubuntu-latest
19 | permissions:
20 | contents: read
21 | packages: write
22 |
23 | steps:
24 | - name: Checkout repository
25 | uses: actions/checkout@v4
26 |
27 | # Set up BuildKit Docker container builder to enable more efficient builds
28 | - name: Set up Docker Buildx
29 | uses: docker/setup-buildx-action@v3
30 |
31 | # Login against Docker Hub registry
32 | - name: Log into registry ${{ env.REGISTRY }}
33 | if: github.event_name != 'pull_request'
34 | uses: docker/login-action@v3
35 | with:
36 | registry: ${{ env.REGISTRY }}
37 | username: ${{ secrets.DOCKER_USERNAME }}
38 | password: ${{ secrets.DOCKER_PASSWORD }}
39 |
40 | # Extract metadata (tags, labels) for Docker
41 | - name: Extract Docker metadata
42 | id: meta
43 | uses: docker/metadata-action@v5
44 | with:
45 | images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
46 | tags: |
47 | type=raw,value=latest,enable={{is_default_branch}}
48 | type=ref,event=branch
49 | type=ref,event=pr
50 | type=semver,pattern={{version}}
51 | type=semver,pattern={{major}}.{{minor}}
52 | type=semver,pattern={{major}}
53 | type=sha
54 |
55 | # Build and push Docker image with Buildx
56 | - name: Build and push Docker image
57 | id: build-and-push
58 | uses: docker/build-push-action@v5
59 | with:
60 | context: .
61 | push: ${{ github.event_name != 'pull_request' }}
62 | tags: ${{ steps.meta.outputs.tags }}
63 | labels: ${{ steps.meta.outputs.labels }}
64 | cache-from: type=gha
65 | cache-to: type=gha,mode=max
66 |
--------------------------------------------------------------------------------
/src/types.ts:
--------------------------------------------------------------------------------
// NPM (Nginx Proxy Manager) API Types

// Authentication

// Request body for the NPM login/token endpoint.
export interface INpmLoginRequest {
  identity: string; // NPM account email (see NPM_IDENTITY in Dockerfile)
  secret: string; // NPM account password (see NPM_SECRET in Dockerfile)
}

// Successful login response: bearer token used on subsequent API calls.
export interface INpmLoginResponse {
  token: string;
}

// Certificates

// A certificate record as returned by the NPM API.
export interface INpmCertificate {
  id: string;
  created_on: string;
  modified_on: string;
  owner_user_id: number;
  is_deleted: boolean;
  provider: string; // e.g. letsencrypt vs. custom — NOTE(review): confirm values
  nice_name: string;
  domain_names: string[];
  expires_on: string;
  meta: {
    certificate: string; // PEM certificate body
    certificate_key: string; // PEM private key
  };
}

// Payload for creating a (custom) certificate in NPM.
export interface INpmCreateCertificateRequest {
  provider: string;
  nice_name: string;
  domain_names: string[];
  meta: {
    certificate: string;
    certificate_key: string;
  };
}

// Hosts

// Fields common to every NPM host type (proxy, redirection, ...).
export interface INpmHostBase {
  id: string;
  created_on: string;
  modified_on: string;
  owner_user_id: number;
  owner?: string;
  is_deleted: boolean;
  deleted_at?: string;
  status?: string;
  // NOTE(review): declared as string OR string[] — presumably the API returns
  // an array but some call sites pass a single name; confirm against usage.
  domain_names: string | string[];
  certificate_id: string | null; // null when no certificate is attached
  ssl_forced: boolean;
  http2_support: boolean;
  hsts_enabled: boolean;
  hsts_subdomains: boolean;
  enabled: boolean;
}

// Union of every concrete host shape handled by this tool.
export type INpmAllHosts = INpmProxyHost | INpmRedirectionHost;

// A reverse-proxy host entry (NPM "Proxy Hosts" tab).
export interface INpmProxyHost extends INpmHostBase {
  forward_scheme: string;
  forward_host: string;
  forward_port: number;
  access_list_id: string | null;
  advanced_config: string;
  block_exploits: boolean;
  caching_enabled: boolean;
  allow_websocket_upgrade: boolean;
  // Can be detailed further if needed
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  locations: any[];
  meta: {
    letsencrypt_agree: boolean;
    dns_challenge: boolean;
  };
}

// A redirection host entry (NPM "Redirection Hosts" tab).
export interface INpmRedirectionHost extends INpmHostBase {
  forward_domain_name: string;
  forward_scheme: string;
  forward_http_code: number; // HTTP status used for the redirect (e.g. 301/302)
  preserve_path: boolean;
  block_exploits: boolean;
  advanced_config: string;
  meta: {
    letsencrypt_agree: boolean;
    dns_challenge: boolean;
  };
}


// Host Groups Type
// API path segments for the two host collections handled by this tool.
export type INpmHostGroup = 'proxy-hosts' | 'redirection-hosts';
95 |
--------------------------------------------------------------------------------
/.cursor/rules/ripersigma-mcp.mdc:
--------------------------------------------------------------------------------
1 | ---
2 | description: MCP Services Configuration for CursorRIPER.sigma
3 | globs:
4 | alwaysApply: true
5 | ---
6 |
7 | # CursorRIPER♦Σ MCP Services Configuration
8 |
9 | ## 🛠️ MCP Services Selection
10 | # Uncomment the services you want to use and ensure the corresponding MCP server is installed
11 | # All services will follow the CursorRIPER♦Σ permission model and symbolic notation
12 |
13 | ## 🎯 Active MCPs (enable in Cursor as needed)
14 | - Playwright: `.cursor/rules/mcp-playwright.mdc` (browser automation/testing)
15 | - lsmcp: `.cursor/rules/mcp-lsmcp.mdc` (TypeScript LSP workflow and discipline)
16 | - Atlassian (read-first): Confluence/Jira investigation; escalate to full only when writes are explicitly needed
17 | - GCP: on‑demand environments and logs
18 |
19 | ## 🧭 Usage Guidance (proxy governs tool/read/write exposure)
20 | - lsmcp workflow: verify reality → check impact → minimal coherent edit → diagnostics clean → finish. No undefined symbols/imports; fix diagnostics before final edits.
21 | - Playwright: author and validate E2E; keep suites stable (avoid committing debug-only steps).
22 | - Atlassian: prefer the read‑only proxy entry by default; use full only per task requirement.
23 | - GCP: keep queries scoped to target projects/environments
24 | - GitHub: use `gh` CLI for repo/PR/issues; `GITHUB_TOKEN` is present in the environment.
25 |
26 | ## 🔗 Available MCP Services
27 |
28 | # This file serves as a central configuration point for all MCP services in CursorRIPER♦Σ.
29 | # To enable a service, uncomment its @file line above.
30 | # To disable a service, comment out its @file line above.
31 | # Each service follows the CursorRIPER♦Σ permission model and respects RIPER modes.
32 |
33 | ## ⚙️ Setup Instructions
34 |
35 | # 1. Ensure you have the desired MCP servers installed
36 | # 2. Configure .cursor/mcp.json with appropriate server settings
37 | # 3. Enable only Playwright, lsmcp, Atlassian‑read, and GCP by default in Cursor
38 | # 4. Restart Cursor to apply changes
39 |
40 | ## 📋 MCP Server Requirements
41 |
42 | # Filesystem: npm install -g @modelcontextprotocol/server-filesystem
43 | # Web Search: npm install -g @modelcontextprotocol/server-websearch
44 | # GitHub: npm install -g @modelcontextprotocol/server-github
45 | # Database: npm install -g @modelcontextprotocol/server-database
46 | # AI Model: npm install -g @modelcontextprotocol/server-aimodel
47 | # Data Viz: npm install -g @modelcontextprotocol/server-dataviz
48 | # API Tools: npm install -g @modelcontextprotocol/server-apitools
49 |
50 | # See full MCP documentation at: https://modelcontextprotocol.github.io/
51 |
--------------------------------------------------------------------------------
/src/dns-auth-hook.ts:
--------------------------------------------------------------------------------
1 | /* eslint-disable */
2 | import { spawn } from 'child_process';
3 | import { updateDNSChallenge } from './update-dns';
4 |
// Authoritative nameservers for United Domains — queried directly so zone
// updates are seen without resolver caching. NOTE(review): only one NS host
// is listed; confirm whether the other ns*.udag.de servers should be checked.
const UD_NS = ['ns.udag.de'];

// The zone root managed in UD's UI (from $DOMAIN). Empty string when DOMAIN
// is unset — entry-host computation below then keeps the full FQDN.
const ZONE_ROOT = process.env.DOMAIN || '';
10 |
/** Promise-based delay helper: resolves after `ms` milliseconds. */
function sleep(ms: number) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
12 |
13 | async function digTxtFqdnOnce(fqdn: string, ns: string): Promise {
14 | return new Promise((resolve, reject) => {
15 | const p = spawn('dig', ['+short', fqdn, 'TXT', '@' + ns]);
16 | let out = ''; let err = '';
17 | p.stdout.on('data', d => out += d.toString());
18 | p.stderr.on('data', d => err += d.toString());
19 | p.on('close', code => {
20 | if (code !== 0) return reject(new Error(err || `dig exited ${code}`));
21 | const lines = out.trim().split('\n').filter(Boolean).map(s => s.replace(/^"|"$/g, ''));
22 | resolve(lines);
23 | });
24 | });
25 | }
26 |
27 | async function waitForAuthoritativeTXT(fqdn: string, expected: string) {
28 | const maxAttempts = 30; // ~15 minutes total if 30*30s
29 | const delayMs = 30_000;
30 |
31 | for (let attempt = 1; attempt <= maxAttempts; attempt++) {
32 | for (const ns of UD_NS) {
33 | try {
34 | const vals = await digTxtFqdnOnce(fqdn, ns);
35 | if (vals.includes(expected)) return;
36 | } catch { /* ignore and try next ns */ }
37 | }
38 | if (attempt < maxAttempts) await sleep(delayMs);
39 | }
40 | throw new Error(`TXT not propagated for ${fqdn} to all UD NSs`);
41 | }
42 |
// Certbot --manual-auth-hook entry point. Invoked as:
//   dns-auth-hook <CERTBOT_DOMAIN> <CERTBOT_VALIDATION>
// Publishes the ACME TXT challenge to United Domains, then exits 0 so
// certbot proceeds to validation.
async function main() {
  const [, , certbotDomain, token] = process.argv;
  if (!process.env.UD_USERNAME || !process.env.UD_PASSWORD) {
    throw new Error('UD_USERNAME and UD_PASSWORD must be set');
  }

  // Build the *host* entry used in UD’s zone editor for ZONE_ROOT.
  // Examples:
  //   CERTBOT_DOMAIN = example.com      -> entry = _acme-challenge
  //   CERTBOT_DOMAIN = home.example.com -> entry = _acme-challenge.home
  //   CERTBOT_DOMAIN = apps.example.com -> entry = _acme-challenge.apps
  // NOTE(review): when DOMAIN (ZONE_ROOT) is unset, the replace() is a no-op
  // and the full FQDN is used as the entry — confirm DOMAIN is always set.
  const entryHost = `_acme-challenge.${certbotDomain}`.replace(`.${ZONE_ROOT}`, '');

  // Create/update the TXT record through the UD web UI (Playwright).
  await updateDNSChallenge({
    username: process.env.UD_USERNAME!,
    password: process.env.UD_PASSWORD!,
    entry: entryHost,
    challengeString: token,
    domain: ZONE_ROOT
  });

  // Short grace period before handing control back to certbot.
  await sleep(5000);

  // Authoritative propagation wait is currently disabled; `fqdn` is kept so
  // the commented call below can be re-enabled without edits.
  const fqdn = `_acme-challenge.${certbotDomain}.`;
  // await waitForAuthoritativeTXT(fqdn, token);
  process.exit(0);
}

// Fail loudly: certbot treats a non-zero exit as a failed auth hook.
main().catch((e) => {
  console.error(e?.stack || String(e));
  process.exit(1);
});
77 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 |
9 | # Diagnostic reports (https://nodejs.org/api/report.html)
10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
11 |
12 | # Runtime data
13 | pids
14 | *.pid
15 | *.seed
16 | *.pid.lock
17 |
18 | # Directory for instrumented libs generated by jscoverage/JSCover
19 | lib-cov
20 |
21 | # Coverage directory used by tools like istanbul
22 | coverage
23 | *.lcov
24 |
25 | # nyc test coverage
26 | .nyc_output
27 |
28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
29 | .grunt
30 |
31 | # Bower dependency directory (https://bower.io/)
32 | bower_components
33 |
34 | # node-waf configuration
35 | .lock-wscript
36 |
37 | # Compiled binary addons (https://nodejs.org/api/addons.html)
38 | build/Release
39 |
40 | # Dependency directories
41 | node_modules/
42 | jspm_packages/
43 |
44 | # Snowpack dependency directory (https://snowpack.dev/)
45 | web_modules/
46 |
47 | # TypeScript cache
48 | *.tsbuildinfo
49 |
50 | # Optional npm cache directory
51 | .npm
52 |
53 | # Optional eslint cache
54 | .eslintcache
55 |
56 | # Optional stylelint cache
57 | .stylelintcache
58 |
59 | # Optional REPL history
60 | .node_repl_history
61 |
62 | # Output of 'npm pack'
63 | *.tgz
64 |
65 | # Yarn Integrity file
66 | .yarn-integrity
67 |
68 | # dotenv environment variable files
69 | .env
70 | .env.*
71 | !.env.example
72 |
73 | # parcel-bundler cache (https://parceljs.org/)
74 | .cache
75 | .parcel-cache
76 |
77 | # Next.js build output
78 | .next
79 | out
80 |
81 | # Nuxt.js build / generate output
82 | .nuxt
83 | dist
84 | .output
85 |
86 | # Gatsby files
87 | .cache/
88 | # Comment in the public line in if your project uses Gatsby and not Next.js
89 | # https://nextjs.org/blog/next-9-1#public-directory-support
90 | # public
91 |
92 | # vuepress build output
93 | .vuepress/dist
94 |
95 | # vuepress v2.x temp and cache directory
96 | .temp
97 | .cache
98 |
99 | # Sveltekit cache directory
100 | .svelte-kit/
101 |
102 | # vitepress build output
103 | **/.vitepress/dist
104 |
105 | # vitepress cache directory
106 | **/.vitepress/cache
107 |
108 | # Docusaurus cache and generated files
109 | .docusaurus
110 |
111 | # Serverless directories
112 | .serverless/
113 |
114 | # FuseBox cache
115 | .fusebox/
116 |
117 | # DynamoDB Local files
118 | .dynamodb/
119 |
120 | # Firebase cache directory
121 | .firebase/
122 |
123 | # TernJS port file
124 | .tern-port
125 |
126 | # Stores VSCode versions used for testing VSCode extensions
127 | .vscode-test
128 |
129 | # yarn v3
130 | .pnp.*
131 | .yarn/*
132 | !.yarn/patches
133 | !.yarn/plugins
134 | !.yarn/releases
135 | !.yarn/sdks
136 | !.yarn/versions
137 |
138 | # Vite logs files
139 | vite.config.js.timestamp-*
140 | vite.config.ts.timestamp-*
141 |
142 | .specstory
143 | certificates
144 | # lsmcp cache
145 | .lsmcp/cache
146 | kb-certs
147 | run-command.sh
--------------------------------------------------------------------------------
/src/update-dns.ts:
--------------------------------------------------------------------------------
1 | import { chromium } from 'playwright';
2 |
// Inputs for the Playwright-driven United Domains DNS update.
interface IUpdateDNSOptions {
  username: string; // United Domains account email
  password: string; // United Domains account password
  entry: string; // record host, e.g. "_acme-challenge.home"; zone suffix is stripped
  challengeString: string; // TXT value to store
  domain: string; // zone root shown in the UD portfolio (e.g. "example.com")
  debug?: boolean; // when true, run the browser headed for debugging
}
11 |
12 | async function updateDNSChallenge(options: IUpdateDNSOptions) {
13 | let { username, password, entry, challengeString, domain } = options;
14 | entry = entry.replace(`.${ domain }`, '');
15 |
16 | // Launch browser
17 | const browser = await chromium.launch({
18 | headless: !options.debug
19 | });
20 |
21 | try {
22 | const context = await browser.newContext();
23 | const page = await context.newPage();
24 |
25 | // Navigate to login page
26 | await page.goto('https://www.uniteddomains.com/login');
27 |
28 | // Fill in login form
29 | await page.getByRole('textbox', { name: 'Email Address' }).fill(username);
30 | await page.getByRole('textbox', { name: 'Password' }).fill(password);
31 |
32 | // Submit login form
33 | await page.getByRole('button', { name: 'Log In' }).click();
34 |
35 | // Wait for navigation to portfolio page
36 | await page.waitForURL('**/portfolio');
37 |
38 | // Find and click the DNS link for the domain
39 | const dnsLink = page.getByRole('link', { name: 'DNS' });
40 | await dnsLink.click();
41 |
42 | // Wait for DNS page to load
43 | await page.waitForURL(`**/portfolio/dns/${ domain }`);
44 |
45 | // Wait for Custom Resource Records section
46 | await page.waitForSelector('text=Custom Resource Records', { timeout: 10000 });
47 |
48 | // Look for existing _acme-challenge record
49 | console.log(`Looking for existing record: ${ entry }`);
50 | const existingRecord = page.getByRole('row', { name: new RegExp(`^${ entry } TXT`) });
51 |
52 | if (await existingRecord.count() > 0) {
53 | // Update existing record
54 | await existingRecord.getByRole('button', { name: 'edit' }).click();
55 | await page.getByRole('textbox', { name: 'Text' }).fill(challengeString);
56 | await page.getByRole('cell', { name: 'Save' }).getByRole('button').click();
57 | } else {
58 | // Fill in new record form
59 | await page.getByRole('textbox', { name: '@' }).fill(entry);
60 | await page.getByRole('combobox').selectOption('TXT');
61 | await page.getByRole('textbox', { name: 'Text' }).fill(challengeString);
62 | await page.getByRole('button', { name: 'Add' }).click();
63 | }
64 |
65 | // Wait for success message
66 | await page.waitForSelector('text=DNS Records saved successfully', { timeout: 10000 });
67 |
68 | // Verify the record was updated correctly
69 | const records = await page.$$eval('tr', (rows) => {
70 | return rows.map((row) => {
71 | const cells = Array.from(row.querySelectorAll('td'));
72 | return cells.map((cell) => cell.textContent?.trim());
73 | }).filter((cells) => cells[1] === 'TXT');
74 | });
75 |
76 | if (!records.length) {
77 | throw new Error('DNS record verification failed - record not found or incorrect value');
78 | }
79 |
80 | console.log('DNS record updated and verified successfully');
81 | } catch (error) {
82 | console.error('Error updating DNS record:', error);
83 | throw error;
84 | } finally {
85 | await browser.close();
86 | }
87 | }
88 |
89 | // Example usage:
90 | // updateDNSChallenge({
91 | // username: 'your-username',
92 | // password: 'your-password',
93 | // domain: 'example.com',
94 | // challengeString: 'your-challenge-string'
95 | // });
96 |
97 | export { updateDNSChallenge, IUpdateDNSOptions as UpdateDNSOptions };
98 |
--------------------------------------------------------------------------------
/src/utils.ts:
--------------------------------------------------------------------------------
1 | import * as fs from 'node:fs';
2 | import * as path from 'node:path';
3 | import { promisify } from 'util';
4 |
5 | export const sleep = promisify(setTimeout);
6 |
7 | // Simple color helpers without adding new deps
8 | export const color = {
9 | green: (text: string) => `\x1b[32m${ text }\x1b[0m`,
10 | red: (text: string) => `\x1b[31m${ text }\x1b[0m`,
11 | yellow: (text: string) => `\x1b[33m${ text }\x1b[0m`,
12 | cyan: (text: string) => `\x1b[36m${ text }\x1b[0m`,
13 | bold: (text: string) => `\x1b[1m${ text }\x1b[0m`
14 | };
15 |
16 | export function parseArgs(
17 | argv: string[]
18 | ): Record {
19 | const args = {};
20 | for (let i = 2; i < argv.length; i++) {
21 | const argument = argv[i];
22 | if (isCliFlag(argument)) {
23 | const { key, value } = getFlagKeyValue(argument);
24 | // eslint-disable-next-line no-undefined
25 | const isFlagBoolean = value === undefined;
26 | const isFlagArray = value.includes(',');
27 |
28 | if (isFlagBoolean) {
29 | args[key] = true;
30 | } else if (isFlagArray) {
31 | // Split comma-separated values into array, trim whitespace
32 | args[key] = value.split(',').map((item) => item.trim());
33 | } else {
34 | args[key] = value;
35 | }
36 | }
37 | }
38 | return args;
39 | }
40 |
41 | function isCliFlag(argument: string) {
42 | return argument.startsWith('--');
43 | }
44 |
45 | function getFlagKeyValue(argument: string) {
46 | const [ key, value ] = argument.replace(/^--/, '').split('=');
47 | return { key, value };
48 | }
49 |
50 | export function toDateString(dt: Date) {
51 | const y = dt.getFullYear();
52 | const m = `${ dt.getMonth() + 1 }`.padStart(2, '0');
53 | const d = `${ dt.getDate() }`.padStart(2, '0');
54 | return `${ y }-${ m }-${ d }`;
55 | }
56 |
57 | export function isExpired(expiresOn: string) {
58 | if (!expiresOn) return true;
59 | const t = new Date(expiresOn).getTime();
60 | if (Number.isNaN(t)) return true;
61 | return Date.now() > t;
62 | }
63 |
64 | export function willExpireSoon(expiresOn: string, daysThreshold = 7) {
65 | if (!expiresOn) return true;
66 | const t = new Date(expiresOn).getTime();
67 | if (Number.isNaN(t)) return true;
68 |
69 | const now = Date.now();
70 | const threshold = now + (daysThreshold * 24 * 60 * 60 * 1000); // Convert days to milliseconds
71 |
72 | return t <= threshold;
73 | }
74 |
75 | export function ensureDir(p: string) {
76 | if (!fs.existsSync(p)) fs.mkdirSync(p, { recursive: true });
77 | }
78 |
79 | export function findProjectRoot(startDir: string): string {
80 | let dir = startDir;
81 | for (let i = 0; i < 5; i++) {
82 | if (fs.existsSync(path.join(dir, 'package.json'))) return dir;
83 | const parent = path.dirname(dir);
84 | if (parent === dir) break;
85 | dir = parent;
86 | }
87 | // fallback: parent of src/ or dist/
88 | return path.resolve(startDir, '..');
89 | }
90 |
91 | export function hostMatchesWildcard(hostDomain: string, wildcard: string) {
92 | // Match exactly one label before the base domain
93 | // e.g. '*.example.com' matches 'arcade.example.com' (3 labels),
94 | // but NOT 'audiobookshelf.home.example.com' (4 labels)
95 | const base = wildcard.replace(/^\*\./, '');
96 | if (!hostDomain.endsWith(`.${ base }`)) return false;
97 | const hostLabels: number = hostDomain.split('.').length;
98 | const baseLabels: number = base.split('.').length;
99 |
100 | return hostLabels === (baseLabels + 1);
101 | }
102 |
103 | export function normalizeDomainsField(domains: string | string[]): string[] {
104 | if (!domains) return [];
105 | if (Array.isArray(domains)) return domains;
106 | if (typeof domains === 'string') {
107 | return domains
108 | .split(',')
109 | .map((s) => s.trim())
110 | .filter(Boolean);
111 | }
112 | return [];
113 | }
114 |
--------------------------------------------------------------------------------
/.cursor/rules/mcp-playwright.mdc:
--------------------------------------------------------------------------------
1 | # 🎭 Playwright MCP Integration for CursorRIPER Σ
2 | # Symbol: Υ (Upsilon)
3 | # Version: 1.1.0
4 |
5 | ## 📋 Service Definition
6 | ```
7 | service = "Playwright Automation Operations"
8 | symbol = "Υ"
9 | requires = "playwright (via MCP Proxy)"
10 | ```
11 |
12 | ## 🔧 Operation Mapping
13 | ```
14 | Υ_ops = {
15 | browser: {
16 | launch: "browser_navigate",
17 | close: "browser_close",
18 | screenshot: "browser_take_screenshot"
19 | },
20 | page: {
21 | navigate: "browser_navigate",
22 | click: "browser_click",
23 | type: "browser_type",
24 | select: "browser_select_option",
25 | hover: "browser_hover",
26 | evaluate: "browser_evaluate",
27 | upload: "browser_file_upload",
28 | wait_for: "browser_wait_for"
29 | },
30 | test: {
31 | record: "start_codegen_session",
32 | end_record: "end_codegen_session",
33 | assert: "playwright_expect_response + playwright_assert_response"
34 | },
35 | scrape: {
36 | content: "browser_snapshot",
37 | html: "browser_take_screenshot",
38 | network: "browser_network_requests"
39 | }
40 | }
41 | ```
42 |
43 | ## 🔒 Mode Restrictions
44 | ```
45 | MΥ = {
46 | Ω₁: [scrape_*, screenshot], # RESEARCH: data gathering
47 | Ω₂: [navigate, screenshot, scrape_*], # INNOVATE: exploration
48 | Ω₃: [all_ops], # PLAN: all operations
49 | Ω₄: [test_*, navigate, click, type, assert], # EXECUTE: testing focus
50 | Ω₅: [screenshot, scrape_*, network] # REVIEW: verification
51 | }
52 | ```
53 |
54 | ## 🔑 Permission Matrix
55 | ```
56 | ℙΥ = {
57 | create: [Ω₃, Ω₄], # PLAN/EXECUTE can create tests
58 | read: [Ω₁, Ω₂, Ω₃, Ω₄, Ω₅], # All can read page content
59 | update: [Ω₃, Ω₄], # PLAN/EXECUTE can interact
60 | delete: [] # No delete operations
61 | }
62 | ```
63 |
64 | ## 📍 Context Integration
65 | ```
66 | Γ_browser = {
67 | active_session: browser_instance,
68 | current_url: page.url(),
69 | test_recordings: codegen_sessions[],
70 | console_logs: captured_logs[],
71 | screenshots: saved_screenshots[]
72 | }
73 | ```
74 |
75 | ## ⚡ Command Shortcuts
76 | ```
77 | SΥ = {
78 | !pn: "navigate to URL",
79 | !ps: "take screenshot",
80 | !pc: "click element",
81 | !pf: "type text",
82 | !pt: "start test recording",
83 | !pe: "end test recording",
84 | !pg: "get page content"
85 | }
86 | ```
87 |
88 | ## 🛡️ Protection Levels
89 | ```
90 | ΨΥ = {
91 | navigate: Ψ₂, # GUARDED - URL changes
92 | click: Ψ₃, # INFO - user actions
93 | type: Ψ₂, # GUARDED - form data
94 | test_record: Ψ₅, # TEST - test recording
95 | evaluate: Ψ₆ # CRITICAL - code execution
96 | }
97 | ```
98 |
99 | ## 🔄 Mode-Specific Behaviors
100 | ```
101 | apply_browser_op(op, mode) = {
102 | check: op ∈ MΥ[mode] ? proceed : deny("Operation not allowed in " + mode),
103 | protect: op ∈ ΨΥ ? apply_protection(ΨΥ[op]) : continue,
104 | track: {
105 | log_action(op, selector, value),
106 | capture_state(screenshot_if_needed)
107 | },
108 | execute: Υ_ops[category][operation]()
109 | }
110 | ```
111 |
112 | ## 🎯 Testing Integration
113 | ```
114 | test_workflow = {
115 | start: {
116 | mode: require(Ω₃ ∨ Ω₄),
117 | init: start_codegen_session({
118 | outputPath: "./tests/",
119 | includeComments: true
120 | })
121 | },
122 | record: {
123 | actions: [navigate, click, type, assert],
124 | capture: automatic_via_codegen
125 | },
126 | end: {
127 | generate: end_codegen_session(sessionId),
128 | output: test_file_with_playwright_code
129 | }
130 | }
131 | ```
132 |
133 | ## 📸 Screenshot Management
134 | ```
135 | screenshot_policy = {
136 | Ω₁: "on_demand",
137 | Ω₂: "key_pages",
138 | Ω₃: "all_interactions",
139 | Ω₄: "test_failures",
140 | Ω₅: "final_state"
141 | }
142 | ```
143 |
144 | ## 🔌 Feature Detection
145 | ```
146 | detect_browser_automation() = {
147 | playwright_proxy_tools: tools.find("browser_navigate")
148 | ∧ tools.find("browser_click"),
149 | fallback: warn("Playwright MCP via proxy not available")
150 | }
151 | ```
152 |
153 | ## 🔗 Integration Notes
154 | ```
155 | Preferred workflow: author tests with Playwright, validate locally/CI, and use your standard Git workflow (e.g., gh CLI) for commits/PRs.
156 | ```
157 |
158 |
--------------------------------------------------------------------------------
/.cursor/rules/background-agents.mdc:
--------------------------------------------------------------------------------
1 | ---
2 | description: This rule should apply ONLY when running as a background agent.
3 | alwaysApply: false
4 | ---
5 |
6 | # CursorRIPER♦βG 1.0.0 — Background Agents Autonomy
7 |
8 | ## 🎯 Scope & Activation
9 |
10 | activate_if(agent.role == "background" ∧ agent.autonomous == true)
11 |
12 | When active, the background agent must iterate the full RIPER loop without waiting for human interaction, while enforcing commit hygiene and pre-commit quality gates.
13 |
14 | ## 🛣️ Autonomous RIPER Loop
15 |
16 | Ω_bg = [Ω₁ → Ω₂ → Ω₃ → Ω₄ → Ω₅]∞ // Repeat until task set is complete
17 |
18 | Ω₁ = 🔍RESEARCH ⟶ gather_context ∧ map_surfaces ∧ record_findings
19 | Ω₂ = 💡INNOVATE ⟶ propose_options ∧ evaluate_tradeoffs ∧ select_approach
20 | Ω₃ = 📝PLAN ⟶ create_checklist ∧ define_slices ∧ acceptance_criteria
21 | Ω₄ = ⚙️EXECUTE ⟶ implement_slice ∧ verify_slice ∧ stage_changes
22 | Ω₅ = 🔎REVIEW ⟶ validate_against_plan ∧ summarize ∧ decide_next
23 |
24 | transition_rules = {
25 | Ω₁→Ω₂: found_viable_problem_space,
26 | Ω₂→Ω₃: approach_selected,
27 | Ω₃→Ω₄: checklist_ready,
28 | Ω₄→Ω₅: slice_complete ∧ gates_passed,
29 | Ω₅→Ω₁: new_findings_required,
30 | Ω₅→Ω₄: more_slices_remaining,
31 | Ω₅→END: all_acceptance_criteria_met
32 | }
33 |
34 | ## 🔐 Pre-Commit Quality Gates (hard requirement)
35 |
36 | G_commit = {
37 | 1: run_build() == 0, // npm run build
38 | 2: run_tests() == 0, // npm test
39 | 3: lsmcp_workspace_clean() == true, // Λs: lsp_get_all_diagnostics → 0 blocking
40 | 4: optional_lint() ∈ {0, skipped}
41 | }
42 |
43 | allow_commit = all(G_commit) // All gates must pass
44 |
45 | run_build() = exec("npm run build")
46 | run_tests() = exec("npm test")
47 | optional_lint() = has_script("lint") ? exec("npm run lint") : "skipped"
48 | lsmcp_workspace_clean() = Λs.lsp_get_all_diagnostics().errors == 0
49 |
50 | ## ✍️ Semantic Commits Policy (Angular preset)
51 |
commit_format = "<type>(<scope>): <subject>" // subject ≤ 72 chars, imperative mood
53 |
54 | types_allowed = [
55 | feat, fix, docs, style, refactor, perf, test,
56 | build, ci, chore, revert
57 | ]
58 |
59 | rules = {
60 | subject_case: sentence_case,
61 | scope: recommended_but_optional,
62 | body: allowed_multiline_for_context,
63 | footer_BREAKING_CHANGE: supported
64 | }
65 |
66 | granularity_constraints = {
67 | max_files_per_commit: 10,
68 | max_changed_lines_per_commit: 400,
69 | atomic_scope: "one coherent slice per commit"
70 | }
71 |
72 | split_commits(strategy = sequential_atomic) = {
73 | partition_changes(atomic_scope),
74 | order_by_dependencies(),
75 | commit_each_after(G_commit)
76 | }
77 |
78 | examples = [
79 | "feat(img): add base64 conversion command with data URI output",
80 | "fix(db): close Mongo connection on error paths",
81 | "test(env): add coverage for from-json edge cases",
82 | "refactor(utils): extract piping logic into reusable service",
83 | "chore: run barrels generation and update exports"
84 | ]
85 |
86 | ## ⚙️ Background Execution Protocol
87 |
88 | Σ_bg = {
89 | plan_unit_of_work() ⟶ define_slice(),
90 | implement_slice() ⟶ apply_minimal_edit(),
91 | verify_slice() ⟶ run_local_checks(),
92 | stage_and_commit() ⟶ if allow_commit then semantic_commit() else remediate(),
93 | iterate() ⟶ next_slice_or_finish()
94 | }
95 |
96 | run_local_checks() = {
97 | build: run_build(),
98 | test: run_tests(),
99 | lsmcp: Λs.lsp_get_all_diagnostics(),
100 | pass: allow_commit
101 | }
102 |
103 | semantic_commit() = git.commit({
104 | message: commit_format,
105 | constraints: granularity_constraints
106 | })
107 |
108 | ## 🔧 Git & CI Integration
109 |
110 | Φ_git = {
111 | pre_push: assert(working_tree_clean ∧ allow_commit),
112 | pr_title: use_first_commit_subject(),
113 | pr_body: include_summary_of_changes ∧ checklist_status,
114 | labeling: derive_from_types_allowed
115 | }
116 |
117 | ## 🧭 Commands (Background Agent)
118 |
119 | βG_commands = {
120 | "/bg-start": start_autonomous_loop(Ω_bg),
121 | "/bg-stop": stop_autonomous_loop(),
122 | "/bg-status": report_current_mode ∧ gates_state ∧ next_actions,
123 | "/bg-commit": run(G_commit) ∧ semantic_commit_if_pass(),
124 | "/bg-config": set({granularity_constraints, timeouts, max_iterations})
125 | }
126 |
127 | ## 🚦 Safety & Boundaries
128 |
129 | constraints = {
130 | respect_protection_markers: true, // Ψ behaviors
131 | no_large_refactors_without_plan: true, // must pass via Ω₃
132 | stop_on_repeated_gate_failures: 3, // prevent loops
133 | seek_human_on_blockers: true // escalate when stuck
134 | }
135 |
136 | on_gate_failure(kind) = {
137 | capture_logs(kind),
138 | attempt_auto_fix ≤ 2,
139 | if still_failing ⇒ stop_autonomous_loop() ∧ request_human_input
140 | }
141 |
142 | ## 🔗 References
143 |
144 | links = {
145 | RIPER_core: "ripersigma105.mdc",
146 | LSMCP_rules: "mcp-lsmcp.mdc",
147 | Protection_rules: "codeprotection.mdc"
148 | }
149 |
150 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | @kibibit/cert-management
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 | Automated SSL Certificate Management with DNS Challenge Support
22 |
23 |
24 |
25 | ## Description
26 | A robust certificate management solution that:
27 | - Automates wildcard SSL certificate issuance and renewal using DNS challenges
28 | - Integrates with Nginx Proxy Manager (NPM) for certificate management
29 | - Supports United Domains DNS provider
30 | - Handles certificate renewals non-interactively after initial setup
31 |
32 | ## Features
33 |
34 | - 🔒 **Wildcard Certificates**: Support for multiple wildcard domains
35 | - 🤖 **Automated DNS Challenges**: Handles DNS verification automatically
36 | - 🔄 **Auto Renewal**: Non-interactive renewal process
37 | - 🔌 **NPM Integration**: Direct integration with Nginx Proxy Manager
38 | - 📝 **TypeScript Support**: Written in TypeScript for better maintainability
39 |
40 | ## Prerequisites
41 |
42 | - Node.js 20+
43 | - Nginx Proxy Manager instance
44 | - United Domains account
45 | - `certbot` installed on the system
46 | - `dig` command available for DNS verification
47 |
48 | ## Environment Variables
49 |
50 | Required environment variables:
51 |
52 | ```bash
53 | # United Domains Credentials
54 | UD_USERNAME=your-ud-username
55 | UD_PASSWORD=your-ud-password
56 |
57 | # Nginx Proxy Manager Configuration
58 | NPM_BASE_URL=http://your-npm-instance:81
59 | NPM_IDENTITY=your-npm-email
60 | NPM_SECRET=your-npm-password
61 |
62 | # Domain Configuration
63 | DOMAIN=your-base-domain.com
64 | WILDCARDS="*.your-domain.com,*.subdomain.your-domain.com"
65 | ```
66 |
67 | ## Installation
68 |
69 | 1. Clone this repository:
70 | ```bash
71 | git clone https://github.com/kibibit/cert-management.git
72 | cd cert-management
73 | ```
74 |
75 | 2. Install dependencies:
76 | ```bash
77 | npm install
78 | ```
79 |
80 | 3. Build the project:
81 | ```bash
82 | npm run build
83 | ```
84 |
85 | ## Usage
86 |
87 | ### Using Docker (Recommended)
88 |
89 | 1. Pull the Docker image:
90 | ```bash
91 | docker pull kibibitopensrc/cert-management:latest
92 | ```
93 |
94 | Available tags:
95 | - `latest`: Latest stable release
96 | - `vX.Y.Z`: Specific version (e.g., `v1.0.0`)
97 | - `vX.Y`: Minor version (e.g., `v1.0`)
98 | - `vX`: Major version (e.g., `v1`)
99 |
100 | View all available tags on [Docker Hub](https://hub.docker.com/r/kibibitopensrc/cert-management/tags)
101 |
102 | 2. Run the container with your environment variables:
103 | ```bash
104 | docker run -d \
105 | -e NPM_BASE_URL=http://your-npm-instance:81 \
106 | -e NPM_IDENTITY=your-npm-email \
107 | -e NPM_SECRET=your-npm-password \
108 | -e UD_USERNAME=your-ud-username \
109 | -e UD_PASSWORD=your-ud-password \
110 | -e WILDCARDS="*.your-domain.com,*.subdomain.your-domain.com" \
111 | -v /path/to/certificates:/root/kb-certs \
112 | kibibitopensrc/cert-management:latest
113 | ```
114 |
115 | ### Using Node.js Directly
116 |
117 | 1. Set up your environment variables (see above section)
118 |
119 | 2. Run the certificate maintenance script:
120 | ```bash
121 | # Using npm
122 | npm run start
123 |
124 | # Using ts-node (development)
125 | npm run start:dev
126 | ```
127 |
128 | ### Command Line Arguments
129 |
130 | You can also provide configuration via command line arguments:
131 |
132 | ```bash
133 | npx ts-node src/cert-maintenance.ts \
134 | --base-url=http://your-npm-instance:81 \
135 | --identity=your-npm-email \
136 | --secret=your-npm-password \
137 | --domain=your-domain.com \
138 | --wildcards="*.your-domain.com,*.other-domain.com" \
139 | --dry-run
140 | ```
141 |
142 | ## Development
143 |
144 | ```bash
145 | # Install dependencies
146 | npm install
147 |
148 | # Run in development mode
149 | npm run start:dev
150 |
151 | # Build the project
152 | npm run build
153 |
154 | # Run linting
155 | npm run lint
156 |
157 | # Fix linting issues
158 | npm run lint:fix
159 | ```
160 |
161 | ## How It Works
162 |
163 | 1. The script checks for existing valid certificates in NPM for each wildcard domain
164 | 2. If a certificate needs renewal:
165 | - Initiates certbot DNS challenge
166 | - Uses United Domains API to set required DNS records
167 | - Verifies DNS propagation
168 | - Obtains the certificate from Let's Encrypt
169 | - Uploads the new certificate to NPM
170 | - Updates NPM proxy hosts to use the new certificate
171 | 3. Cleanup is performed automatically
172 |
173 | ## Important Notes
174 |
175 | - First-time certificate issuance requires DNS verification
176 | - The script expects certbot configuration in `kb-certs` directory
177 | - DNS propagation checks can take several minutes
178 | - Uses dig with trace for reliable DNS verification
179 | - Certificates are stored in NPM after issuance
180 |
181 | ## Stay in touch
182 |
183 | - Author - [Neil Kalman](https://github.com/thatkookooguy)
184 | - Website - [https://github.com/kibibit](https://github.com/kibibit)
185 | - StackOverflow - [thatkookooguy](https://stackoverflow.com/users/1788884/thatkookooguy)
186 | - Twitter - [@thatkookooguy](https://twitter.com/thatkookooguy)
187 | - Twitter - [@kibibit_opensrc](https://twitter.com/kibibit_opensrc)
188 |
189 | ## License
190 |
191 | This project is [MIT licensed](LICENSE).
--------------------------------------------------------------------------------
/.cursor/rules/mcp-lsmcp.mdc:
--------------------------------------------------------------------------------
1 | ---
2 | alwaysApply: true
3 | ---
4 |
5 | # 🧠 lsmcp (TypeScript LSP) Rules for CursorRIPER Σ
6 | # Symbol: Λs (LSP)
7 | # Version: 1.1.0
8 |
9 | ## 📋 Service Definition
10 | ```
11 | service = "TypeScript Language Server via MCP"
12 | symbol = "Λs"
13 | requires = "lsmcp"
14 | ```
15 |
16 | ## 🎯 Goal
17 | ```
18 | Produce edits that COMPILE and MATCH the repo’s real symbols.
19 | ```
20 |
21 | ## 🔧 Mandatory Tooling (must be used in-flow)
22 | **Core validators (always available, must be used as instructed):**
23 | - lsmcp.lsp_get_definitions
24 | - lsmcp.lsp_find_references
25 | - lsmcp.get_symbol_details
26 | - lsmcp.lsp_get_diagnostics
27 |
28 | **Optional helpers (use only if helpful; never instead of the core):**
29 | - lsmcp.search_symbols
30 | - lsmcp.lsp_get_all_diagnostics
31 | - lsmcp.document_symbols / workspace symbols (if exposed)
32 |
33 | ## 🛣️ Golden Workflow (with REQUIRED tool calls)
34 | **A. PLAN (before any edit)**
35 | 1) For every symbol/module/interface you intend to use or modify, **MUST**:
36 | - Call **`lsp_get_definitions`** to prove existence & file path.
37 | - If you need details to implement correctly, call **`get_symbol_details`**.
38 | - If the name is ambiguous, you may call **`search_symbols`** first, then verify with **`lsp_get_definitions`**.
39 | - If a needed symbol does not exist, explicitly plan to add it (no guessing).
40 |
41 | **B. IMPACT CHECK (before editing an existing symbol)**
42 | 2) If you will change a function/type that already exists, **MUST**:
43 | - Call **`lsp_find_references`** and review call sites you may need to update.
44 |
45 | **C. EDIT (small, coherent steps)**
46 | 3) Apply the smallest coherent change (one function/feature slice). Keep imports real.
47 |
48 | **D. VERIFY THE CHANGE (immediately after each edit slice)**
49 | 4) **MUST** call **`lsp_get_diagnostics`** on **every file you changed** in this slice.
50 | - If there are errors, fix them and re-run **`lsp_get_diagnostics`** until clean.
51 | - Only after the slice is clean, proceed to the next slice or finish.
52 |
53 | **E. FINISH (optional repo-wide sanity)**
54 | 5) If you made cross-file updates, you **may** run **`lsp_get_all_diagnostics`** once before returning the final diff.
55 |
56 | ## 🗂️ Monorepo Discipline
57 | ```
58 | - Treat each file within its tsconfig project/package.
59 | - When searching, prefer results whose paths match the intended package.
60 | - Do not introduce cross-package references unless requested.
61 | ```
62 |
63 | ## 🚫 Non-negotiables
64 | ```
65 | - Never reference a symbol until `lsp_get_definitions` returns a concrete location.
66 | - Never return edits with blocking diagnostics in the edited files.
67 | - Never invent imports or paths; validate them first.
68 | - If a new API/type is required, declare it minimally in the correct package and validate.
69 | ```
70 |
71 | ## ✅ Output Contract (per edit slice)
72 | Provide a short **Validation Log** showing:
73 | - Verified definitions: symbol → file path (from `lsp_get_definitions`)
74 | - References inspected: count & sample files (from `lsp_find_references`) when applicable
75 | - Post-edit diagnostics: file → 0 errors (from `lsp_get_diagnostics`)
76 |
77 | ---
78 |
79 | ### Tiny “how to behave” examples (few-shot)
80 |
81 | **Example 1 — Using an existing util**
82 | ```
83 | PLAN:
84 | - lsp_get_definitions("formatUser"): found at packages/api/src/utils/formatUser.ts
85 | - get_symbol_details("formatUser"): returns signature + types
86 |
87 | EDIT:
88 | - Call formatUser in new controller
89 |
90 | VERIFY:
91 | - lsp_get_diagnostics(["packages/api/src/controllers/user.ts"]) → 0 errors
92 | Validation Log: <…>
93 | ```
94 |
95 | **Example 2 — Changing a function signature**
96 | ```
97 | PLAN:
98 | - lsp_get_definitions("createUser"): packages/api/src/service/user.ts:120
99 | - lsp_find_references("createUser"): 3 call sites in packages/web
100 |
101 | EDIT:
102 | - Update signature + 3 call sites
103 |
104 | VERIFY:
105 | - lsp_get_diagnostics([...all 4 edited files...]) → fix until 0 errors
106 | Validation Log: <…>
107 | ```
108 |
109 | **Example 3 — Symbol not found**
110 | ```
111 | PLAN:
112 | - lsp_get_definitions("getTenantPlan"): not found
113 | ACTION:
114 | - Propose minimal implementation in packages/billing/src/plan.ts
115 | VERIFY:
116 | - lsp_get_diagnostics([new file + caller]) → 0 errors
117 | Validation Log: <…>
118 | ```
119 | # 🧠 lsmcp (TypeScript LSP) Rules for CursorRIPER Σ
120 | # Symbol: Λs (LSP)
121 | # Version: 1.1.0
122 |
123 | ## 📋 Service Definition
124 | ```
125 | service = "TypeScript Language Server via MCP"
126 | symbol = "Λs"
127 | requires = "lsmcp"
128 | ```
129 |
130 | ## 🎯 Goal
131 | ```
132 | Produce edits that COMPILE and MATCH the repo’s real symbols.
133 | ```
134 |
135 | ## 🔧 Mandatory Tooling (must be used in-flow)
136 | **Core validators (always available, must be used as instructed):**
137 | - lsmcp.lsp_get_definitions
138 | - lsmcp.lsp_find_references
139 | - lsmcp.get_symbol_details
140 | - lsmcp.lsp_get_diagnostics
141 |
142 | **Optional helpers (use only if helpful; never instead of the core):**
143 | - lsmcp.search_symbols
144 | - lsmcp.lsp_get_all_diagnostics
145 | - lsmcp.document_symbols / workspace symbols (if exposed)
146 |
147 | ## 🛣️ Golden Workflow (with REQUIRED tool calls)
148 | **A. PLAN (before any edit)**
149 | 1) For every symbol/module/interface you intend to use or modify, **MUST**:
150 | - Call **`lsp_get_definitions`** to prove existence & file path.
151 | - If you need details to implement correctly, call **`get_symbol_details`**.
152 | - If the name is ambiguous, you may call **`search_symbols`** first, then verify with **`lsp_get_definitions`**.
153 | - If a needed symbol does not exist, explicitly plan to add it (no guessing).
154 |
155 | **B. IMPACT CHECK (before editing an existing symbol)**
156 | 2) If you will change a function/type that already exists, **MUST**:
157 | - Call **`lsp_find_references`** and review call sites you may need to update.
158 |
159 | **C. EDIT (small, coherent steps)**
160 | 3) Apply the smallest coherent change (one function/feature slice). Keep imports real.
161 |
162 | **D. VERIFY THE CHANGE (immediately after each edit slice)**
163 | 4) **MUST** call **`lsp_get_diagnostics`** on **every file you changed** in this slice.
164 | - If there are errors, fix them and re-run **`lsp_get_diagnostics`** until clean.
165 | - Only after the slice is clean, proceed to the next slice or finish.
166 |
167 | **E. FINISH (optional repo-wide sanity)**
168 | 5) If you made cross-file updates, you **may** run **`lsp_get_all_diagnostics`** once before returning the final diff.
169 |
170 | ## 🗂️ Monorepo Discipline
171 | ```
172 | - Treat each file within its tsconfig project/package.
173 | - When searching, prefer results whose paths match the intended package.
174 | - Do not introduce cross-package references unless requested.
175 | ```
176 |
177 | ## 🚫 Non-negotiables
178 | ```
179 | - Never reference a symbol until `lsp_get_definitions` returns a concrete location.
180 | - Never return edits with blocking diagnostics in the edited files.
181 | - Never invent imports or paths; validate them first.
182 | - If a new API/type is required, declare it minimally in the correct package and validate.
183 | ```
184 |
185 | ## ✅ Output Contract (per edit slice)
186 | Provide a short **Validation Log** showing:
187 | - Verified definitions: symbol → file path (from `lsp_get_definitions`)
188 | - References inspected: count & sample files (from `lsp_find_references`) when applicable
189 | - Post-edit diagnostics: file → 0 errors (from `lsp_get_diagnostics`)
190 |
191 | ---
192 |
193 | ### Tiny “how to behave” examples (few-shot)
194 |
195 | **Example 1 — Using an existing util**
196 | ```
197 | PLAN:
198 | - lsp_get_definitions("formatUser"): found at packages/api/src/utils/formatUser.ts
199 | - get_symbol_details("formatUser"): returns signature + types
200 |
201 | EDIT:
202 | - Call formatUser in new controller
203 |
204 | VERIFY:
205 | - lsp_get_diagnostics(["packages/api/src/controllers/user.ts"]) → 0 errors
206 | Validation Log: <…>
207 | ```
208 |
209 | **Example 2 — Changing a function signature**
210 | ```
211 | PLAN:
212 | - lsp_get_definitions("createUser"): packages/api/src/service/user.ts:120
213 | - lsp_find_references("createUser"): 3 call sites in packages/web
214 |
215 | EDIT:
216 | - Update signature + 3 call sites
217 |
218 | VERIFY:
219 | - lsp_get_diagnostics([...all 4 edited files...]) → fix until 0 errors
220 | Validation Log: <…>
221 | ```
222 |
223 | **Example 3 — Symbol not found**
224 | ```
225 | PLAN:
226 | - lsp_get_definitions("getTenantPlan"): not found
227 | ACTION:
228 | - Propose minimal implementation in packages/billing/src/plan.ts
229 | VERIFY:
230 | - lsp_get_diagnostics([new file + caller]) → 0 errors
231 | Validation Log: <…>
232 | ```
233 |
--------------------------------------------------------------------------------
/.cursor/rules/codeprotection.mdc:
--------------------------------------------------------------------------------
1 | ---
2 | alwaysApply: true
3 | ---
4 | # CursorRIPER♦Ψ 1.0.1
5 |
6 | ## 🛡️ Protection Syntax
7 |
8 | Ψ_syntax = {
9 | PROTECTED: "PROTECTED - DO NOT MODIFY",
10 | GUARDED: "GUARDED - ASK BEFORE MODIFYING",
11 | INFO: "INFO - CONTEXT NOTE",
12 | DEBUG: "DEBUG - DEBUGGING CODE",
13 | TEST: "TEST - TESTING CODE",
14 | CRITICAL: "CRITICAL - BUSINESS LOGIC",
15 |
16 | // End markers
17 | END_PROTECTED: "END-P - PROTECTED REGION END",
18 | END_GUARDED: "END-G - GUARDED REGION END",
19 | END_INFO: "END-I - INFO REGION END",
20 | END_DEBUG: "END-D - DEBUG REGION END",
21 | END_TEST: "END-T - TEST REGION END",
22 | END_CRITICAL: "END-C - CRITICAL REGION END"
23 | }
24 |
25 | ## 💬 Language Comment Formats
26 |
27 | Ψ_language_syntax = {
28 | js: {prefix: "// ", suffix: ""},
29 | ts: {prefix: "// ", suffix: ""},
30 | jsx: {prefix: "// ", suffix: ""},
31 | tsx: {prefix: "// ", suffix: ""},
32 | py: {prefix: "# ", suffix: ""},
html: {prefix: "<!-- ", suffix: " -->"},
34 | php: {prefix: "// ", suffix: ""},
35 | css: {prefix: "/* ", suffix: " */"},
36 | scss: {prefix: "/* ", suffix: " */"},
37 | java: {prefix: "// ", suffix: ""},
38 | rb: {prefix: "# ", suffix: ""},
39 | go: {prefix: "// ", suffix: ""},
40 | rs: {prefix: "// ", suffix: ""},
41 | c: {prefix: "// ", suffix: ""},
42 | cpp: {prefix: "// ", suffix: ""},
43 | cs: {prefix: "// ", suffix: ""},
44 | swift: {prefix: "// ", suffix: ""},
45 | kt: {prefix: "// ", suffix: ""},
46 | dart: {prefix: "// ", suffix: ""},
md: {prefix: "<!-- ", suffix: " -->"},
xml: {prefix: "<!-- ", suffix: " -->"},
49 | sh: {prefix: "# ", suffix: ""},
50 | bash: {prefix: "# ", suffix: ""},
51 | sql: {prefix: "-- ", suffix: ""}
52 | }
53 |
54 | ## ⌨️ Command Shortcuts
55 |
56 | Ψ_shorthand = {
57 | "!cp": apply_protection(PROTECTED),
58 | "!cg": apply_protection(GUARDED),
59 | "!ci": apply_protection(INFO),
60 | "!cd": apply_protection(DEBUG),
61 | "!ct": apply_protection(TEST),
62 | "!cc": apply_protection(CRITICAL)
63 | }
64 |
65 | apply_protection(type) = {
66 | detect_language(current_file) ⟶ lang,
67 | get_comment_syntax(lang) ⟶ {prefix, suffix},
68 | selection = get_editor_selection(),
69 |
70 | // Insert opening marker
71 | insert_at_selection_start(prefix + Ψ_syntax[type] + suffix),
72 |
73 | // Insert end marker
74 | end_marker = Ψ_syntax["END_" + type],
75 | insert_at_selection_end(prefix + end_marker + suffix),
76 |
77 | // Update protection registry
78 | Ψ_manage.add(current_file, selection.start_line, selection.end_line, type, "User-added protection")
79 | }
80 |
81 | ## 🔄 Protection Behaviors
82 |
83 | Ψ_behaviors = {
84 | PROTECTED: {
85 | Ω₁: acknowledge ∧ document,
86 | Ω₂: respect_boundaries ∧ alternate_approaches,
87 | Ω₃: plan_around ∧ never_include,
88 | Ω₄: refuse_modification ∧ report_attempts,
89 | Ω₅: verify_untouched ∧ validate
90 | },
91 | GUARDED: {
92 | Ω₁: acknowledge ∧ document,
93 | Ω₂: consider_changes ∧ document_rationale,
94 | Ω₃: plan_with_permission ∧ alternatives,
95 | Ω₄: request_explicit_permission ∧ detail_changes,
96 | Ω₅: document_changes ∧ justify
97 | },
98 | INFO: {
99 | Ω₁: acknowledge ∧ use_context,
100 | Ω₂: incorporate_context ∧ respect_intent,
101 | Ω₃: plan_with_awareness,
102 | Ω₄: careful_modification ∧ preserve_intent,
103 | Ω₅: verify_context_preserved
104 | },
105 | DEBUG: {
106 | Ω₁: note_debug_purpose,
107 | Ω₂: preserve_during_innovation,
108 | Ω₃: include_in_development_plan,
109 | Ω₄: maintain_during_dev ∧ consider_cleanup,
110 | Ω₅: evaluate_necessity
111 | },
112 | TEST: {
113 | Ω₁: document_test_coverage,
114 | Ω₂: maintain_test_integrity,
115 | Ω₃: ensure_test_coverage,
116 | Ω₄: update_with_implementation,
117 | Ω₅: verify_test_coverage
118 | },
119 | CRITICAL: {
120 | Ω₁: document_thoroughly,
121 | Ω₂: design_with_extreme_care,
122 | Ω₃: plan_impact_analysis,
123 | Ω₄: comprehensive_review ∧ careful_change,
124 | Ω₅: rigorous_validation
125 | }
126 | }
127 |
128 | ## 🔍 Protection Scanner
129 |
130 | Ψ_scan = {
131 | patterns: {
132 | auth: ["login", "authenticate", "credentials", "password", "token"],
133 | payment: ["payment", "transaction", "credit", "billing", "invoice"],
134 | security: ["encrypt", "decrypt", "hash", "salt", "secure"],
135 | core: ["critical", "essential", "main", "primary", "core"],
136 | api: ["api", "endpoint", "request", "response", "service"],
137 | data: ["database", "query", "record", "store", "retrieve"]
138 | },
139 |
140 | detect(file) = {
141 | lang = detect_language(file),
142 | code = read_file(file),
143 | segments = parse(code, lang),
144 | analysis = []
145 |
146 | // Track open markers and match with end markers
147 | open_markers = []
148 |
149 | for segment in segments:
150 | // Check if this is an end marker
151 | end_marker_match = match_end_marker(segment)
152 | if end_marker_match:
153 | if open_markers.length > 0:
154 | // Close the most recent matching open marker
155 | close_marker(open_markers, end_marker_match, analysis)
156 | continue
157 |
158 | // Check if this is an opening marker
159 | marker_type = match_protection_marker(segment)
160 | if marker_type:
161 | open_markers.push({
162 | type: marker_type,
163 | line: segment.line_number,
164 | content: segment
165 | })
166 | continue
167 |
168 | // Regular code segment - check patterns if not in a marker
169 | if open_markers.length == 0:
170 | pattern_matches = match_patterns(segment, Ψ_scan.patterns)
171 | if pattern_matches:
172 | analysis.push({
173 | segment: segment,
174 | matches: pattern_matches,
175 | suggested_level: determine_level(pattern_matches)
176 | })
177 |
178 | // Report any unclosed markers
179 | for marker in open_markers:
180 | analysis.push({
181 | segment: marker.content,
182 | warning: "Unclosed protection marker",
183 | suggested_action: "Add appropriate end marker"
184 | })
185 |
186 | return analysis
187 | },
188 |
189 | determine_level(matches) = {
190 | if matches.intersect(["security", "payment", "auth"]).length > 0:
191 | return "PROTECTED"
192 | else if matches.intersect(["core", "api"]).length > 0:
193 | return "CRITICAL"
194 | else if matches.intersect(["data"]).length > 0:
195 | return "GUARDED"
196 | else:
197 | return "INFO"
198 | },
199 |
200 | match_end_marker(segment) = {
201 | for type in [PROTECTED, GUARDED, INFO, DEBUG, TEST, CRITICAL]:
202 | end_marker = "END-" + type.substr(0,1)
203 | if segment.includes(end_marker):
204 | return type
205 | return null
206 | },
207 |
208 | close_marker(open_markers, end_type, analysis) = {
209 | // Find matching open marker
210 | matched_idx = -1
211 | for i = open_markers.length - 1; i >= 0; i--:
212 | if open_markers[i].type === end_type:
213 | matched_idx = i
214 | break
215 |
216 | if matched_idx >= 0:
217 | // Remove the marker from the open list
218 | marker = open_markers.splice(matched_idx, 1)[0]
219 | // Process the protection block if needed
220 | // (no analysis needed for properly marked protection blocks)
221 | else:
222 | analysis.push({
223 | warning: "Unmatched end marker for " + end_type,
224 | suggested_action: "Add appropriate start marker"
225 | })
226 | }
227 | }
228 |
229 | ## 📊 Protection Management
230 |
231 | Ψ_manage = {
232 | add(file, start_line, end_line, level, rationale) = {
233 | entry = {
234 | file: file,
235 | start_line: start_line,
236 | end_line: end_line,
237 | level: level,
238 | added_date: now(),
239 | rationale: rationale
240 | },
241 | update(σ₆.protected_regions, entry)
242 | },
243 |
244 | approve(file, start_line, end_line, changes) = {
245 | approval = {
246 | file: file,
247 | start_line: start_line,
248 | end_line: end_line,
249 | requested_date: now(),
250 | approved_date: now(),
251 | changes: changes
252 | },
253 | update(σ₆.guarded_approvals, approval)
254 | },
255 |
256 | scan_project() = {
257 | results = [],
258 | files = list_project_files(),
259 |
260 | for file in files:
261 | if is_code_file(file):
262 | scan_result = Ψ_scan.detect(file)
263 | if scan_result.length > 0:
264 | results.push({
265 | file: file,
266 | findings: scan_result
267 | })
268 |
269 | update(σ₆.scan_history, {
270 | date: now(),
271 | files_scanned: files.length,
272 | protections_found: results.length
273 | })
274 |
275 | return results
276 | }
277 | }
278 |
279 | ## 🔄 Protection Commands
280 |
281 | Ψ_commands = {
282 | "/protect-scan": Ψ_manage.scan_project,
283 | "/protect-status": report_protection_status,
284 | "/protect-add": add_protection_to_selection,
285 | "/protect-remove": remove_protection_with_confirmation,
286 | "/protect-approve": approve_guarded_modification
287 | }
288 |
289 | report_protection_status() = {
290 | regions = read(σ₆.protected_regions),
291 | summary = summarize(regions),
292 | return format_report(summary)
293 | }
294 |
295 | add_protection_to_selection(level) = {
296 | selection = get_editor_selection(),
297 | file = get_current_file(),
298 | lang = detect_language(file),
299 | syntax = Ψ_language_syntax[lang],
300 |
301 | // Add start marker
302 | start_comment = syntax.prefix + Ψ_syntax[level] + syntax.suffix,
303 | insert_at_selection_start(start_comment),
304 |
305 | // Add end marker
306 | end_comment = syntax.prefix + Ψ_syntax["END_" + level] + syntax.suffix,
307 | insert_at_selection_end(end_comment),
308 |
309 | // Register the protected region
310 | Ψ_manage.add(file, selection.start_line, selection.end_line, level, "User-added protection")
311 | }
312 |
313 | remove_protection_with_confirmation(region_id) = {
314 | region = find_region_by_id(region_id),
315 | confirm("Are you sure you want to remove protection from this code?"),
316 | if confirmed:
317 | // Remove both the start and end markers
318 | remove_protection_comment(region.file, region.start_line),
319 | remove_protection_comment(region.file, region.end_line),
320 | remove_from_registry(region_id)
321 | }
322 |
323 | approve_guarded_modification(region_id, changes) = {
324 | region = find_region_by_id(region_id),
325 | if region.level != "GUARDED":
326 | return error("Only GUARDED code can be approved for modification")
327 | else:
328 | Ψ_manage.approve(region.file, region.start_line, region.end_line, changes)
329 | }
330 |
--------------------------------------------------------------------------------
/src/cert-maintenance.ts:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 | /* eslint-disable */
3 | 'use strict';
4 |
5 | import * as fs from 'node:fs';
6 | import * as path from 'node:path';
7 | import { spawn } from 'child_process';
8 | import { ensureDir, findProjectRoot, isExpired, willExpireSoon, parseArgs, sleep, toDateString } from './utils';
9 | import { logErr, logInfo, logOk, logStep, logWarn } from './logger';
10 | import { NpmService } from './npm.service';
11 | import { INpmCertificate } from './types';
12 |
// Shared NPM API client; initialised inside updateWildcardCertificates()
// once the credentials have been validated.
let npmService: NpmService;

// Repository root — certbot state and the hook scripts are resolved
// relative to it.
const projectRoot = findProjectRoot(__dirname);

// This file is an executable script, not a library: start the maintenance
// run immediately. Failures are logged and surface as exit code 1.
updateWildcardCertificates()
  .catch((error: Error) => {
    logErr(error.stack || error.message || String(error));

    process.exitCode = 1;
  });
23 |
24 | async function updateWildcardCertificates() {
25 | const args = parseArgs(process.argv);
26 | const baseUrl = (args['base-url'] || process.env.NPM_BASE_URL) as string;
27 | const identity = (args.identity || process.env.NPM_IDENTITY) as string;
28 | const secret = (args.secret || process.env.NPM_SECRET || '') as string;
29 | const dryRun = Boolean(args['dry-run']);
30 | const domain = (args.domain || process.env.DOMAIN) as string;
31 | // Wildcards can be provided as comma-separated list: --wildcards=*.example.com,*.test.com
32 | let wildcards = (args.wildcards || process.env.WILDCARDS?.split(',') || []) as string[];
33 | wildcards = (Array.isArray(wildcards) ? wildcards : [ wildcards ]);
34 |
35 | if (wildcards.length === 0) {
36 | logWarn('No wildcards provided. Set via --wildcards=...');
37 | process.exitCode = 1;
38 | return;
39 | }
40 |
41 | if (!baseUrl || !identity || !secret || !domain) {
42 | logWarn('NPM base URL, identity, secret, and domain must be provided.');
43 | logWarn('Set via --base-url=... --identity=... --secret=... --domain=...');
44 | logWarn('or set environment variables NPM_BASE_URL, NPM_IDENTITY, NPM_SECRET, and DOMAIN');
45 | process.exitCode = 1;
46 | return;
47 | }
48 |
49 | if (!process.env.UD_USERNAME || !process.env.UD_PASSWORD) {
50 | logWarn('UD_USERNAME and UD_PASSWORD must be set');
51 | process.exitCode = 1;
52 | return;
53 | }
54 |
55 | logStep('Authenticating to Nginx Proxy Manager');
56 | npmService = new NpmService(baseUrl, identity, secret);
57 | logOk('Authenticated.');
58 |
59 | for (const wildcard of wildcards) {
60 | try {
61 | const { certId } = await ensureValidCertificate(wildcard, { dryRun });
62 | const changed = await npmService.updateHostsForWildcard(wildcard, certId, { dryRun });
63 | if (changed > 0) {
64 | logOk(`Updated ${ changed } host(s) for ${ wildcard }.`);
65 | } else {
66 | logInfo(`No updates needed for ${ wildcard }.`);
67 | }
68 | } catch (error) {
69 | logErr(`Error handling ${ wildcard }: ${ error.message || error }`);
70 | }
71 | }
72 |
73 | // await npmService.cleanupUnusedCertificates(wildcards, { dryRun });
74 |
75 | logOk('Done.');
76 | }
77 |
78 |
79 |
80 | function getCertbotCommandForWildcard(wildcard: string) {
81 | const baseDomain = wildcard.replace(/^\*\./, '');
82 | return `sudo certbot certonly --manual --preferred-challenges dns --email neilkalman@gmail.com --agree-tos --no-eff-email -d "${ wildcard }" --config-dir ~/kb-certs`;
83 | }
84 |
85 |
86 | async function verifyDNSChallenge(
87 | domain: string,
88 | expectedValue: string
89 | ): Promise {
90 | logStep('Verifying DNS record propagation...');
91 | const maxAttempts = 20; // Increase max attempts
92 | const delaySeconds = 240; // Increase delay between attempts
93 |
94 | for (let attempt = 1; attempt <= maxAttempts; attempt++) {
95 | try {
96 | const result = await new Promise((resolve, reject) => {
97 | console.log(`Digging for ${domain}`);
98 | // Use local resolver and force recursive lookup
99 | const dig = spawn('dig', [`_acme-challenge.${domain}`, 'TXT', '+trace']);
100 | let output = '';
101 |
102 | dig.stdout.on('data', (data) => {
103 | output += data.toString();
104 | });
105 |
106 | dig.on('close', (code) => {
107 | if (code === 0) {
108 | // Extract TXT record from trace output
109 | const txtMatch = output.match(/_acme-challenge\.[^"]*\s+TXT\s+"([^"]+)"/);
110 | if (txtMatch) {
111 | resolve(txtMatch[1]);
112 | } else {
113 | resolve(''); // No match found
114 | }
115 | } else {
116 | reject(new Error(`dig exited with code ${code}`));
117 | }
118 | });
119 | });
120 |
121 | console.log(`DNS result: ${result}`);
122 | console.log(`Expected value: ${expectedValue}`);
123 |
124 | if (result === expectedValue) {
125 | logOk('DNS challenge verified!');
126 | return true;
127 | }
128 |
129 | logInfo(`DNS not propagated yet (attempt ${attempt}/${maxAttempts}), waiting ${delaySeconds}s...`);
130 | await sleep(delaySeconds * 1000);
131 | } catch (error) {
132 | logWarn(`DNS check failed: ${error.message}`);
133 | await sleep(delaySeconds * 1000);
134 | }
135 | }
136 |
137 | throw new Error('DNS challenge verification timed out');
138 | }
139 |
140 | async function runCertbot(wildcard: string, dryRun = false): Promise<{ success: boolean }> {
141 | if (dryRun) {
142 | logInfo('Dry-run: would run certbot for DNS challenge');
143 | return { success: true };
144 | }
145 |
146 |
147 | const certRoot = path.join(projectRoot, 'kb-certs');
148 | // delete the cert root directory
149 | fs.rmSync(certRoot, { recursive: true, force: true });
150 | const configDir = certRoot;
151 | const workDir = path.join(certRoot, 'work');
152 | const logsDir = path.join(certRoot, 'logs');
153 | [configDir, workDir, logsDir].forEach(ensureDir);
154 |
155 | const authHook = path.join(projectRoot, 'auth-hook.sh');
156 | const cleanHook = path.join(projectRoot, 'cleanup-hook.sh');
157 | fs.chmodSync(authHook, 0o755);
158 | fs.chmodSync(cleanHook, 0o755);
159 |
160 | const args = [
161 | 'certonly',
162 | '--manual',
163 | '--preferred-challenges', 'dns',
164 | '--email', 'neilkalman@gmail.com',
165 | '--agree-tos',
166 | '--no-eff-email',
167 | '--non-interactive',
168 | '--manual-auth-hook', authHook,
169 | '--manual-cleanup-hook', cleanHook,
170 | '-d', wildcard,
171 | '--config-dir', configDir,
172 | '--work-dir', workDir,
173 | '--logs-dir', logsDir,
174 | ];
175 |
176 | return new Promise((resolve, reject) => {
177 | const certbotProcess = spawn('certbot', args, { stdio: 'inherit' });
178 | certbotProcess.on('close', (code) => {
179 | if (code === 0) {
180 | resolve({ success: true });
181 | } else {
182 | reject(new Error(`Certbot failed with code ${code}`));
183 | }
184 | });
185 | });
186 | }
187 |
188 | function getLiveCertPathsForWildcard(wildcard: string) {
189 | const baseDomain = wildcard.replace(/^\*\./, '');
190 | const liveDir = path.join(projectRoot, 'kb-certs', 'live', baseDomain);
191 | return {
192 | fullchain: path.join(liveDir, 'fullchain.pem'),
193 | privkey: path.join(liveDir, 'privkey.pem'),
194 | };
195 | }
196 |
197 | function filterCertificatesForWildcard(wildcard: string, certs: INpmCertificate[]) {
198 | return certs
199 | .filter((certificate) =>
200 | Array.isArray(certificate.domain_names) &&
201 | certificate.domain_names.includes(wildcard)
202 | );
203 | }
204 |
205 | function filterValidCertificates(certs: INpmCertificate[]) {
206 | return certs.filter((certificate) => {
207 | const expired = isExpired(certificate.expires_on);
208 | const expiringSoon = willExpireSoon(certificate.expires_on);
209 | return !expired && !expiringSoon;
210 | });
211 | }
212 |
213 | function getLongestValidCertificate(certs: INpmCertificate[]) {
214 | return certs
215 | .sort((a, b) => new Date(b.expires_on).getTime() - new Date(a.expires_on).getTime())[0];
216 | }
217 |
/**
 * Guarantees a usable certificate for `wildcard` in NPM.
 *
 * Flow: fetch all NPM certificates → if one covering the wildcard is still
 * valid, reuse it; otherwise run certbot (DNS-01), upload the fresh PEM
 * files to NPM, and return the newly created certificate's id.
 *
 * @param wildcard e.g. "*.example.com"
 * @param dryRun   when true, only logs what would happen; certId is null
 *                 unless an existing valid certificate was found.
 * @returns certId of the certificate to use, and whether it was just created.
 * @throws when renewal or the NPM upload fails.
 */
async function ensureValidCertificate(
  wildcard: string,
  { dryRun }: { dryRun: boolean }
): Promise<{ certId: string | null, created: boolean }> {
  logStep(`Checking certificates for ${wildcard}`);
  const certs = await npmService.npmGetCertificates();

  const relevantCertificates = filterCertificatesForWildcard(wildcard, certs);
  const valid = filterValidCertificates(relevantCertificates);

  // Happy path: an existing certificate is still good — reuse the one that
  // expires last.
  if (valid.length > 0) {
    const best = getLongestValidCertificate(valid);
    logOk(`Found valid certificate id=${best.id} for ${wildcard} expires_on=${best.expires_on}`);

    return { certId: best.id, created: false };
  }

  // Check if there are any certificates that are not expired but will expire soon
  const expiringCerts = relevantCertificates.filter(cert => !isExpired(cert.expires_on) && willExpireSoon(cert.expires_on));

  if (expiringCerts.length > 0) {
    const expiringCert = getLongestValidCertificate(expiringCerts);
    logWarn(`Certificate for ${wildcard} will expire soon (on ${expiringCert.expires_on}). Starting preemptive renewal...`);
  } else {
    logWarn(`No valid certificate found for ${wildcard}. Starting automated renewal...`);
  }

  if (dryRun) {
    logInfo('Dry-run: would attempt automated certificate renewal');
    return { certId: null, created: false };
  }

  try {
    // Run certbot and handle DNS challenge
    await runCertbot(wildcard, dryRun);

    // Read and upload the new certificate files certbot just wrote
    const { fullchain, privkey } = getLiveCertPathsForWildcard(wildcard);
    if (!fs.existsSync(fullchain) || !fs.existsSync(privkey)) {
      throw new Error(`Expected certificate files not found: ${fullchain} / ${privkey}`);
    }

    const certificate = fs.readFileSync(fullchain, 'utf8');
    const certificate_key = fs.readFileSync(privkey, 'utf8');

    // nice_name doubles as the lookup key to find the certificate after upload
    const nice_name = `${wildcard} - ${toDateString(new Date())}`;
    // Log certificate details for debugging
    logInfo(`Certificate length: ${certificate.length}`);
    logInfo(`Private key length: ${certificate_key.length}`);

    const payload = {
      provider: 'other',
      nice_name,
      domain_names: [wildcard],
      meta: {
        certificate: certificate.trim(),
        certificate_key: certificate_key.trim()
      }
    };

    logStep(`Uploading new certificate to NPM: ${nice_name}`);
    await npmService.npmCreateCertificate(payload);
    logOk('Upload complete.');

    // Re-fetch and return the new id (the create call does not return it here)
    const after = await npmService.npmGetCertificates();
    const created = after.find((c) => c.nice_name === nice_name);
    if (!created) throw new Error('Uploaded certificate not found after creation');
    logOk(`New certificate id=${created.id} ready.`);
    return { certId: created.id, created: true };
  } catch (error) {
    logErr(`Certificate renewal failed: ${error.message}`);
    throw error;
  }
}
293 |
294 |
295 |
--------------------------------------------------------------------------------
/.eslintrc.js:
--------------------------------------------------------------------------------
1 | // @ts-check
2 | /**
3 | * @type {import("eslint").Linter.Config}
4 | */
5 | const config = {
6 | parserOptions: {
7 | ecmaVersion: 2018,
8 | sourceType: 'module'
9 | },
10 | env: {
11 | es6: true,
12 | node: true
13 | },
14 | globals: {
15 | MyGlobal: true
16 | },
17 | ignorePatterns: [
18 | '**/db/models/*.js',
19 | '**/lib/**/*.ts'
20 | ],
21 | rules: {
22 | 'template-curly-spacing': [ 'error', 'always' ],
23 | 'space-infix-ops': 'error',
24 | 'array-bracket-newline': 'off',
25 | 'array-bracket-spacing': [ 'error', 'always' ],
26 | 'array-element-newline': 'off',
27 | 'block-spacing': [ 'error', 'always' ],
28 | 'brace-style': [ 'error', '1tbs', {
29 | 'allowSingleLine': true
30 | } ],
31 | 'camelcase': [ 'error', {
32 | 'properties': 'never'
33 | } ],
34 | 'comma-dangle': [ 'error', 'never' ],
35 | 'comma-spacing': [ 'error', {
36 | 'after': true,
37 | 'before': false
38 | } ],
39 | 'comma-style': 'error',
40 | 'computed-property-spacing': 'error',
41 | 'curly': [ 'error', 'multi-line' ],
42 | 'eol-last': 'error',
43 | 'func-call-spacing': 'error',
44 | 'indent': [ 'error', 2, {
45 | 'CallExpression': {
46 | 'arguments': 1
47 | },
48 | 'FunctionDeclaration': {
49 | 'body': 1,
50 | 'parameters': 1
51 | },
52 | 'FunctionExpression': {
53 | 'body': 1,
54 | 'parameters': 1
55 | },
56 | 'ignoredNodes': [ 'ConditionalExpression' ],
57 | 'MemberExpression': 1,
58 | 'ObjectExpression': 1,
59 | 'SwitchCase': 1
60 | } ],
61 | 'key-spacing': 'error',
62 | 'keyword-spacing': 'error',
63 | 'linebreak-style': 'error',
64 | 'max-len': [ 'error', {
65 | // starting small (forcing 120), but later we should force 80
66 | code: 80,
67 | ignoreComments: true,
68 | ignoreUrls: true,
69 | ignoreStrings: true,
70 | tabWidth: 2
71 | } ],
72 | 'no-array-constructor': 'error',
73 | 'no-caller': 'error',
74 | 'no-extend-native': 'error',
75 | 'no-extra-bind': 'error',
76 | 'no-invalid-this': 'error',
77 | 'no-irregular-whitespace': 'error',
78 | 'no-mixed-spaces-and-tabs': 'error',
79 | 'no-multi-spaces': 'error',
80 | 'no-multi-str': 'error',
81 |
82 | 'no-multiple-empty-lines': [ 'error', {
83 | max: 2
84 | } ],
85 | 'no-new-object': 'error',
86 | 'no-new-wrappers': 'error',
87 | 'no-tabs': 'error',
88 | 'no-throw-literal': 'error',
89 | 'no-trailing-spaces': 'error',
90 | 'no-unused-vars': [ 'error', {
91 | args: 'none'
92 | } ],
93 |
94 | 'no-with': 'error',
95 | 'object-curly-spacing': [ 'error', 'always' ],
96 | 'one-var': [ 'error', {
97 | const: 'never',
98 | let: 'never',
99 | var: 'never'
100 | } ],
101 | 'operator-linebreak': [ 'error', 'after' ],
102 | 'padded-blocks': [ 'error', 'never' ],
103 | 'prefer-promise-reject-errors': 'error',
104 | 'quotes': [ 'error', 'single', {
105 | allowTemplateLiterals: true
106 | } ],
107 | 'semi': [ 'error' ],
108 | 'semi-spacing': 'error',
109 | // 'valid-jsdoc': [ 'error', {
110 | // prefer: {
111 | // returns: 'return'
112 | // },
113 | // requireParamDescription: false,
114 | // requireReturn: false,
115 | // requireReturnDescription: false
116 | // } ],
117 | 'space-before-blocks': 'error',
118 | 'space-before-function-paren': [ 'error', {
119 | asyncArrow: 'always',
120 | anonymous: 'never',
121 | named: 'never'
122 | } ],
123 | 'spaced-comment': [ 'error', 'always' ],
124 | 'switch-colon-spacing': 'error',
125 | 'arrow-parens': [ 'error', 'always' ],
126 | 'constructor-super': 'error', // eslint:recommended
127 | 'generator-star-spacing': [ 'error', 'after' ],
128 | 'no-new-symbol': 'error', // eslint:recommended
129 | 'no-this-before-super': 'error', // eslint:recommended
130 | 'no-var': 'error',
131 | 'prefer-const': [ 'error', { destructuring: 'all' } ],
132 | 'prefer-rest-params': 'error',
133 | 'prefer-spread': 'error',
134 | 'rest-spread-spacing': 'error',
135 | 'yield-star-spacing': [ 'error', 'after' ],
136 | // 'no-await-in-loop': 'warn',
137 | 'no-unreachable-loop': 'error',
138 | // 'require-atomic-updates': 'error',
139 | 'dot-notation': 'error',
140 | // 'require-await': 'warn',
141 | // 'no-shadow': 'warn',
142 | 'no-undefined': 'error',
143 | 'line-comment-position': [ 'error', { position: 'above' } ]
144 | },
145 | overrides: [
146 | {
147 | files: [ '*.ts', '*.tsx' ],
148 | parser: '@typescript-eslint/parser',
149 | plugins: [
150 | '@typescript-eslint/eslint-plugin',
151 | 'unused-imports',
152 | 'simple-import-sort',
153 | 'import'
154 | ],
155 | parserOptions: {
156 | project: [
157 | './tsconfig.eslint.json'
158 | ],
159 | sourceType: 'module'
160 | },
161 | extends: [
162 | 'plugin:@typescript-eslint/eslint-recommended',
163 | 'plugin:@typescript-eslint/recommended'
164 | ],
165 | env: {
166 | node: true,
167 | jest: true
168 | },
169 | /**
170 | * Typescript Rules
171 | * https://github.com/bradzacher/eslint-plugin-typescript
172 | * Enable your own typescript rules.
173 | */
174 | rules: {
175 | 'unused-imports/no-unused-imports': 'error',
176 | 'simple-import-sort/imports': [ 'error', {
177 | groups: [
178 | // 1. built-in node.js modules
179 | [ `^(${ require('module').builtinModules.join('|') })(/|$)` ],
180 | // 2.1. package that start without @
181 | // 2.2. package that start with @
182 | [ '^\\w', '^@\\w' ],
183 | // 3. @nestjs packages
184 | [ '^@nestjs\/' ],
185 | // 4. @growthspace-engineering packages
186 | [ '^@growthspace-engineering\/' ],
187 | // 5. Internal growthspace packages (inside this project)
188 | [ '^@gs-' ],
189 | // 6. Parent imports. Put `..` last.
190 | // Other relative imports. Put same-folder imports and `.` last.
191 | [ '^\\.\\.(?!/?$)', '^\\.\\./?$', '^\\./(?=.*/)(?!/?$)', '^\\.(?!/?$)', '^\\./?$' ],
192 | // 7. Side effect imports.
193 | // https://riptutorial.com/javascript/example/1618/importing-with-side-effects
194 | [ '^\\u0000' ]
195 | ]
196 | } ],
197 | 'import/first': 'error',
198 | 'import/newline-after-import': 'error',
199 | 'import/no-duplicates': 'error',
200 | 'eol-last': [ 2, 'windows' ],
201 | 'comma-dangle': [ 'error', 'never' ],
202 | '@typescript-eslint/no-empty-interface': 'error',
203 | '@typescript-eslint/member-delimiter-style': 'error',
204 | '@typescript-eslint/explicit-function-return-type': 'off',
205 | '@typescript-eslint/explicit-module-boundary-types': 'off',
206 | '@typescript-eslint/naming-convention': [
207 | 'error',
208 | {
209 | 'selector': 'interface',
210 | 'format': [ 'PascalCase' ],
211 | 'custom': {
212 | 'regex': '^I[A-Z]',
213 | 'match': true
214 | }
215 | }
216 | ],
217 | '@typescript-eslint/semi': [ 'error' ],
218 | 'space-infix-ops': 'error',
219 | 'array-bracket-newline': 'off',
220 | 'array-bracket-spacing': [ 'error', 'always' ],
221 | 'array-element-newline': 'off',
222 | 'block-spacing': [ 'error', 'always' ],
223 | 'brace-style': [ 'error', '1tbs', {
224 | 'allowSingleLine': true
225 | } ],
226 | 'camelcase': [ 'error', {
227 | 'properties': 'never'
228 | } ],
229 | 'comma-spacing': [ 'error', {
230 | 'after': true,
231 | 'before': false
232 | } ],
233 | 'comma-style': 'error',
234 | 'computed-property-spacing': 'error',
235 | 'curly': [ 'error', 'multi-line' ],
236 | 'func-call-spacing': 'error',
237 | 'indent': [ 'error', 2, {
238 | 'CallExpression': {
239 | 'arguments': 1
240 | },
241 | 'FunctionDeclaration': {
242 | 'body': 1,
243 | 'parameters': 1
244 | },
245 | 'FunctionExpression': {
246 | 'body': 1,
247 | 'parameters': 1
248 | },
249 | 'ignoredNodes': [ 'ConditionalExpression' ],
250 | 'MemberExpression': 1,
251 | 'ObjectExpression': 1,
252 | 'SwitchCase': 1
253 | } ],
254 | 'key-spacing': 'error',
255 | 'keyword-spacing': 'error',
256 | 'linebreak-style': 'error',
257 | 'max-len': [ 'error', {
258 | // starting small (forcing 120), but later we should force 80
259 | code: 80,
260 | ignoreComments: true,
261 | ignoreUrls: true,
262 | ignoreStrings: true,
263 | tabWidth: 2
264 | } ],
265 | 'no-array-constructor': 'error',
266 | 'no-caller': 'error',
267 | 'no-extend-native': 'error',
268 | 'no-extra-bind': 'error',
269 | 'no-invalid-this': 'error',
270 | 'no-irregular-whitespace': 'error',
271 | 'no-mixed-spaces-and-tabs': 'error',
272 | 'no-multi-spaces': 'error',
273 | 'no-multi-str': 'error',
274 |
275 | 'no-multiple-empty-lines': [ 'error', {
276 | max: 2
277 | } ],
278 | 'no-new-object': 'error',
279 | 'no-new-wrappers': 'error',
280 | 'no-tabs': 'error',
281 | 'no-throw-literal': 'error',
282 | 'no-trailing-spaces': 'error',
283 | 'no-unused-vars': [ 'error', {
284 | args: 'none'
285 | } ],
286 |
287 | 'no-with': 'error',
288 | 'object-curly-spacing': [ 'error', 'always' ],
289 | 'one-var': [ 'error', {
290 | const: 'never',
291 | let: 'never',
292 | var: 'never'
293 | } ],
294 | 'operator-linebreak': [ 'error', 'after' ],
295 | 'padded-blocks': [ 'error', 'never' ],
296 | 'prefer-promise-reject-errors': 'error',
297 | 'quotes': [ 'error', 'single', {
298 | allowTemplateLiterals: true
299 | } ],
300 | 'semi': [ 'error' ],
301 | 'semi-spacing': 'error',
302 | 'space-before-blocks': 'error',
303 | 'space-before-function-paren': [ 'error', {
304 | asyncArrow: 'always',
305 | anonymous: 'never',
306 | named: 'never'
307 | } ],
308 | 'spaced-comment': [ 'error', 'always' ],
309 | 'switch-colon-spacing': 'error',
310 | 'arrow-parens': [ 'error', 'always' ],
311 | 'constructor-super': 'error', // eslint:recommended
312 | 'generator-star-spacing': [ 'error', 'after' ],
313 | 'no-new-symbol': 'error', // eslint:recommended
314 | 'no-this-before-super': 'error', // eslint:recommended
315 | 'no-var': 'error',
316 | 'prefer-const': [ 'error', { destructuring: 'all' } ],
317 | 'prefer-rest-params': 'error',
318 | 'prefer-spread': 'error',
319 | 'rest-spread-spacing': 'error',
320 | 'yield-star-spacing': [ 'error', 'after' ],
321 | // 'no-await-in-loop': 'warn',
322 | 'no-unreachable-loop': 'error',
323 | 'require-atomic-updates': 'error',
324 | 'dot-notation': 'error',
325 | // 'require-await': 'warn',
326 | 'no-undefined': 'error',
327 | 'line-comment-position': [ 'error', { position: 'above' } ],
328 | 'template-curly-spacing': [ 'error', 'always' ]
329 | }
330 | },
331 | {
332 | files: [
333 | '**/*.mjs'
334 | ],
335 | env: {
336 | node: true
337 | },
338 | plugins: [
339 | 'unused-imports',
340 | 'simple-import-sort',
341 | 'import'
342 | ],
343 | rules: {
344 | 'unused-imports/no-unused-imports': 'error',
345 | 'simple-import-sort/imports': [ 'error', {
346 | groups: [
347 | // 1. built-in node.js modules
348 | [ `^(${ require('module').builtinModules.join('|') })(/|$)` ],
349 | // 2.1. package that start without @
350 | // 2.2. package that start with @
351 | [ '^\\w', '^@\\w' ],
352 | // 3. @nestjs packages
353 | [ '^@nestjs\/' ],
354 | // 4. @growthspace-engineering packages
355 | [ '^@growthspace-engineering\/' ],
356 | // 5. Internal growthspace packages (inside this project)
357 | [ '^@gs-' ],
358 | // 6. Parent imports. Put `..` last.
359 | // Other relative imports. Put same-folder imports and `.` last.
360 | [ '^\\.\\.(?!/?$)', '^\\.\\./?$', '^\\./(?=.*/)(?!/?$)', '^\\.(?!/?$)', '^\\./?$' ],
361 | // 7. Side effect imports.
362 | // https://riptutorial.com/javascript/example/1618/importing-with-side-effects
363 | [ '^\\u0000' ]
364 | ]
365 | } ],
366 | 'import/first': 'error',
367 | 'import/newline-after-import': 'error',
368 | 'import/no-duplicates': 'error'
369 | }
370 | }
371 | ]
372 | };
373 |
374 | module.exports = config;
375 |
--------------------------------------------------------------------------------
/src/npm.service.ts:
--------------------------------------------------------------------------------
1 | import axios, { AxiosInstance } from 'axios';
2 | import * as FormData from 'form-data';
3 |
4 | import { logErr, logInfo, logOk, logStep, logWarn } from './logger';
5 | import {
6 | INpmAllHosts,
7 | INpmCertificate,
8 | INpmCreateCertificateRequest,
9 | INpmHostGroup,
10 | INpmLoginRequest,
11 | INpmLoginResponse,
12 | INpmProxyHost,
13 | INpmRedirectionHost
14 | } from './types';
15 | import { hostMatchesWildcard, isExpired, normalizeDomainsField } from './utils';
16 |
17 | export class NpmService {
18 | private readonly npmAxiosInstance: AxiosInstance;
19 | private tokenPromise: Promise;
20 |
21 | constructor(
22 | private readonly baseUrl: string,
23 | private readonly username: string,
24 | private readonly password: string
25 | ) {
26 | this.baseUrl = baseUrl;
27 | this.npmAxiosInstance = axios.create({
28 | baseURL: baseUrl,
29 | validateStatus: () => true,
30 | headers: {
31 | 'Content-Type': 'application/json'
32 | }
33 | });
34 | }
35 |
36 | async npmLogin(
37 | baseUrl: string,
38 | identity: string,
39 | secret: string
40 | ): Promise {
41 | if (this.tokenPromise) {
42 | return this.tokenPromise;
43 | }
44 |
45 | const request: INpmLoginRequest = { identity, secret };
46 | const { data } = await this.npmAxiosInstance.post(
47 | 'api/tokens',
48 | request
49 | );
50 | if (!data || !data.token) throw new Error('NPM login failed: no token returned');
51 |
52 | this.npmAxiosInstance
53 | .defaults
54 | .headers
55 | .common
56 | .Authorization = `Bearer ${ data.token }`;
57 |
58 | this.tokenPromise = Promise.resolve(data.token);
59 | return data.token;
60 | }
61 |
62 | async npmGetCertificates(): Promise {
63 | await this.npmLogin(this.baseUrl, this.username, this.password);
64 | const { data } = await this.npmAxiosInstance
65 | .get('api/nginx/certificates');
66 |
67 | return data;
68 | }
69 |
70 | private async validateCertificate(
71 | certificate: string,
72 | certificateKey: string
73 | ): Promise {
74 | const formData = new FormData();
75 | formData.append('certificate', Buffer.from(certificate), {
76 | filename: 'fullchain.pem',
77 | contentType: 'application/x-x509-ca-cert'
78 | });
79 | formData.append('certificate_key', Buffer.from(certificateKey), {
80 | filename: 'privkey.pem',
81 | contentType: 'application/x-x509-ca-cert'
82 | });
83 |
84 | const { status, data } = await this.npmAxiosInstance.post(
85 | 'api/nginx/certificates/validate',
86 | formData,
87 | {
88 | headers: {
89 | ...formData.getHeaders()
90 | }
91 | }
92 | );
93 |
94 | if (status !== 200) {
95 | throw new Error(
96 | `Certificate validation failed: ${ JSON.stringify(data) }`
97 | );
98 | }
99 | }
100 |
101 | private async createEmptyCertificate(niceName: string): Promise {
102 | const { status, data } = await this.npmAxiosInstance.post(
103 | 'api/nginx/certificates',
104 | {
105 | nice_name: niceName,
106 | provider: 'other'
107 | }
108 | );
109 |
110 | if (status !== 201 && status !== 200) {
111 | throw new Error(
112 | `Failed to create certificate record: ${ JSON.stringify(data) }`
113 | );
114 | }
115 |
116 | return data.id;
117 | }
118 |
119 | private async uploadCertificateFiles(
120 | certId: string,
121 | certificate: string,
122 | certificateKey: string
123 | ): Promise {
124 | const formData = new FormData();
125 | formData.append('certificate', Buffer.from(certificate), {
126 | filename: 'fullchain.pem',
127 | contentType: 'application/x-x509-ca-cert'
128 | });
129 | formData.append('certificate_key', Buffer.from(certificateKey), {
130 | filename: 'privkey.pem',
131 | contentType: 'application/x-x509-ca-cert'
132 | });
133 |
134 | const { status, data } = await this.npmAxiosInstance.post(
135 | `api/nginx/certificates/${ certId }/upload`,
136 | formData,
137 | {
138 | headers: {
139 | ...formData.getHeaders()
140 | }
141 | }
142 | );
143 |
144 | if (status !== 200) {
145 | throw new Error(`Certificate upload failed: ${ JSON.stringify(data) }`);
146 | }
147 | }
148 |
149 | async npmCreateCertificate(
150 | payload: INpmCreateCertificateRequest
151 | ): Promise {
152 | try {
153 | await this.npmLogin(this.baseUrl, this.username, this.password);
154 |
155 | // Step 1: Validate certificate files
156 | await this.validateCertificate(
157 | payload.meta.certificate,
158 | payload.meta.certificate_key
159 | );
160 |
161 | // Step 2: Create empty certificate record
162 | const certId = await this.createEmptyCertificate(payload.nice_name);
163 |
164 | // Step 3: Upload certificate files
165 | await this.uploadCertificateFiles(
166 | certId,
167 | payload.meta.certificate,
168 | payload.meta.certificate_key
169 | );
170 |
171 | // Get and return the created certificate
172 | const certificates = await this.npmGetCertificates();
173 | const created = certificates.find((c) => c.id === certId);
174 | if (!created) {
175 | throw new Error('Created certificate not found after upload');
176 | }
177 |
178 | return created;
179 | } catch (error) {
180 | if (error.response) {
181 | logErr(
182 | `Certificate creation failed with status ${ error.response.status }`
183 | );
184 | logErr(`Error response: ${ JSON.stringify(error.response.data) }`);
185 | }
186 | throw error;
187 | }
188 | }
189 |
190 | async npmDeleteCertificate(id: string): Promise {
191 | const { status } = await this.npmAxiosInstance
192 | .delete(`api/nginx/certificates/${ id }`);
193 |
194 | return status;
195 | }
196 |
197 | async npmListHosts(
198 | group: INpmHostGroup
199 | ): Promise {
200 | const { data } = await this.npmAxiosInstance
201 | .get(`api/nginx/${ group }`);
202 | return data;
203 | }
204 |
205 | async npmGetHost(
206 | group: INpmHostGroup,
207 | id: string
208 | ): Promise {
209 | const { data } = await this.npmAxiosInstance
210 | .get(`api/nginx/${ group }/${ id }`);
211 | return data;
212 | }
213 |
214 | async npmUpdateHost(
215 | group: INpmHostGroup,
216 | id: string,
217 | body: Partial
218 | ): Promise {
219 | const { status } = await this.npmAxiosInstance
220 | .put(`api/nginx/${ group }/${ id }`, body);
221 | return status;
222 | }
223 |
224 | async cleanupUnusedCertificates(
225 | targetWildcards: string[],
226 | { dryRun }: { dryRun: boolean }
227 | ): Promise {
228 | logStep('Cleanup: scanning for unused or expired certificates');
229 | const certificates = await this.npmGetCertificates();
230 | const proxy = await this.npmListHosts('proxy-hosts');
231 | const redir = await this.npmListHosts('redirection-hosts');
232 | const used = new Set();
233 | proxy.forEach((h) => { if (h.certificate_id) used.add(h.certificate_id); });
234 | redir.forEach((h) => { if (h.certificate_id) used.add(h.certificate_id); });
235 |
236 | const targetSet = new Set(targetWildcards);
237 | let deletions = 0;
238 | for (const certificate of certificates) {
239 | const domains = Array.isArray(certificate.domain_names) ?
240 | certificate.domain_names :
241 | [];
242 | const matchesTarget = domains.some((d) => targetSet.has(d));
243 | const expired = isExpired(certificate.expires_on);
244 | const referenced = used.has(certificate.id);
245 |
246 | // Skip if:
247 | // 1. Certificate doesn't match our target domains
248 | // 2. Certificate is referenced and not expired
249 | // 3. Certificate was created in the last 5 minutes (grace period for host updates)
250 | if (!matchesTarget) continue;
251 | if (referenced && !expired) continue;
252 |
253 | const gracePeriod = 5 * 60 * 1000;
254 | const certificateCreatedAgo =
255 | new Date().getTime() - new Date(certificate.created_on).getTime();
256 | const createdWithinGracePeriod = certificate.created_on &&
257 | (certificateCreatedAgo < gracePeriod);
258 |
259 | if (createdWithinGracePeriod) {
260 | logInfo([
261 | `Skip delete cert id=${ certificate.id }`,
262 | '(created within grace period)'
263 | ].join(' '));
264 | continue;
265 | }
266 |
267 | const reason = expired ? 'expired' : 'unreferenced';
268 | if (dryRun) {
269 | logInfo([
270 | 'Dry-run: would delete certificate',
271 | `id=${ certificate.id }`,
272 | JSON.stringify(domains),
273 | `(${ reason })`
274 | ].join(' '));
275 | deletions += 1;
276 | continue;
277 | }
278 | const status = await this.npmDeleteCertificate(certificate.id);
279 | if (status >= 200 && status < 300) {
280 | logOk([
281 | `Deleted certificate id=${ certificate.id }`,
282 | JSON.stringify(domains),
283 | `(${ reason })`
284 | ].join(' '));
285 | deletions += 1;
286 | } else {
287 | logWarn([
288 | `Skip delete cert id=${ certificate.id }`,
289 | `(${ reason }) status=${ status })`
290 | ].join(' '));
291 | }
292 | }
293 | if (deletions === 0) logInfo('Cleanup: nothing to delete.');
294 | return deletions;
295 | }
296 |
297 | async updateHostsForWildcard(
298 | wildcard: string,
299 | newCertId: string,
300 | { dryRun }: { dryRun: boolean }
301 | ): Promise {
302 | // , 'redirection-hosts'
303 | const groups: INpmHostGroup[] = [ 'proxy-hosts' ];
304 | let updates = 0;
305 | for (const group of groups) {
306 | logStep(`Scanning ${ group } for domains matching ${ wildcard }`);
307 | const list = group === 'proxy-hosts' ?
308 | await this.npmListHosts(group) :
309 | await this.npmListHosts(group);
310 | for (const host of list) {
311 | const domains = normalizeDomainsField(host.domain_names);
312 | const matches = domains.some((d) => hostMatchesWildcard(d, wildcard));
313 | if (!matches) continue;
314 | if (host.certificate_id === newCertId) {
315 | logInfo([
316 | '[skip]',
317 | `${ group } id=${ host.id }`,
318 | `already uses cert ${ newCertId }`
319 | ].join(' '));
320 | continue;
321 | }
322 |
323 | logInfo([
324 | `${ group } id=${ host.id }`,
325 | JSON.stringify(domains),
326 | `switching certificate → ${ newCertId }`
327 | ].join(' '));
328 | if (dryRun) {
329 | logInfo('Dry-run: would update host');
330 | updates += 1;
331 | continue;
332 | }
333 |
334 | const detail = group === 'proxy-hosts' ?
335 | await this.npmGetHost(group, host.id) :
336 | await this.npmGetHost(group, host.id);
337 | let updateBody;
338 | if (group === 'redirection-hosts') {
339 | // Get current host data and preserve most fields
340 | const current = await this.npmGetHost(
341 | group,
342 | host.id
343 | );
344 |
345 | if (!current) {
346 | logErr([
347 | 'Failed to get current host data',
348 | `${ group } id=${ host.id }`
349 | ].join(' '));
350 | continue;
351 | }
352 |
353 | // Keep everything exactly as is, just update certificate_id
354 | updateBody = {
355 | domain_names: current.domain_names,
356 | forward_domain_name: current.forward_domain_name,
357 | forward_scheme: current.forward_scheme,
358 | forward_http_code: current.forward_http_code,
359 | certificate_id: newCertId,
360 | ssl_forced: current.ssl_forced,
361 | block_exploits: current.block_exploits,
362 | advanced_config: current.advanced_config,
363 | meta: current.meta,
364 | http2_support: current.http2_support,
365 | enabled: current.enabled,
366 | hsts_enabled: current.hsts_enabled,
367 | hsts_subdomains: current.hsts_subdomains,
368 | preserve_path: current.preserve_path
369 | };
370 | } else {
371 | // Proxy hosts: mutate detail and strip read-only fields
372 | const body = { ...detail, certificate_id: newCertId };
373 | delete body.id;
374 | delete body.created_on;
375 | delete body.modified_on;
376 | delete body.owner_user_id;
377 | delete body.owner;
378 | delete body.is_deleted;
379 | delete body.deleted_at;
380 | delete body.status;
381 |
382 | updateBody = body;
383 | }
384 |
385 | try {
386 | logInfo([
387 | `Updating ${ group } id=${ host.id }`
388 | ].join(' '));
389 | const status = await this.npmUpdateHost(group, host.id, updateBody);
390 | if (status >= 200 && status < 300) {
391 | logOk([
392 | `${ group } id=${ host.id }`,
393 | `updated to certificate ${ newCertId }`
394 | ].join(' '));
395 | updates += 1;
396 | } else {
397 | logErr([
398 | `${ group } id=${ host.id }`,
399 | `update failed (status ${ status })`
400 | ].join(' '));
401 | // Get error details if available
402 | const errorDetail = await this.npmGetHost(group, host.id);
403 | logErr([
404 | 'Current host state:',
405 | JSON.stringify(errorDetail)
406 | ].join(' '));
407 | }
408 | } catch (error) {
409 | logErr([
410 | `${ group } id=${ host.id }`,
411 | `update error: ${ error.message }`
412 | ].join(' '));
413 | if (error.response) {
414 | logErr([
415 | 'Response data:',
416 | JSON.stringify(error.response.data)
417 | ].join(' '));
418 | }
419 | }
420 | }
421 | }
422 | return updates;
423 | }
424 | }
425 |
--------------------------------------------------------------------------------
/.cursor/rules/ripersigma105.mdc:
--------------------------------------------------------------------------------
1 | ---
2 | description: use these rules for every chat
3 | globs:
4 | alwaysApply: true
5 | ---
6 |
7 | # CursorRIPER♦Σ 1.0.5 — (code protect + context + permissions)
8 |
9 | ## 📚 Path & Index Definitions
10 | 📂 = "/memory-bank/"
11 | 📦 = "/memory-bank/backups/"
12 |
13 | 𝕋 = [read_files, ask_questions, observe_code, document_findings,
14 | suggest_ideas, explore_options, evaluate_approaches,
15 | create_plan, detail_specifications, sequence_steps,
16 | implement_code, follow_plan, test_implementation,
17 | validate_output, verify_against_plan, report_deviations]
18 |
19 | 𝕄 = [📂projectbrief.md, 📂systemPatterns.md,
20 | 📂techContext.md, 📂activeContext.md,
21 | 📂progress.md, 📂protection.md]
22 |
23 | ## 🔖 Reference Map
24 | ℜ = {
25 | Ψ: { // Protection
26 | 1: {s: "PROTECTED", e: "END-P", h: "!cp"},
27 | 2: {s: "GUARDED", e: "END-G", h: "!cg"},
28 | 3: {s: "INFO", e: "END-I", h: "!ci"},
29 | 4: {s: "DEBUG", e: "END-D", h: "!cd"},
30 | 5: {s: "TEST", e: "END-T", h: "!ct"},
31 | 6: {s: "CRITICAL", e: "END-C", h: "!cc"}
32 | },
33 | Θ: { // GitHub
34 | 1: {op: "search_repositories", h: "!gr"},
35 | 2: {op: "create_repository", h: "!gc"},
36 | 3: {op: "push_files", h: "!gp"},
37 | 4: {op: "create_pull_request", h: "!gpr"}
38 | },
39 | Λ: { // Web Search
40 | 1: {op: "web_search", h: "!ws"},
41 | 2: {op: "local_search", h: "!wl"},
42 | 3: {op: "fetch_url", h: "!wf"}
43 | },
44 | Υ: { // Puppeteer/Playwright
45 | 1: {op: "navigate", h: "!pn"},
46 | 2: {op: "screenshot", h: "!ps"},
47 | 3: {op: "test_record", h: "!pt"}
48 | },
49 | Ξ: { // Docker
50 | 1: {op: "create_container", h: "!dc"},
51 | 2: {op: "deploy_compose", h: "!dd"},
52 | 3: {op: "get_logs", h: "!dl"}
53 | }
54 | }
55 |
56 | ## Ω RIPER Modes with Permission Enforcement
57 |
58 | Ω₁ = 🔍R ⟶ ℙ(Ω₁) ⟶ +𝕋[0:3] -𝕋[4:15] ⟶ [MODE: RESEARCH]+findings
59 | ↪ 🔄(/research, /r) ⟶ update(𝕄[2,3]) ⟶ enforce_permissions(𝕊(Ω₁))
60 |
61 | Ω₂ = 💡I ⟶ ℙ(Ω₂) ⟶ +𝕋[4:6] -𝕋[8:15] ⟶ [MODE: INNOVATE]+possibilities
62 | ↪ 🔄(/innovate, /i) ⟶ update(𝕄[3]) ⟶ enforce_permissions(𝕊(Ω₂))
63 |
64 | Ω₃ = 📝P ⟶ ℙ(Ω₃) ⟶ +𝕋[7:9] -𝕋[10:15] ⟶ [MODE: PLAN]+checklist₁₋ₙ
65 | ↪ 🔄(/plan, /p) ⟶ update(𝕄[3,4]) ⟶ enforce_permissions(𝕊(Ω₃))
66 |
67 | Ω₄ = ⚙️E ⟶ ℙ(Ω₄) ⟶ +𝕋[10:12] -[improve,create,deviate] ⟶ [MODE: EXECUTE]+progress
68 | ↪ 🔄(/execute, /e) ⟶ update(𝕄[3,4]) ⟶ enforce_permissions(𝕊(Ω₄))
69 |
70 | Ω₅ = 🔎RV ⟶ ℙ(Ω₅) ⟶ +𝕋[13:15] -[modify,improve] ⟶ [MODE: REVIEW]+{✅|⚠️}
71 | ↪ 🔄(/review, /rev) ⟶ update(𝕄[3,4]) ⟶ enforce_permissions(𝕊(Ω₅))
72 |
73 | ## 🔐 CRUD Permission System
74 |
75 | ℙ = {C: create, R: read, U: update, D: delete}
76 |
77 | ℙ(Ω₁) = {R: ✓, C: ✗, U: ✗, D: ✗} // Research mode
78 | ℙ(Ω₂) = {R: ✓, C: ~, U: ✗, D: ✗} // Innovate mode (~: conceptual only)
79 | ℙ(Ω₃) = {R: ✓, C: ✓, U: ~, D: ✗} // Plan mode (~: plan changes only)
80 | ℙ(Ω₄) = {R: ✓, C: ✓, U: ✓, D: ~} // Execute mode (~: limited scope)
81 | ℙ(Ω₅) = {R: ✓, C: ✗, U: ✗, D: ✗} // Review mode
82 |
83 | 𝕆ᵣₑₐₗ = {modify_files, write_code, delete_content, refactor}
84 | 𝕆ᵥᵢᵣₜᵤₐₗ = {suggest_ideas, explore_concepts, evaluate_approaches}
85 | 𝕆ₒᵦₛₑᵣᵥₑ = {read_files, analyze_content, identify_patterns}
86 |
87 | 𝕊(Ω₁) = {𝕆ₒᵦₛₑᵣᵥₑ: ✓, 𝕆ᵥᵢᵣₜᵤₐₗ: ~, 𝕆ᵣₑₐₗ: ✗} // Research
88 | 𝕊(Ω₂) = {𝕆ₒᵦₛₑᵣᵥₑ: ✓, 𝕆ᵥᵢᵣₜᵤₐₗ: ✓, 𝕆ᵣₑₐₗ: ✗} // Innovate
89 | 𝕊(Ω₃) = {𝕆ₒᵦₛₑᵣᵥₑ: ✓, 𝕆ᵥᵢᵣₜᵤₐₗ: ✓, 𝕆ᵣₑₐₗ: ~} // Plan
90 | 𝕊(Ω₄) = {𝕆ₒᵦₛₑᵣᵥₑ: ✓, 𝕆ᵥᵢᵣₜᵤₐₗ: ~, 𝕆ᵣₑₐₗ: ✓} // Execute
91 | 𝕊(Ω₅) = {𝕆ₒᵦₛₑᵣᵥₑ: ✓, 𝕆ᵥᵢᵣₜᵤₐₗ: ~, 𝕆ᵣₑₐₗ: ✗} // Review
92 |
93 | ## 🛡️ Code Protection System
94 |
95 | Ψ = [PROTECTED, GUARDED, INFO, DEBUG, TEST, CRITICAL]
96 | Ψ₊ = [END-P, END-G, END-I, END-D, END-T, END-C] // End markers
97 |
98 | Ψ_behavior_summary = {
99 | Ω₁: identify ∧ document(Ψ, Ψ₊),
100 | Ω₂: respect_boundaries(Ψ, Ψ₊) ∧ propose_alternatives,
101 | Ω₃: plan_around(Ψ, Ψ₊) ∧ request_permission(Ψ.GUARDED),
102 | Ω₄: enforce_protection(Ψ, Ψ₊) ∧ follow_guidelines,
103 | Ω₅: verify_integrity(Ψ, Ψ₊) ∧ report_violations
104 | }
105 |
106 | ## 📎 Context Reference System
107 |
108 | Γ = [FILES, FOLDERS, CODE, DOCS, RULES, GIT, NOTEPADS, PINNED]
109 |
110 | Γ_symbols = {
111 | Γ₁: 📄 @Files,
112 | Γ₂: 📁 @Folders,
113 | Γ₃: 💻 @Code,
114 | Γ₄: 📚 @Docs,
115 | Γ₅: 📏 @Cursor Rules,
116 | Γ₆: 🔄 @Git,
117 | Γ₇: 📝 @Notepads,
118 | Γ₈: 📌 #Files
119 | }
120 |
121 | ## Mode-Context Mapping
122 |
123 | MΓ = {
124 | Ω₁: [Γ₄, Γ₂, Γ₆], // RESEARCH: docs, folders, git
125 | Ω₂: [Γ₃, Γ₄, Γ₇], // INNOVATE: code, docs, notepads
126 | Ω₃: [Γ₁, Γ₂, Γ₅], // PLAN: files, folders, rules
127 | Ω₄: [Γ₃, Γ₁, Γ₈], // EXECUTE: code, files, pinned
128 | Ω₅: [Γ₃, Γ₁, Γ₆] // REVIEW: code, files, git
129 | }
130 |
131 | Γ_behavior = {
132 | add_context(type, name) = {
133 | verify_exists(name),
134 | update_context_list(𝕄[3], type, name),
135 | set_context_status(name, "active")
136 | },
137 | clear_context() = {
138 | backup_context(),
139 | reset_context_list(𝕄[3])
140 | },
141 | context_for_mode(mode) = {
142 | mode_contexts = MΓ[mode],
143 | apply_mode_context(mode_contexts)
144 | }
145 | }
146 |
147 | ## Protection-Context Integration
148 |
149 | PΓ = {
150 | Ψ₁ + Γ₃: 🔒💻, // Protected code
151 | Ψ₂ + Γ₃: 🛡️💻, // Guarded code
152 | Ψ₃ + Γ₃: ℹ️💻, // Info code
153 | Ψ₄ + Γ₃: 🐞💻, // Debug code
154 | Ψ₅ + Γ₃: 🧪💻, // Test code
155 | Ψ₆ + Γ₃: ⚠️💻 // Critical code
156 | }
157 |
158 | ## Permission-Context Integration
159 |
160 | ℙΓ = {
161 | ℙ(Ω₁) + Γ₁: 📄🔍, // Research file context
162 | ℙ(Ω₂) + Γ₃: 💻💡, // Innovate code context
163 | ℙ(Ω₃) + Γ₂: 📁📝, // Plan folder context
164 | ℙ(Ω₄) + Γ₃: 💻⚙️, // Execute code context
165 | ℙ(Ω₅) + Γ₁: 📄🔎 // Review file context
166 | }
167 |
168 | ## 🚫 Violation System
169 |
170 | Ξ(op, Ω) = op ∈ 𝕊(Ω) ? allow(op) : 𝕍(op, Ω)
171 |
172 | 𝕍(op, Ω) = {
173 | log_violation(op, Ω),
174 | create_backup(),
175 | revert_to_safe_mode(),
176 | notify_violation(op, Ω)
177 | }
178 |
179 | revert_to_safe_mode() = transition(current_mode → Ω₃) // Plan is safest fallback
180 |
181 | ## Π Project Phases
182 |
183 | Π₁ = 🌱UNINITIATED ⟶ framework_installed ∧ ¬project_started
184 | Π₂ = 🚧INITIALIZING ⟶ START_active ∧ setup_ongoing
185 | Π₃ = 🏗️DEVELOPMENT ⟶ main_development ∧ RIPER_active
186 | Π₄ = 🔧MAINTENANCE ⟶ long_term_support ∧ RIPER_active
187 |
188 | Π_transitions = {
189 | Π₁→Π₂: 🔄"/start",
190 | Π₂→Π₃: ✅completion(START_phase),
191 | Π₃↔Π₄: 🔄user_request
192 | }
193 |
194 | ## 🧰 Memory System
195 |
196 | Σ_memory = {
197 | σ₁ = 📋𝕄[0] ⟶ requirements ∧ scope ∧ criteria,
198 | σ₂ = 🏛️𝕄[1] ⟶ architecture ∧ components ∧ decisions,
199 | σ₃ = 💻𝕄[2] ⟶ stack ∧ environment ∧ dependencies,
200 | σ₄ = 🔮𝕄[3] ⟶ focus ∧ changes ∧ next_steps ∧ context_references,
201 | σ₅ = 📊𝕄[4] ⟶ status ∧ milestones ∧ issues,
202 | σ₆ = 🛡️𝕄[5] ⟶ protected_regions ∧ history ∧ approvals ∧ violations
203 | }
204 |
205 | Σ_update(mode) = {
206 | Ω₁: σ₃ += technical_details, σ₄ = current_focus, set_context(MΓ[Ω₁]), enforce_permissions(𝕊(Ω₁)),
207 | Ω₂: σ₄ += potential_approaches, σ₂ += design_decisions, set_context(MΓ[Ω₂]), enforce_permissions(𝕊(Ω₂)),
208 | Ω₃: σ₄ += planned_changes, σ₅ += expected_outcomes, set_context(MΓ[Ω₃]), enforce_permissions(𝕊(Ω₃)),
209 | Ω₄: σ₅ += implementation_progress, σ₄ += step_completion, set_context(MΓ[Ω₄]), enforce_permissions(𝕊(Ω₄)),
210 | Ω₅: σ₅ += review_findings, σ₄ += review_status, set_context(MΓ[Ω₅]), enforce_permissions(𝕊(Ω₅))
211 | }
212 |
213 | ## 📂 File System Operations
214 |
215 | Φ_file = {
216 | ensure_directory(path) = path_exists(path) ? noop : create_directory(path),
217 | init() = ensure_directory(📂) ∧ ensure_directory(📦),
218 | check_files() = ∀file ∈ 𝕄, check_exists(file)
219 | }
220 |
221 | ## 📊 Context Operations
222 |
223 | Φ_context = {
224 | expand(Γₙ) = get_full_context(Γₙ), // Expand context reference
225 | filter(Γₙ, criteria) = filter_context_by(Γₙ, criteria), // Filter context
226 | persist(Γₙ, 📂) = save_context_to_memory(Γₙ, 📂), // Save context
227 | retrieve(Γₙ, 📂) = load_context_from_memory(Γₙ, 📂), // Load context
228 | rank(Γₙ, relevance) = prioritize_context(Γₙ, relevance) // Prioritize context
229 | }
230 |
231 | ## Σ_context System
232 |
233 | Σ_context = {
234 | active_references: [],
235 | status_map: {},
236 | add_reference(type, name, status = "active") = {
237 | active_references.push({type, name, added: now()}),
238 | status_map[name] = status,
239 | update_file(𝕄[3], format_context_section())
240 | },
241 | remove_reference(name) = {
242 | active_references = active_references.filter(ref => ref.name !== name),
243 | delete status_map[name],
244 | update_file(𝕄[3], format_context_section())
245 | },
246 | clear_references() = {
247 | backup_context_refs(),
248 | active_references = [],
249 | status_map = {},
250 | update_file(𝕄[3], format_empty_context())
251 | },
252 | set_status(name, status) = {
253 | status_map[name] = status,
254 | update_file(𝕄[3], format_context_section())
255 | },
256 | context_for_mode(mode) = {
257 | backup_context_refs(),
258 | clear_references(),
259 | for context_type in MΓ[mode] {
260 | add_reference(context_type, "auto:" + mode, "essential")
261 | }
262 | },
263 | format_context_section() = generate_context_markdown()
264 | }
265 |
266 | ## Σ_permission System
267 |
268 | Σ_permission = {
269 | check_permission(operation, mode) = {
270 | op_category = get_operation_category(operation),
271 | return 𝕊(mode)[op_category] === "✓" || 𝕊(mode)[op_category] === "~"
272 | },
273 |
274 | enforce_permissions(mode_permissions) = {
275 | current_permissions = mode_permissions,
276 | update_allowed_operations(current_permissions),
277 | log_permission_change()
278 | },
279 |
280 | handle_violation(operation, mode) = {
281 | severity = calculate_severity(operation, mode),
282 | log_violation_to_registry(operation, mode, severity),
283 | if (severity === "CRITICAL" || severity === "HIGH") {
284 | Σ_backup.create_backup(),
285 | safe_transition(mode, Ω₃)
286 | } else {
287 | notify_violation(operation, mode, severity)
288 | }
289 | },
290 |
291 | check_operation_allowed(operation) = {
292 | if (!check_permission(operation, current_mode)) {
293 | handle_violation(operation, current_mode),
294 | return false
295 | }
296 | return true
297 | },
298 |
299 | calculate_severity(operation, mode) = {
300 | if (operation ∈ 𝕆ᵣₑₐₗ && mode ∈ [Ω₁, Ω₂, Ω₅]) return "CRITICAL",
301 | if (operation ∈ 𝕆ᵣₑₐₗ && mode === Ω₃) return "HIGH",
302 | if (operation ∈ 𝕆ᵥᵢᵣₜᵤₐₗ && mode ∈ [Ω₁, Ω₅]) return "MEDIUM",
303 | return "LOW"
304 | }
305 | }
306 |
307 | ## Σ_backup System
308 |
309 | Σ_backup = {
310 | backup_format = "YYYY-MM-DD_HH-MM-SS",
311 | create_backup() = copy_files(𝕄, 📦 + timestamp(backup_format)),
312 | backup_context() = {
313 | ctx_backup = {refs: Σ_context.active_references, status: Σ_context.status_map}
314 | write_json(📦 + "context_" + timestamp(backup_format) + ".json", ctx_backup)
315 | },
316 | emergency_backup() = {
317 | create_backup(),
318 | write_json(📦 + "emergency_" + timestamp(backup_format) + ".json", {
319 | mode: current_mode,
320 | context: Σ_context.active_references,
321 | permissions: current_permissions
322 | })
323 | }
324 | }
325 |
326 | ## 🔄 Mode Transition with Permissions
327 |
328 | Φ_mode_transition = {
329 | transition(mode_a, mode_b) = {
330 | Σ_backup.create_backup(),
331 | verify_completion(mode_a),
332 | set_mode(mode_b),
333 | enforce_permissions(𝕊(mode_b)),
334 | update_context(MΓ[mode_b]),
335 | log_transition(mode_a, mode_b)
336 | },
337 |
338 | verify_completion(mode) = {
339 | if (has_ongoing_operations(mode)) {
340 | warn_incomplete_operations(),
341 | confirm_transition()
342 | }
343 | },
344 |
345 | enforce_permissions(mode) = {
346 | Σ_permission.enforce_permissions(𝕊(mode))
347 | }
348 | }
349 |
350 | ## 🔄 Safety Protocols
351 |
352 | Δ = {
353 | 1: destructive_op(x) ⟶ warn ∧ confirm ∧ Σ_backup.create_backup(),
354 | 2: phase_transition(x) ⟶ verify ∧ Σ_backup.create_backup() ∧ update,
355 | 3: permission_violation(op) ⟶ 𝕍(op, current_mode),
356 | 4: error(x) ⟶ report("Framework issue: " + x) ∧ suggest_recovery(x),
357 | 5: context_change() ⟶ Σ_backup.backup_context() ∧ update_context_references()
358 | }
359 |
360 | ## 🔍 Context Commands
361 |
362 | Φ_context_commands = {
363 | !af(file) = Σ_context.add_reference(Γ₁, file), // Add file reference
364 | !ad(folder) = Σ_context.add_reference(Γ₂, folder), // Add folder reference
365 | !ac(code) = Σ_context.add_reference(Γ₃, code), // Add code reference
366 | !adoc(doc) = Σ_context.add_reference(Γ₄, doc), // Add documentation reference
367 | !ar(rule) = Σ_context.add_reference(Γ₅, rule), // Add rule reference
368 | !ag(git) = Σ_context.add_reference(Γ₆, git), // Add git reference
369 | !an(notepad) = Σ_context.add_reference(Γ₇, notepad), // Add notepad reference
370 | !pf(file) = Σ_context.add_reference(Γ₈, file), // Pin file to context
371 | !cs(ref, status) = Σ_context.set_status(ref, status), // Set context status
372 | !cr(ref) = Σ_context.remove_reference(ref), // Remove context reference
373 | !cc = Σ_context.clear_references(), // Clear all context references
374 | !cm = Σ_context.context_for_mode(current_mode) // Set context for current mode
375 | }
376 |
377 | ## 🔐 Permission Commands
378 |
379 | Φ_permission_commands = {
380 | !ckp = show_current_permissions(), // Check permissions for current mode
381 | !pm(operation) = check_operation_permitted(operation), // Check if operation is permitted
382 | !sp(mode) = show_mode_permissions(mode), // Show permissions for specified mode
383 | !vm(operation) = suggest_appropriate_mode(operation) // Verify mode appropriate for operation
384 | }
385 |
386 | ## 🏁 START Phase (Π₂)
387 |
388 | S₁₋₆ = [requirements, technology, architecture, scaffolding, environment, memory]
389 |
390 | START_process = {
391 | S₀: create_directory(📂),
392 | S₁: gather(requirements) ⟶ create(𝕄[0]),
393 | S₂: select(technologies) ⟶ update(𝕄[2]),
394 | S₃: define(architecture) ⟶ create(𝕄[1]),
395 | S₄: scaffold(project) ⟶ create(directories),
396 | S₅: setup(environment) ⟶ update(𝕄[2]),
397 | S₆: initialize(memory) ⟶ create(𝕄[0:5])
398 | }
399 |
400 | ## 📑 Memory Templates
401 |
402 | Σ_templates = {
403 | σ₁: """# σ₁: Project Brief\n*v1.0 | Created: {DATE} | Updated: {DATE}*\n*Π: {PHASE} | Ω: {MODE}*\n\n## 🏆 Overview\n[Project description]\n\n## 📋 Requirements\n- [R₁] [Requirement 1]\n...""",
404 |
405 | σ₂: """# σ₂: System Patterns\n*v1.0 | Created: {DATE} | Updated: {DATE}*\n*Π: {PHASE} | Ω: {MODE}*\n\n## 🏛️ Architecture Overview\n[Architecture description]\n...""",
406 |
407 | σ₃: """# σ₃: Technical Context\n*v1.0 | Created: {DATE} | Updated: {DATE}*\n*Π: {PHASE} | Ω: {MODE}*\n\n## 🛠️ Technology Stack\n- 🖥️ Frontend: [Technologies]\n...""",
408 |
409 | σ₄: """# σ₄: Active Context\n*v1.0 | Created: {DATE} | Updated: {DATE}*\n*Π: {PHASE} | Ω: {MODE}*\n\n## 🔮 Current Focus\n[Current focus]\n\n## 📎 Context References\n- 📄 Active Files: []\n- 💻 Active Code: []\n- 📚 Active Docs: []\n- 📁 Active Folders: []\n- 🔄 Git References: []\n- 📏 Active Rules: []\n\n## 📡 Context Status\n- 🟢 Active: []\n- 🟡 Partially Relevant: []\n- 🟣 Essential: []\n- 🔴 Deprecated: []\n...""",
410 |
411 | σ₅: """# σ₅: Progress Tracker\n*v1.0 | Created: {DATE} | Updated: {DATE}*\n*Π: {PHASE} | Ω: {MODE}*\n\n## 📈 Project Status\nCompletion: 0%\n...""",
412 |
413 | σ₆: """# σ₆: Protection Registry\n*v1.0 | Created: {DATE} | Updated: {DATE}*\n*Π: {PHASE} | Ω: {MODE}*\n\n## 🛡️ Protected Regions\n[Protected code registry]\n\n## 📜 Protection History\n[History and changes]\n\n## ✅ Approvals\n[Modification approvals]\n\n## ⚠️ Permission Violations\n[Violation logs]""",
414 |
415 | symbols: """# 🔣 Symbol Reference Guide\n*v1.0 | Created: {DATE} | Updated: {DATE}*\n\n## 📁 File Symbols\n- 📂 = /memory-bank/\n..."""
416 | }
417 |
418 | Φ_memory = {
419 | create_template(template, params) = template.replace({PLACEHOLDERS}, params),
420 | initialize() = {
421 | ensure_directory(📂),
422 | create_file(𝕄[0], create_template(Σ_templates.σ₁, {DATE: now(), PHASE: current_phase, MODE: current_mode})),
423 | create_file(𝕄[1], create_template(Σ_templates.σ₂, {DATE: now(), PHASE: current_phase, MODE: current_mode})),
424 | create_file(𝕄[2], create_template(Σ_templates.σ₃, {DATE: now(), PHASE: current_phase, MODE: current_mode})),
425 | create_file(𝕄[3], create_template(Σ_templates.σ₄, {DATE: now(), PHASE: current_phase, MODE: current_mode})),
426 | create_file(𝕄[4], create_template(Σ_templates.σ₅, {DATE: now(), PHASE: current_phase, MODE: current_mode})),
427 | create_file(𝕄[5], create_template(Σ_templates.σ₆, {DATE: now(), PHASE: current_phase, MODE: current_mode})),
428 | create_file(📂symbols.md, create_template(Σ_templates.symbols, {DATE: now()}))
429 | }
430 | }
431 |
432 | ## 🔗 Extended Cross-References
433 |
434 | χ_refs = {
435 | standard: "[↗️σ₁:R₁]", // Standard cross-reference
436 | with_context: "[↗️σ₁:R₁|Γ₃]", // Cross-reference with context
437 | context_only: "[Γ₃:ClassA]", // Context reference
438 | protection_context: "[Ψ₁+Γ₃:validate()]", // Protection with context
439 | permission_context: "[ℙ(Ω₁):read_only]" // Permission reference
440 | }
441 |
--------------------------------------------------------------------------------