├── .codefetchignore ├── .eslintcache ├── .gitattributes ├── .github └── workflows │ ├── README.md │ ├── pkg-pr-new.yml │ ├── sdk-pr.yml │ └── sdk-test.yml ├── .gitignore ├── .npmignore ├── .prettierignore ├── .prettierrc ├── CHANGELOG.md ├── LICENSE ├── README.md ├── codefetch.config.mjs ├── eslint.config.mjs ├── modeldb.json ├── package.json ├── packages ├── cli │ ├── .codefetchignore │ ├── README.md │ ├── build.config.ts │ ├── codefetch.config.mjs │ ├── package.json │ ├── src │ │ ├── args.ts │ │ ├── cli.ts │ │ ├── commands │ │ │ ├── default.ts │ │ │ └── init.ts │ │ ├── config.ts │ │ ├── format-model-info.ts │ │ ├── help-prompt.ts │ │ ├── index.ts │ │ ├── markdown.ts │ │ └── prompts │ │ │ ├── codegen.ts │ │ │ ├── fix.ts │ │ │ ├── improve.ts │ │ │ └── testgen.ts │ ├── test-exists.js │ ├── test.js │ ├── test │ │ ├── _setup.ts │ │ ├── fixtures │ │ │ ├── codebase-test │ │ │ │ ├── .codefetchignore │ │ │ │ ├── .eslintrc.json │ │ │ │ ├── .gitignore │ │ │ │ ├── README.md │ │ │ │ ├── ignore-this-file │ │ │ │ ├── package-lock.json │ │ │ │ ├── package.json │ │ │ │ ├── public │ │ │ │ │ └── image.png │ │ │ │ └── src │ │ │ │ │ ├── app.css │ │ │ │ │ ├── app.js │ │ │ │ │ ├── components │ │ │ │ │ ├── base │ │ │ │ │ │ ├── container.js │ │ │ │ │ │ └── ignore-this-file-deep │ │ │ │ │ ├── button.js │ │ │ │ │ └── header.js │ │ │ │ │ ├── logo.svg │ │ │ │ │ └── utils │ │ │ │ │ ├── test1.ts │ │ │ │ │ └── test2.js │ │ │ ├── init-test │ │ │ │ ├── .codefetchignore │ │ │ │ └── codefetch.config.mjs │ │ │ └── prompt-test │ │ │ │ └── test.js │ │ ├── integration │ │ │ ├── codebase-fixture.test.ts │ │ │ ├── init.test.ts │ │ │ └── prompt-fixture.test.ts │ │ ├── mocks │ │ │ └── server.ts │ │ ├── regression │ │ │ └── regression.test.ts │ │ └── unit │ │ │ ├── args.test.ts │ │ │ ├── files.test.ts │ │ │ ├── markdown.test.ts │ │ │ ├── token-counter.test.ts │ │ │ └── web │ │ │ ├── cache.test.ts │ │ │ └── url-handler.test.ts │ ├── tsconfig.json │ └── vitest.config.ts ├── mcp │ ├── README.md │ ├── 
build.config.ts │ ├── package.json │ ├── src │ │ ├── index.ts │ │ └── server.ts │ ├── test │ │ └── dummy.test.ts │ ├── tsconfig.json │ └── vitest.config.ts └── sdk │ ├── .prettierrc │ ├── CHANGELOG.md │ ├── README-Worker.md │ ├── README.md │ ├── build.browser.config.ts │ ├── build.config.ts │ ├── build.worker.config.ts │ ├── dist-browser │ ├── browser.d.mts │ ├── browser.d.ts │ └── browser.mjs │ ├── dist-worker │ ├── worker.d.mts │ ├── worker.d.ts │ └── worker.mjs │ ├── examples │ ├── worker-github-tarball.ts │ ├── worker.ts │ └── wrangler.toml │ ├── package.json │ ├── scripts │ └── verify-worker.sh │ ├── src │ ├── browser.ts │ ├── cache-enhanced.ts │ ├── cache │ │ ├── cloudflare-cache.ts │ │ ├── factory.ts │ │ ├── filesystem-cache.ts │ │ ├── index.ts │ │ ├── interface.ts │ │ ├── memory-cache.ts │ │ └── validation.ts │ ├── config-worker.ts │ ├── config.ts │ ├── constants.ts │ ├── default-ignore.ts │ ├── env.ts │ ├── errors.ts │ ├── fetch-result.ts │ ├── fetch.ts │ ├── files-tree.ts │ ├── files.ts │ ├── index.ts │ ├── markdown-content.ts │ ├── markdown.ts │ ├── model-db.ts │ ├── prompts │ │ ├── codegen.ts │ │ ├── fix.ts │ │ ├── improve.ts │ │ ├── index.ts │ │ └── testgen.ts │ ├── streaming.ts │ ├── template-parser.ts │ ├── token-counter.ts │ ├── tree-utils.ts │ ├── tree.ts │ ├── type-guards.ts │ ├── types.ts │ ├── utils-browser.ts │ ├── utils.ts │ ├── utils │ │ └── path.ts │ ├── web │ │ ├── cache-worker.ts │ │ ├── cache.ts │ │ ├── github-api.ts │ │ ├── github-tarball.ts │ │ ├── html-to-markdown.ts │ │ ├── index.ts │ │ ├── sdk-web-fetch-worker.ts │ │ ├── sdk-web-fetch.ts │ │ ├── types.ts │ │ ├── url-handler.ts │ │ └── web-fetch.ts │ └── worker.ts │ ├── test │ ├── browser-exports.test.ts │ ├── cache-enhanced.test.ts │ ├── cache.test.ts │ ├── cache │ │ ├── cloudflare-cache.test.ts │ │ └── interface.test.ts │ ├── config.test.ts │ ├── demo-github.test.ts │ ├── errors.test.ts │ ├── fetch-json.test.ts │ ├── files.test.ts │ ├── fixtures │ │ ├── sample-project │ │ │ ├── 
index.js │ │ │ └── package.json │ │ └── tiktoken │ │ │ ├── cl100k_base.json │ │ │ ├── o200k_base.json │ │ │ └── p50k_base.json │ ├── github-api.test.ts │ ├── local-worker-test.js │ ├── markdown.test.ts │ ├── mocks │ │ └── server.ts │ ├── setup.ts │ ├── streaming.test.ts │ ├── token-counter.test.ts │ ├── tree-utils.test.ts │ ├── tree.test.ts │ ├── type-guards.test.ts │ ├── verify-worker-build.js │ ├── web-fetch.test.ts │ ├── worker-exports.test.ts │ ├── worker-integration.test.ts │ ├── worker-runtime-test.ts │ ├── worker-test-example.ts │ └── worker.test.ts │ ├── tsconfig.json │ └── vitest.config.ts ├── playground ├── .gitignore ├── README.md ├── analyze-github-repo.js ├── code-analyzer.js ├── codefetch-analysis.md ├── generate-docs.js ├── list-outputs.js ├── package-lock.json ├── package.json ├── quick-web-test.js ├── test-sdk.js └── test-web-fetch.js ├── pnpm-lock.yaml ├── pnpm-workspace.yaml ├── public ├── cover.jpeg └── tokenlimiter.png ├── scripts ├── release-fix-workspace.ts └── release.ts ├── test-url.js └── tsconfig.base.json /.codefetchignore: -------------------------------------------------------------------------------- 1 | test/ 2 | vitest.config.ts 3 | modeldb.json -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto eol=lf 3 | 4 | # Explicitly declare text files you want to always be normalized and converted 5 | # to native line endings on checkout. 6 | *.ts text eol=lf 7 | *.js text eol=lf 8 | *.mjs text eol=lf 9 | *.cjs text eol=lf 10 | *.json text eol=lf 11 | *.md text eol=lf 12 | *.yml text eol=lf 13 | *.yaml text eol=lf 14 | 15 | # Denote all files that are truly binary and should not be modified. 
16 | *.png binary 17 | *.jpg binary 18 | *.jpeg binary 19 | *.gif binary 20 | *.ico binary 21 | *.pdf binary -------------------------------------------------------------------------------- /.github/workflows/README.md: -------------------------------------------------------------------------------- 1 | # GitHub Actions Workflows 2 | 3 | This directory contains CI/CD workflows for the Codefetch project. 4 | 5 | ## SDK Workflows 6 | 7 | ### sdk-test.yml 8 | Main test workflow for the SDK package that runs on: 9 | - Push to main/develop branches 10 | - Pull requests to main/develop branches 11 | 12 | **Features:** 13 | - Matrix testing across OS (Ubuntu, macOS, Windows) and Node versions (18, 20, 22) 14 | - Linting with ESLint 15 | - Type checking with TypeScript 16 | - Unit tests with coverage reporting 17 | - Build validation for both main SDK and worker builds 18 | - Coverage upload to Codecov 19 | 20 | ### sdk-pr.yml 21 | Lightweight PR checks that run on all SDK-related pull requests. 22 | 23 | **Features:** 24 | - Quick validation (lint, type check, test, build) 25 | - Runs only on Ubuntu with Node 20 26 | - Comments PR with test results summary 27 | - Faster feedback for contributors 28 | 29 | ## Workflow Configuration 30 | 31 | All SDK workflows are triggered only when changes are made to: 32 | - `packages/sdk/**` - SDK source and test files 33 | - `.github/workflows/sdk-*.yml` - Workflow files themselves 34 | - `pnpm-lock.yaml` - Dependencies 35 | 36 | This ensures CI runs only when relevant changes are made. 
-------------------------------------------------------------------------------- /.github/workflows/pkg-pr-new.yml: -------------------------------------------------------------------------------- 1 | name: Publish PR Preview 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize] 6 | 7 | jobs: 8 | publish-preview: 9 | runs-on: ubuntu-latest 10 | 11 | steps: 12 | - name: Checkout code 13 | uses: actions/checkout@v4 14 | 15 | - name: Setup pnpm 16 | uses: pnpm/action-setup@v2 17 | with: 18 | version: 9.14.4 19 | 20 | - name: Setup Node.js 21 | uses: actions/setup-node@v4 22 | with: 23 | node-version: 20 24 | cache: 'pnpm' 25 | 26 | - name: Install dependencies 27 | run: pnpm install --frozen-lockfile 28 | 29 | - name: Build packages 30 | run: pnpm run build 31 | 32 | - name: Publish Preview 33 | run: npx pkg-pr-new publish ./packages/sdk -------------------------------------------------------------------------------- /.github/workflows/sdk-test.yml: -------------------------------------------------------------------------------- 1 | name: SDK Tests 2 | 3 | on: 4 | push: 5 | branches: [main, develop] 6 | paths: 7 | - 'packages/sdk/**' 8 | - '.github/workflows/sdk-test.yml' 9 | - 'pnpm-lock.yaml' 10 | pull_request: 11 | branches: [main, develop] 12 | paths: 13 | - 'packages/sdk/**' 14 | - '.github/workflows/sdk-test.yml' 15 | - 'pnpm-lock.yaml' 16 | 17 | jobs: 18 | test: 19 | name: Test SDK (${{ matrix.os }}, Node ${{ matrix.node-version }}) 20 | runs-on: ${{ matrix.os }} 21 | strategy: 22 | matrix: 23 | os: [ubuntu-latest, windows-latest, macos-latest] 24 | node-version: [18.x, 20.x, 22.x] 25 | # Skip some combinations to reduce CI time 26 | exclude: 27 | - os: macos-latest 28 | node-version: 18.x 29 | - os: windows-latest 30 | node-version: 18.x 31 | 32 | steps: 33 | - name: Checkout code 34 | uses: actions/checkout@v4 35 | 36 | - name: Install pnpm 37 | uses: pnpm/action-setup@v2 38 | with: 39 | version: 9 40 | run_install: false 41 | 42 | - name: Setup 
Node.js ${{ matrix.node-version }} 43 | uses: actions/setup-node@v4 44 | with: 45 | node-version: ${{ matrix.node-version }} 46 | cache: 'pnpm' 47 | 48 | - name: Get pnpm store directory 49 | id: pnpm-cache 50 | shell: bash 51 | run: | 52 | echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT 53 | 54 | - name: Setup pnpm cache 55 | uses: actions/cache@v3 56 | with: 57 | path: ${{ steps.pnpm-cache.outputs.STORE_PATH }} 58 | key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} 59 | restore-keys: | 60 | ${{ runner.os }}-pnpm-store- 61 | 62 | - name: Install dependencies 63 | run: | 64 | if [ -f "pnpm-lock.yaml" ]; then 65 | pnpm install --frozen-lockfile 66 | else 67 | pnpm install --no-frozen-lockfile 68 | fi 69 | shell: bash 70 | 71 | - name: Run linting 72 | if: runner.os != 'Windows' 73 | run: pnpm --filter codefetch-sdk lint 74 | 75 | - name: Run linting (Windows - ESLint only) 76 | if: runner.os == 'Windows' 77 | run: pnpm --filter codefetch-sdk lint:eslint 78 | 79 | - name: Run type checking 80 | run: pnpm --filter codefetch-sdk test:types 81 | 82 | - name: Run tests with coverage 83 | run: pnpm --filter codefetch-sdk test:coverage 84 | env: 85 | CI: true 86 | 87 | - name: Upload coverage reports 88 | if: matrix.os == 'ubuntu-latest' && matrix.node-version == '20.x' 89 | uses: codecov/codecov-action@v3 90 | with: 91 | directory: ./packages/sdk/coverage 92 | flags: sdk 93 | name: sdk-coverage 94 | fail_ci_if_error: false 95 | 96 | build-check: 97 | name: Build Check 98 | runs-on: ubuntu-latest 99 | needs: test 100 | 101 | steps: 102 | - name: Checkout code 103 | uses: actions/checkout@v4 104 | 105 | - name: Install pnpm 106 | uses: pnpm/action-setup@v2 107 | with: 108 | version: 9 109 | run_install: false 110 | 111 | - name: Setup Node.js 112 | uses: actions/setup-node@v4 113 | with: 114 | node-version: 20.x 115 | cache: 'pnpm' 116 | 117 | - name: Install dependencies 118 | run: | 119 | if [ -f "pnpm-lock.yaml" ]; then 120 | pnpm install 
--frozen-lockfile 121 | else 122 | pnpm install --no-frozen-lockfile 123 | fi 124 | 125 | - name: Build SDK 126 | run: pnpm --filter codefetch-sdk build 127 | 128 | - name: Build Worker 129 | run: pnpm --filter codefetch-sdk build:worker 130 | 131 | - name: Check build output 132 | run: | 133 | echo "Checking build artifacts..." 134 | ls -la packages/sdk/dist/ 135 | ls -la packages/sdk/dist-worker/ 136 | 137 | # Test that the built package can be imported 138 | cd packages/sdk 139 | node -e "const sdk = require('./dist/index.cjs'); console.log('CJS import successful');" 140 | node --input-type=module -e "import * as sdk from './dist/index.mjs'; console.log('ESM import successful');" -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | dist/ 3 | *.log 4 | .DS_Store 5 | coverage/ 6 | .env 7 | .cursorrules 8 | .cursorignore 9 | codefetch 10 | docs 11 | .hide 12 | plan/ 13 | .eslintcache -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | src/ 2 | *.test.js 3 | build.config.ts 4 | tsconfig.json 5 | .babelrc 6 | # Don't ignore dist/*.d.ts files 7 | !dist/**/*.d.ts -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | dist 2 | lib 3 | docs/ 4 | docs/**/* 5 | .nuxt 6 | **/.nuxt/** -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "trailingComma": "es5" 3 | } 4 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # 
Changelog 2 | 3 | ## 1.5.1 4 | 5 | - Enhance argument parsing and file handling: add token-count-only option, improve glob path handling, and update tests for new functionality. (49fcbbf) 6 | - Update tracked models in config files to match new model database (93c8882) 7 | - Update model database to only include o3, gemini-2.5-pro, claude-sonnet-4, and claude-opus-4 (ce3f082) -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 codefetch 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
-------------------------------------------------------------------------------- /codefetch.config.mjs: -------------------------------------------------------------------------------- 1 | /** @type {import('codefetch').CodefetchConfig} */ 2 | export default { 3 | outputPath: "codefetch", 4 | outputFile: "codebase.md", 5 | maxTokens: 999_000, 6 | verbose: 1, 7 | projectTree: 5, 8 | defaultIgnore: true, 9 | gitignore: true, 10 | tokenEncoder: "simple", 11 | tokenLimiter: "truncated", 12 | trackedModels: ["o3", "gemini-2.5-pro", "claude-sonnet-4", "claude-opus-4"], 13 | dryRun: false, 14 | disableLineNumbers: false, 15 | defaultPromptFile: undefined, 16 | defaultChat: "https://chat.com", 17 | templateVars: {}, 18 | }; 19 | -------------------------------------------------------------------------------- /eslint.config.mjs: -------------------------------------------------------------------------------- 1 | import unjs from "eslint-config-unjs"; 2 | 3 | // https://github.com/unjs/eslint-config 4 | export default unjs({ 5 | ignores: [ 6 | "docs/**", 7 | "docs/.nuxt/**", 8 | "docs", 9 | "**/test/fixtures/**", 10 | "**/dist/**", 11 | "packages/*/dist/**", 12 | "packages/*/dist-browser/**", 13 | "packages/*/dist-worker/**", 14 | ], 15 | rules: { 16 | "unicorn/no-null": 0, 17 | "unicorn/prefer-top-level-await": 0, 18 | "unicorn/template-indent": 0, 19 | "unicorn/no-process-exit": 0, 20 | }, 21 | }); 22 | -------------------------------------------------------------------------------- /modeldb.json: -------------------------------------------------------------------------------- 1 | { 2 | "o3": { 3 | "max_tokens": 100000, 4 | "max_input_tokens": 200000, 5 | "max_output_tokens": 100000, 6 | "input_cost_per_token": 0.00001, 7 | "output_cost_per_token": 0.00004, 8 | "litellm_provider": "openai", 9 | "mode": "chat", 10 | "supports_function_calling": true, 11 | "supports_parallel_function_calling": true, 12 | "supports_vision": true, 13 | "supports_response_schema": true, 14 | 
"supports_system_messages": true 15 | }, 16 | "gemini-2.5-pro": { 17 | "max_tokens": 65535, 18 | "max_input_tokens": 1048576, 19 | "max_output_tokens": 65535, 20 | "input_cost_per_token": 0.00000125, 21 | "output_cost_per_token": 0.0000025, 22 | "input_cost_per_token_above_200k": 0.0000025, 23 | "output_cost_per_token_above_200k": 0.000015, 24 | "litellm_provider": "gemini", 25 | "mode": "chat", 26 | "supports_function_calling": true, 27 | "supports_parallel_function_calling": true, 28 | "supports_vision": true, 29 | "supports_response_schema": true, 30 | "supports_system_messages": true 31 | }, 32 | "claude-sonnet-4": { 33 | "max_tokens": 64000, 34 | "max_input_tokens": 200000, 35 | "max_output_tokens": 64000, 36 | "input_cost_per_token": 0.000003, 37 | "output_cost_per_token": 0.000015, 38 | "litellm_provider": "anthropic", 39 | "mode": "chat", 40 | "supports_function_calling": true, 41 | "supports_parallel_function_calling": true, 42 | "supports_vision": true, 43 | "supports_system_messages": true 44 | }, 45 | "claude-opus-4": { 46 | "max_tokens": 32000, 47 | "max_input_tokens": 200000, 48 | "max_output_tokens": 32000, 49 | "input_cost_per_token": 0.000015, 50 | "output_cost_per_token": 0.000075, 51 | "litellm_provider": "anthropic", 52 | "mode": "chat", 53 | "supports_function_calling": true, 54 | "supports_parallel_function_calling": true, 55 | "supports_vision": true, 56 | "supports_system_messages": true 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "codefetch-monorepo", 3 | "version": "1.5.1", 4 | "private": true, 5 | "description": "Codefetch monorepo", 6 | "repository": "regenrek/codefetch", 7 | "license": "MIT", 8 | "type": "module", 9 | "scripts": { 10 | "build": "pnpm -r build", 11 | "test": "pnpm -r test", 12 | "cli": "pnpm --filter codefetch run cli", 13 | "lint": "pnpm -r lint", 14 | 
"release:cli": "tsx scripts/release-fix-workspace.ts", 15 | "release:legacy": "tsx scripts/release.ts", 16 | "release:sdk": "cd packages/sdk && tsx ../../scripts/release.ts", 17 | "release:all": "pnpm release:sdk && pnpm release:cli" 18 | }, 19 | "devDependencies": { 20 | "@types/node": "^22.10.5", 21 | "@vitest/coverage-v8": "^2.1.8", 22 | "eslint": "^9.18.0", 23 | "eslint-config-unjs": "^0.4.2", 24 | "prettier": "^3.4.2", 25 | "tsx": "^4.19.2", 26 | "typescript": "^5.7.3", 27 | "unbuild": "3.2.0", 28 | "vitest": "^2.1.8" 29 | }, 30 | "packageManager": "pnpm@9.14.4" 31 | } 32 | -------------------------------------------------------------------------------- /packages/cli/.codefetchignore: -------------------------------------------------------------------------------- 1 | test/ 2 | vitest.config.ts -------------------------------------------------------------------------------- /packages/cli/build.config.ts: -------------------------------------------------------------------------------- 1 | import { defineBuildConfig } from "unbuild"; 2 | 3 | export default defineBuildConfig({ 4 | entries: [{ input: "./src/index" }, { input: "./src/cli", format: "esm" }], 5 | declaration: true, 6 | clean: true, 7 | rollup: { 8 | emitCJS: true, 9 | esbuild: { 10 | target: "node18", 11 | }, 12 | }, 13 | failOnWarn: false, 14 | }); 15 | -------------------------------------------------------------------------------- /packages/cli/codefetch.config.mjs: -------------------------------------------------------------------------------- 1 | /** @type {import('codefetch').CodefetchConfig} */ 2 | export default { 3 | "projectTree": 5, 4 | "tokenLimiter": "truncated", 5 | "defaultPromptFile": "default.md" 6 | }; 7 | -------------------------------------------------------------------------------- /packages/cli/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "codefetch", 3 | "version": "1.6.1", 4 | "description": "Fetches all 
files in the current directory and outputs them in a Markdown file.", 5 | "repository": "regenrek/codefetch", 6 | "license": "MIT", 7 | "type": "module", 8 | "sideEffects": false, 9 | "exports": { 10 | "./package.json": "./package.json", 11 | ".": { 12 | "import": { 13 | "types": "./dist/index.d.mts", 14 | "default": "./dist/index.mjs" 15 | }, 16 | "require": { 17 | "types": "./dist/index.d.cts", 18 | "default": "./dist/index.cjs" 19 | } 20 | } 21 | }, 22 | "bin": { 23 | "codefetch": "./dist/cli.mjs" 24 | }, 25 | "main": "./dist/index.cjs", 26 | "module": "./dist/index.mjs", 27 | "types": "./dist/index.d.cts", 28 | "files": [ 29 | "dist" 30 | ], 31 | "scripts": { 32 | "build": "unbuild", 33 | "dev": "vitest", 34 | "lint": "eslint --cache . && prettier -c 'src/**/*' 'test/**/*' '!test/fixtures/**'", 35 | "start": "tsx src/cli.ts", 36 | "lint:fix": "eslint --cache . --fix && prettier -c 'src/**/*' 'test/**/*' '!test/fixtures/**' -w", 37 | "test": "npm run lint && npm run test:types && vitest run --coverage", 38 | "test:types": "tsc --noEmit --skipLibCheck", 39 | "cli": "node ./dist/cli.mjs" 40 | }, 41 | "dependencies": { 42 | "c12": "^2.0.1", 43 | "codefetch-sdk": "workspace:*", 44 | "consola": "^3.3.3", 45 | "ignore": "^7.0.0", 46 | "mri": "^1.2.0", 47 | "pathe": "^2.0.1" 48 | }, 49 | "devDependencies": { 50 | "@types/node": "^22.10.5", 51 | "@vitest/coverage-v8": "^2.1.8", 52 | "eslint": "^9.18.0", 53 | "eslint-config-unjs": "^0.4.2", 54 | "msw": "^2.10.4", 55 | "prettier": "^3.4.2", 56 | "tsx": "^4.19.2", 57 | "typescript": "^5.7.3", 58 | "unbuild": "3.2.0", 59 | "vitest": "^2.1.8" 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /packages/cli/src/cli.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import consola from "consola"; 3 | import mri from "mri"; 4 | 5 | // Define proper types for subCommands and their return values 6 | type CommandModule = 
{ 7 | default: (args: any) => Promise; 8 | }; 9 | 10 | const subCommands: Record Promise> = { 11 | _default: () => import("./commands/default"), 12 | init: () => import("./commands/init"), 13 | }; 14 | 15 | async function main() { 16 | const args = process.argv.slice(2); 17 | let subCommand = args[0]; 18 | if (!subCommand || subCommand.startsWith("-")) { 19 | subCommand = "_default"; 20 | } else { 21 | args.shift(); 22 | } 23 | 24 | if (!(subCommand in subCommands)) { 25 | consola.error(`Unknown command ${subCommand}`); 26 | process.exit(1); 27 | } 28 | 29 | await subCommands[subCommand as keyof typeof subCommands]?.() 30 | .then((mod) => mod.default(mri(args))) 31 | .catch((error) => { 32 | console.error("Error executing command:", error); 33 | process.exit(1); 34 | }); 35 | } 36 | 37 | main().catch(consola.error); 38 | -------------------------------------------------------------------------------- /packages/cli/src/commands/init.ts: -------------------------------------------------------------------------------- 1 | import { promises as fsp } from "node:fs"; 2 | import { join } from "pathe"; 3 | import consola from "consola"; 4 | import type { CodefetchConfig } from "../config"; 5 | 6 | const createConfigFile = async ( 7 | config: Partial, 8 | cwd: string 9 | ) => { 10 | const configContent = `/** @type {import('codefetch').CodefetchConfig} */ 11 | export default ${JSON.stringify(config, null, 2)}; 12 | `; 13 | 14 | await fsp.writeFile(join(cwd, "codefetch.config.mjs"), configContent); 15 | }; 16 | 17 | const createIgnoreFile = async (cwd: string) => { 18 | const content = "test/\nvitest.config.ts\n"; 19 | await fsp.writeFile(join(cwd, ".codefetchignore"), content); 20 | }; 21 | 22 | const createDirectoryStructure = async (cwd: string) => { 23 | await fsp.mkdir(join(cwd, "codefetch"), { recursive: true }); 24 | await fsp.mkdir(join(cwd, "codefetch/prompts"), { recursive: true }); 25 | 26 | const { default: fixPrompt } = await import("../prompts/fix"); 27 | await 
fsp.writeFile(join(cwd, "codefetch/prompts/default.md"), fixPrompt); 28 | }; 29 | 30 | export default async function initCommand(args: any = {}) { 31 | const cwd = process.cwd(); 32 | 33 | // Check for non-interactive mode 34 | const useDefault = args.default || args.d; 35 | const setupType = useDefault 36 | ? "default" 37 | : await consola.prompt("Choose setup type:", { 38 | type: "select", 39 | options: ["default", "custom"], 40 | }); 41 | 42 | if (setupType === "default") { 43 | const config: Partial = { 44 | projectTree: 5, 45 | tokenLimiter: "truncated" as const, 46 | defaultPromptFile: "default.md", 47 | }; 48 | await createConfigFile(config, cwd); 49 | await createIgnoreFile(cwd); 50 | await createDirectoryStructure(cwd); 51 | } else { 52 | const extensions = await consola.prompt( 53 | "Enter file extensions to filter (comma-separated, press enter for none, e.g. .ts,.js):", 54 | { type: "text" } 55 | ); 56 | 57 | const tokenEncoder = await consola.prompt("Choose token encoder:", { 58 | type: "select", 59 | options: [ 60 | { label: "simple (recommended)", value: "simple" }, 61 | "p50k", 62 | "o200k", 63 | "cl100k", 64 | ], 65 | }); 66 | 67 | // const defaultModels = [ 68 | // "gpt-4-0125-preview", 69 | // "gpt-4o-2024-11-20", 70 | // "claude-3-sonnet-20240229", 71 | // "o1", 72 | // "mistral-large-latest", 73 | // ]; 74 | 75 | // const trackedModels = await consola.prompt( 76 | // `Choose models to track (comma-separated)\nDefault: ${defaultModels.join(", ")}:`, 77 | // { type: "text", initial: defaultModels.join(",") } 78 | // ); 79 | 80 | const config: Partial = { 81 | extensions: extensions 82 | ? extensions.split(",").map((e) => e.trim()) 83 | : undefined, 84 | tokenEncoder: tokenEncoder as any, 85 | // trackedModels: trackedModels 86 | // ? 
trackedModels.split(",").map((m) => m.trim()) 87 | // : defaultModels, 88 | }; 89 | 90 | await createConfigFile(config, cwd); 91 | await createIgnoreFile(cwd); 92 | await createDirectoryStructure(cwd); 93 | } 94 | 95 | consola.success("✨ Initialization complete!"); 96 | consola.info( 97 | "📝 A .codefetchignore file was created (add your files that you want to ignore)" 98 | ); 99 | consola.info("⚙️ A codefetch.config.mjs is created (customize as you like)"); 100 | consola.info( 101 | "\nYour codebase files will be placed into the codefetch folder" 102 | ); 103 | consola.info("\nCustomize your prompts in codefetch/prompts/default.md"); 104 | consola.info("\nNow you can run:"); 105 | consola.info("npx codefetch"); 106 | consola.info("or"); 107 | consola.info("npx codefetch --help (for all options)"); 108 | } 109 | -------------------------------------------------------------------------------- /packages/cli/src/config.ts: -------------------------------------------------------------------------------- 1 | import { loadConfig } from "c12"; 2 | import { 3 | type CodefetchConfig, 4 | getDefaultConfig, 5 | resolveCodefetchConfig, 6 | createCustomConfigMerger, 7 | mergeWithCliArgs, 8 | } from "codefetch-sdk"; 9 | 10 | export { 11 | type CodefetchConfig, 12 | getDefaultConfig, 13 | resolveCodefetchConfig, 14 | mergeWithCliArgs, 15 | } from "codefetch-sdk"; 16 | 17 | export async function loadCodefetchConfig( 18 | cwd: string, 19 | overrides?: Partial 20 | ): Promise { 21 | const defaults = getDefaultConfig(); 22 | const customMerger = createCustomConfigMerger(); 23 | 24 | const { config } = await loadConfig({ 25 | name: "codefetch", 26 | cwd, 27 | defaults, 28 | merger: customMerger, 29 | }); 30 | 31 | // Merge CLI args after loading config 32 | const mergedConfig = overrides ? 
mergeWithCliArgs(config, overrides) : config; 33 | 34 | return await resolveCodefetchConfig(mergedConfig, cwd); 35 | } 36 | 37 | export async function getConfig( 38 | overrides?: Partial 39 | ): Promise { 40 | const cwd = process.cwd(); 41 | return await loadCodefetchConfig(cwd, overrides); 42 | } 43 | -------------------------------------------------------------------------------- /packages/cli/src/format-model-info.ts: -------------------------------------------------------------------------------- 1 | import type { ModelDb } from "codefetch-sdk"; 2 | 3 | export function formatModelInfo( 4 | trackedModels: string[], 5 | modelDb: ModelDb 6 | ): string { 7 | const rows = trackedModels.map((modelName) => { 8 | const model = modelDb[modelName] || {}; 9 | const tokens = model.max_input_tokens 10 | ? model.max_input_tokens.toLocaleString() 11 | : "Unknown"; 12 | return `│ ${modelName.padEnd(30)} │ ${tokens.padEnd(15)} │`; 13 | }); 14 | 15 | const header = "│ Model Name │ Max Tokens │"; 16 | const separator = "├────────────────────────────────┼────────────────┤"; 17 | const topBorder = "┌────────────────────────────────┬────────────────┐"; 18 | const bottomBorder = "└────────────────────────────────┴────────────────┘"; 19 | 20 | return [topBorder, header, separator, ...rows, bottomBorder].join("\n"); 21 | } 22 | -------------------------------------------------------------------------------- /packages/cli/src/help-prompt.ts: -------------------------------------------------------------------------------- 1 | export function printHelp() { 2 | console.log(` 3 | Usage: codefetch [command] [options] 4 | 5 | Commands: 6 | init Initialize a new codefetch project 7 | 8 | Options: 9 | -o, --output Specify output filename (defaults to codebase.md) 10 | --dir Specify the directory to scan (defaults to current directory) 11 | --max-tokens Limit output tokens (default: 500,000) 12 | -e, --extension Filter by file extensions (e.g., .ts,.js) 13 | --include-files Include specific 
files (supports patterns like *.ts) 14 | --exclude-files Exclude specific files (supports patterns like *.test.ts) 15 | --include-dir Include specific directories 16 | --exclude-dir Exclude specific directories 17 | -v, --verbose [level] Show processing information (0=none, 1=basic, 2=debug) 18 | -t, --project-tree [depth] Generate visual project tree (optional depth, default: 2) 19 | --token-encoder Token encoding method (simple, p50k, o200k, cl100k) 20 | --token-limiter Token limiting strategy (sequential, truncated) 21 | --disable-line-numbers Disable line numbers in output 22 | --format Output format (markdown, json) (default: markdown) 23 | -h, --help Display this help message 24 | -p, --prompt Add a default prompt (fix, improve, codegen, testgen) or add a custom prompt file with .md/.txt extension 25 | 26 | Git Repository Options: 27 | --url Fetch and analyze content from a git repository URL 28 | --no-cache Skip cache and fetch fresh content 29 | --cache-ttl Cache time-to-live in hours (default: 1) 30 | --branch Git branch/tag/commit to fetch 31 | --no-api Disable GitHub API and use git clone instead 32 | --github-token GitHub API token for private repos (or set GITHUB_TOKEN env var) 33 | 34 | Examples: 35 | # Analyze a local project 36 | codefetch --output analysis.md 37 | 38 | # Fetch and analyze a GitHub repository (uses API by default) 39 | codefetch --url https://github.com/user/repo --branch main 40 | 41 | # Fetch private GitHub repo with token 42 | codefetch --url https://github.com/org/private-repo --github-token ghp_xxxxx 43 | 44 | # Force git clone instead of API 45 | codefetch --url https://github.com/user/repo --no-api 46 | 47 | # Analyze from GitLab or Bitbucket 48 | codefetch --url https://gitlab.com/user/repo 49 | codefetch --url https://bitbucket.org/user/repo 50 | `); 51 | } 52 | -------------------------------------------------------------------------------- /packages/cli/src/index.ts: 
-------------------------------------------------------------------------------- 1 | export * from "./markdown"; 2 | export { collectFiles } from "codefetch-sdk"; 3 | export { DEFAULT_IGNORE_PATTERNS } from "codefetch-sdk"; 4 | export { findProjectRoot } from "codefetch-sdk"; 5 | export * from "./args"; 6 | export * from "./config"; 7 | export { countTokens, SUPPORTED_MODELS } from "codefetch-sdk"; 8 | export { 9 | fetchModels, 10 | getLocalModels, 11 | type ModelInfo, 12 | type ModelDb, 13 | } from "codefetch-sdk"; 14 | export { processPromptTemplate, resolvePrompt } from "codefetch-sdk"; 15 | export * from "./help-prompt"; 16 | export { formatModelInfo } from "./format-model-info"; 17 | export { VALID_PROMPTS, VALID_ENCODERS, VALID_LIMITERS } from "codefetch-sdk"; 18 | export type { CodefetchConfig } from "./config"; 19 | export type { TokenEncoder, TokenLimiter } from "codefetch-sdk"; 20 | -------------------------------------------------------------------------------- /packages/cli/src/markdown.ts: -------------------------------------------------------------------------------- 1 | import consola from "consola"; 2 | import { 3 | generateMarkdown as generateMarkdownSdk, 4 | type MarkdownGeneratorOptions, 5 | } from "codefetch-sdk"; 6 | 7 | export async function generateMarkdown( 8 | files: string[], 9 | options: { 10 | maxTokens: number | null; 11 | verbose: number; 12 | projectTree: number; 13 | tokenEncoder: MarkdownGeneratorOptions["tokenEncoder"]; 14 | disableLineNumbers?: boolean; 15 | tokenLimiter?: MarkdownGeneratorOptions["tokenLimiter"]; 16 | promptFile?: string; 17 | templateVars?: Record; 18 | } 19 | ): Promise { 20 | const sdkOptions: MarkdownGeneratorOptions = { 21 | ...options, 22 | onVerbose: (message: string, level: number) => { 23 | if (options.verbose >= level) { 24 | consola.log(message); 25 | } 26 | }, 27 | }; 28 | 29 | return generateMarkdownSdk(files, sdkOptions); 30 | } 31 | 
-------------------------------------------------------------------------------- /packages/cli/src/prompts/codegen.ts: -------------------------------------------------------------------------------- 1 | export default `You are a senior developer. You produce optimized, maintainable code that follows best practices. 2 | 3 | Your task is to write code according to my instructions for the current codebase. 4 | 5 | instructions: 6 | 7 | {{MESSAGE}} 8 | 9 | 10 | Rules: 11 | - Keep your suggestions concise and focused. Avoid unnecessary explanations or fluff. 12 | - Your output should be a series of specific, actionable changes. 13 | 14 | When approaching this task: 15 | 1. Carefully review the provided code. 16 | 2. Identify the area thats raising this issue or error and provide a fix. 17 | 3. Consider best practices for the specific programming language used. 18 | 19 | For each suggested change, provide: 20 | 1. A short description of the change (one line maximum). 21 | 2. The modified code block. 22 | 23 | Use the following format for your output: 24 | 25 | [Short Description] 26 | \`\`\`[language]:[path/to/file] 27 | [code block] 28 | \`\`\` 29 | 30 | Begin fixing the codebase provide your solutions. 31 | 32 | My current codebase: 33 | 34 | {{CURRENT_CODEBASE}} 35 | 36 | `; 37 | -------------------------------------------------------------------------------- /packages/cli/src/prompts/fix.ts: -------------------------------------------------------------------------------- 1 | export default `You are a senior developer. You produce optimized, maintainable code that follows best practices. 2 | 3 | Your task is to review the current codebase and fix the current issues. 4 | 5 | Current Issue: 6 | 7 | {{MESSAGE}} 8 | 9 | 10 | Rules: 11 | - Keep your suggestions concise and focused. Avoid unnecessary explanations or fluff. 12 | - Your output should be a series of specific, actionable changes. 13 | 14 | When approaching this task: 15 | 1. Carefully review the provided code. 
16 | 2. Identify the area thats raising this issue or error and provide a fix. 17 | 3. Consider best practices for the specific programming language used. 18 | 19 | For each suggested change, provide: 20 | 1. A short description of the change (one line maximum). 21 | 2. The modified code block. 22 | 23 | Use the following format for your output: 24 | 25 | [Short Description] 26 | \`\`\`[language]:[path/to/file] 27 | [code block] 28 | \`\`\` 29 | 30 | Begin fixing the codebase provide your solutions. 31 | 32 | My current codebase: 33 | 34 | {{CURRENT_CODEBASE}} 35 | 36 | `; 37 | -------------------------------------------------------------------------------- /packages/cli/src/prompts/improve.ts: -------------------------------------------------------------------------------- 1 | export default `You are a senior software architect. You produce optimized, maintainable code that follows best practices. 2 | 3 | Your task is to review the current codebase and suggest improvements or optimizations. 4 | 5 | Rules: 6 | - Keep your suggestions concise and focused. Avoid unnecessary explanations or fluff. 7 | - Your output should be a series of specific, actionable changes. 8 | 9 | When approaching this task: 10 | 1. Carefully review the provided code. 11 | 2. Identify areas that could be improved in terms of efficiency, readability, or maintainability. 12 | 3. Consider best practices for the specific programming language used. 13 | 4. Think about potential optimizations that could enhance performance. 14 | 5. Look for opportunities to refactor or restructure the code for better organization. 15 | 16 | For each suggested change, provide: 17 | 1. A short description of the change (one line maximum). 18 | 2. The modified code block. 19 | 20 | Use the following format for your output: 21 | 22 | [Short Description] 23 | \`\`\`[language]:[path/to/file] 24 | [code block] 25 | \`\`\` 26 | 27 | Begin your analysis and provide your suggestions now. 
28 | 29 | My current codebase: 30 | 31 | {{CURRENT_CODEBASE}} 32 | 33 | `; 34 | -------------------------------------------------------------------------------- /packages/cli/src/prompts/testgen.ts: -------------------------------------------------------------------------------- 1 | export default `You are a senior test developer. You produce optimized, maintainable code that follows best practices. 2 | 3 | Your task is to review the current codebase and create and improve missing tests for the codebase. 4 | 5 | Additional instructions: 6 | 7 | {{MESSAGE}} 8 | 9 | 10 | Rules: 11 | - Keep your suggestions concise and focused. Avoid unnecessary explanations or fluff. 12 | - Your output should be a series of specific, actionable changes. 13 | 14 | When approaching this task: 15 | 1. Carefully review the provided code. 16 | 2. Identify the area thats raising this issue or error and provide a fix. 17 | 3. Consider best practices for the specific programming language used. 18 | 19 | For each suggested change, provide: 20 | 1. A short description of the change (one line maximum). 21 | 2. The modified code block. 22 | 23 | Use the following format for your output: 24 | 25 | [Short Description] 26 | \`\`\`[language]:[path/to/file] 27 | [code block] 28 | \`\`\` 29 | 30 | Begin fixing the codebase provide your solutions. 
31 | 32 | My current codebase: 33 | 34 | {{CURRENT_CODEBASE}} 35 | 36 | `; 37 | -------------------------------------------------------------------------------- /packages/cli/test-exists.js: -------------------------------------------------------------------------------- 1 | import { existsSync } from "node:fs"; 2 | import { join } from "pathe"; 3 | 4 | const source = "/Users/kregenrek/projects/cli/codefetch/packages/cli"; 5 | const codefetchIgnorePath = join(source, ".codefetchignore"); 6 | 7 | console.log("source:", source); 8 | console.log("codefetchIgnorePath:", codefetchIgnorePath); 9 | console.log("existsSync result:", existsSync(codefetchIgnorePath)); 10 | 11 | // Also test with node:path 12 | import { join as nodeJoin } from "node:path"; 13 | const codefetchIgnorePath2 = nodeJoin(source, ".codefetchignore"); 14 | console.log("\nUsing node:path:"); 15 | console.log("codefetchIgnorePath2:", codefetchIgnorePath2); 16 | console.log("existsSync result:", existsSync(codefetchIgnorePath2)); 17 | 18 | // Test current directory 19 | console.log("\nCurrent directory:", process.cwd()); 20 | const relativePath = ".codefetchignore"; 21 | console.log("Relative path exists:", existsSync(relativePath)); -------------------------------------------------------------------------------- /packages/cli/test.js: -------------------------------------------------------------------------------- 1 | console.log('test'); 2 | -------------------------------------------------------------------------------- /packages/cli/test/_setup.ts: -------------------------------------------------------------------------------- 1 | export { afterEach, describe, it, expect, vi } from "vitest"; 2 | import { consola } from "consola"; 3 | import { beforeAll, beforeEach, afterAll, vi } from "vitest"; 4 | import { server } from "./mocks/server.js"; 5 | 6 | beforeAll(() => { 7 | // Start MSW server for mocking HTTP requests 8 | server.listen({ onUnhandledRequest: "error" }); 9 | 10 | // if we enabled this 
stdout is empty and console.log fail 11 | // Not sure how to mock the consola - docs aren't helping here. 12 | // would be much easier... 13 | //consola.wrapAll(); 14 | }); 15 | 16 | afterAll(() => { 17 | server.close(); 18 | }); 19 | 20 | beforeEach(() => { 21 | consola.mockTypes(() => vi.fn()); 22 | }); 23 | -------------------------------------------------------------------------------- /packages/cli/test/fixtures/codebase-test/.codefetchignore: -------------------------------------------------------------------------------- 1 | ignore-this-file 2 | ignore-this-file-deep 3 | -------------------------------------------------------------------------------- /packages/cli/test/fixtures/codebase-test/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "browser": true, 4 | "es2021": true, 5 | "jest": true 6 | }, 7 | "extends": ["eslint:recommended", "plugin:react/recommended"], 8 | "parserOptions": { 9 | "ecmaFeatures": { 10 | "jsx": true 11 | }, 12 | "ecmaVersion": "latest", 13 | "sourceType": "module" 14 | }, 15 | "plugins": ["react"], 16 | "rules": { 17 | "react/react-in-jsx-scope": "off", 18 | "linebreak-style": 0, 19 | "no-undefined": "error", 20 | "no-var": "error", 21 | "prefer-const": "error", 22 | "func-names": "error", 23 | "id-length": "error", 24 | "newline-before-return": "error", 25 | "space-before-blocks": "error", 26 | "no-alert": "error", 27 | "react/prop-types": 0, 28 | "indent": ["error", 2] 29 | }, 30 | "settings": { 31 | "react": { 32 | "version": "detect" 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /packages/cli/test/fixtures/codebase-test/.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # production 12 | /build 13 | 14 | # misc 15 | .DS_Store 16 | .env.local 17 | .env.development.local 18 | .env.test.local 19 | .env.production.local 20 | 21 | npm-debug.log* 22 | yarn-debug.log* 23 | yarn-error.log* -------------------------------------------------------------------------------- /packages/cli/test/fixtures/codebase-test/README.md: -------------------------------------------------------------------------------- 1 | # Test Codebase for codefetch 2 | -------------------------------------------------------------------------------- /packages/cli/test/fixtures/codebase-test/ignore-this-file: -------------------------------------------------------------------------------- 1 | Y -------------------------------------------------------------------------------- /packages/cli/test/fixtures/codebase-test/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "codebase-starter" 3 | } 4 | -------------------------------------------------------------------------------- /packages/cli/test/fixtures/codebase-test/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "codebase-test", 3 | "version": "0.0.1", 4 | "private": true, 5 | "scripts": { 6 | "start": "react-scripts start", 7 | "build": "react-scripts build" 8 | }, 9 | "dependencies": { 10 | "react": "19.0.0", 11 | "react-dom": "19.0.0", 12 | "react-scripts": "5.0.1" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /packages/cli/test/fixtures/codebase-test/public/image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/regenrek/codefetch/3a1d1de72fd11db14bcf709e7eeae729f57854b1/packages/cli/test/fixtures/codebase-test/public/image.png 
-------------------------------------------------------------------------------- /packages/cli/test/fixtures/codebase-test/src/app.css: -------------------------------------------------------------------------------- 1 | .App { 2 | text-align: center; 3 | } 4 | -------------------------------------------------------------------------------- /packages/cli/test/fixtures/codebase-test/src/app.js: -------------------------------------------------------------------------------- 1 | import logo from "./logo.svg"; 2 | import "./app.css"; 3 | 4 | function App() { 5 | return ( 6 |
7 |
8 | logo 9 |

hi

10 |
11 |
12 | ); 13 | } 14 | 15 | export default App; 16 | -------------------------------------------------------------------------------- /packages/cli/test/fixtures/codebase-test/src/components/base/container.js: -------------------------------------------------------------------------------- 1 | function Container({ children }) { 2 | return
{children}
; 3 | } 4 | 5 | export default Container; 6 | -------------------------------------------------------------------------------- /packages/cli/test/fixtures/codebase-test/src/components/base/ignore-this-file-deep: -------------------------------------------------------------------------------- 1 | X -------------------------------------------------------------------------------- /packages/cli/test/fixtures/codebase-test/src/components/button.js: -------------------------------------------------------------------------------- 1 | function Button({ onClick, children }) { 2 | return ( 3 | 6 | ); 7 | } 8 | 9 | export default Button; 10 | -------------------------------------------------------------------------------- /packages/cli/test/fixtures/codebase-test/src/components/header.js: -------------------------------------------------------------------------------- 1 | function Header({ title }) { 2 | return ( 3 |
4 |

{title}

5 |
6 | ); 7 | } 8 | 9 | export default Header; 10 | -------------------------------------------------------------------------------- /packages/cli/test/fixtures/codebase-test/src/logo.svg: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /packages/cli/test/fixtures/codebase-test/src/utils/test2.js: -------------------------------------------------------------------------------- 1 | // fileChunkHandler.js 2 | 3 | /** 4 | * Splits a file into chunks and processes each chunk. 5 | * @param {File} file - The file to be chunked. 6 | * @param {number} chunkSize - The size of each chunk in bytes. 7 | * @param {function} onChunk - Callback for processing each chunk. 8 | */ 9 | function handleFileChunks(file, chunkSize, onChunk) { 10 | const fileSize = file.size; 11 | let offset = 0; 12 | 13 | while (offset < fileSize) { 14 | const chunk = file.slice(offset, offset + chunkSize); 15 | onChunk(chunk); 16 | offset += chunkSize; 17 | } 18 | } 19 | 20 | // Example usage: 21 | // Assuming `fileInput` is an 22 | document.querySelector("#fileInput").addEventListener("change", (event) => { 23 | const file = event.target.files[0]; // Get the selected file 24 | const chunkSize = 1024 * 1024; // 1MB 25 | 26 | handleFileChunks(file, chunkSize, (chunk) => { 27 | console.log("Processing chunk:", chunk); 28 | // Example: Upload chunk to a server 29 | // uploadChunk(chunk); 30 | }); 31 | }); 32 | -------------------------------------------------------------------------------- /packages/cli/test/fixtures/init-test/.codefetchignore: -------------------------------------------------------------------------------- 1 | test/ 2 | vitest.config.ts 3 | -------------------------------------------------------------------------------- /packages/cli/test/fixtures/init-test/codefetch.config.mjs: -------------------------------------------------------------------------------- 1 | /** @type 
{import('codefetch').CodefetchConfig} */ 2 | export default { 3 | "projectTree": 5, 4 | "tokenLimiter": "truncated", 5 | "defaultPromptFile": "default.md" 6 | }; 7 | -------------------------------------------------------------------------------- /packages/cli/test/fixtures/prompt-test/test.js: -------------------------------------------------------------------------------- 1 | // Test file for prompt tests 2 | console.log("Hello from test file"); -------------------------------------------------------------------------------- /packages/cli/test/integration/init.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, beforeEach, afterEach, expect } from "vitest"; 2 | import { spawnSync } from "node:child_process"; 3 | import { readFile } from "node:fs/promises"; 4 | import { existsSync } from "node:fs"; 5 | import { resolve, join } from "pathe"; 6 | 7 | const cliPath = resolve(__dirname, "../../dist/cli.mjs"); 8 | const FIXTURE_DIR = resolve(__dirname, "../fixtures/init-test"); 9 | const CODEFETCH_DIR = join(FIXTURE_DIR, "codefetch"); 10 | 11 | describe("Integration: init command", () => { 12 | beforeEach(async () => { 13 | // if (existsSync(CODEFETCH_DIR)) { 14 | // await rm(CODEFETCH_DIR, { recursive: true, force: true }); 15 | // } 16 | // if (existsSync(join(FIXTURE_DIR, ".codefetchignore"))) { 17 | // await unlink(join(FIXTURE_DIR, ".codefetchignore")); 18 | // } 19 | // if (existsSync(join(FIXTURE_DIR, "codefetch.config.mjs"))) { 20 | // await unlink(join(FIXTURE_DIR, "codefetch.config.mjs")); 21 | // } 22 | }); 23 | 24 | afterEach(async () => { 25 | // if (existsSync(CODEFETCH_DIR)) { 26 | // await rm(CODEFETCH_DIR, { recursive: true, force: true }); 27 | // } 28 | // if (existsSync(join(FIXTURE_DIR, ".codefetchignore"))) { 29 | // await unlink(join(FIXTURE_DIR, ".codefetchignore")); 30 | // } 31 | // if (existsSync(join(FIXTURE_DIR, "codefetch.config.mjs"))) { 32 | // await 
unlink(join(FIXTURE_DIR, "codefetch.config.mjs")); 33 | // } 34 | }); 35 | 36 | it("creates default configuration", async () => { 37 | const result = spawnSync("node", [cliPath, "init", "--default"], { 38 | cwd: FIXTURE_DIR, 39 | encoding: "utf8", 40 | stdio: ["pipe", "pipe", "pipe"], 41 | }); 42 | 43 | expect(result.stderr || "").toBe(""); 44 | expect(existsSync(join(FIXTURE_DIR, "codefetch.config.mjs"))).toBe(true); 45 | expect(existsSync(join(FIXTURE_DIR, ".codefetchignore"))).toBe(true); 46 | 47 | const configContent = await readFile( 48 | join(FIXTURE_DIR, "codefetch.config.mjs"), 49 | "utf8" 50 | ); 51 | expect(configContent).toContain("projectTree"); 52 | expect(configContent).toContain("tokenLimiter"); 53 | expect(configContent).toContain("defaultPromptFile"); 54 | }); 55 | 56 | it.skip("creates custom configuration", async () => { 57 | // Skip this test as it requires interactive mode 58 | const result = spawnSync("node", [cliPath, "init"], { 59 | cwd: FIXTURE_DIR, 60 | encoding: "utf8", 61 | input: "custom\n.ts,.js\ncl100k\ngpt-4,claude-3\n", // Simulate user input 62 | stdio: ["pipe", "pipe", "pipe"], 63 | }); 64 | 65 | expect(result.stderr).toBe(""); 66 | expect(existsSync(join(FIXTURE_DIR, "codefetch.config.mjs"))).toBe(true); 67 | 68 | const configContent = await readFile( 69 | join(FIXTURE_DIR, "codefetch.config.mjs"), 70 | "utf8" 71 | ); 72 | expect(configContent).toContain(".ts"); 73 | expect(configContent).toContain(".js"); 74 | expect(configContent).toContain("cl100k"); 75 | expect(configContent).toContain("gpt-4"); 76 | }); 77 | 78 | it("creates default.md in prompts directory", async () => { 79 | const result = spawnSync("node", [cliPath, "init"], { 80 | cwd: FIXTURE_DIR, 81 | encoding: "utf8", 82 | input: "default\n", 83 | stdio: ["pipe", "pipe", "pipe"], 84 | }); 85 | 86 | expect(result.stderr || "").toBe(""); 87 | expect(existsSync(join(CODEFETCH_DIR, "prompts/default.md"))).toBe(true); 88 | 89 | const promptContent = await readFile( 90 | 
join(CODEFETCH_DIR, "prompts/default.md"), 91 | "utf8" 92 | ); 93 | expect(promptContent).toContain("{{CURRENT_CODEBASE}}"); 94 | }); 95 | }); 96 | -------------------------------------------------------------------------------- /packages/cli/test/mocks/server.ts: -------------------------------------------------------------------------------- 1 | import { setupServer } from "msw/node"; 2 | import { http, HttpResponse } from "msw"; 3 | import fs from "node:fs/promises"; 4 | import path from "node:path"; 5 | import { fileURLToPath } from "node:url"; 6 | 7 | // Convert __dirname to be compatible with ES Modules 8 | const __filename = fileURLToPath(import.meta.url); 9 | const __dirname = path.dirname(__filename); 10 | 11 | const handlers = [ 12 | http.get("https://tiktoken.pages.dev/js/:fileName", async ({ params }) => { 13 | const { fileName } = params; 14 | const filePath = path.resolve( 15 | __dirname, 16 | `../../../sdk/test/fixtures/tiktoken/${fileName}` 17 | ); 18 | 19 | try { 20 | const fileContent = await fs.readFile(filePath); 21 | return new HttpResponse(fileContent, { 22 | headers: { 23 | "Content-Type": "application/json", 24 | }, 25 | }); 26 | } catch { 27 | return new HttpResponse(null, { status: 404 }); 28 | } 29 | }), 30 | http.get("https://codeload.github.com/:owner/:repo/tar.gz/:ref", () => { 31 | // This is a placeholder for a more sophisticated mock if needed 32 | return new HttpResponse(null, { status: 200 }); 33 | }), 34 | 35 | http.get("https://api.github.com/repos/:owner/:repo", () => { 36 | // This is a placeholder for a more sophisticated mock if needed 37 | return new HttpResponse( 38 | JSON.stringify({ 39 | full_name: "test/repo", 40 | stargazers_count: 123, 41 | }), 42 | { 43 | headers: { 44 | "Content-Type": "application/json", 45 | }, 46 | } 47 | ); 48 | }), 49 | ]; 50 | 51 | export const server = setupServer(...handlers); 52 | -------------------------------------------------------------------------------- 
/packages/cli/test/unit/args.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, expect } from "vitest"; 2 | import { parseArgs } from "../../src/args"; 3 | 4 | describe("parseArgs", () => { 5 | it("should parse basic arguments", () => { 6 | const args = ["--output", "output.md", "--verbose", "2"]; 7 | const result = parseArgs(args); 8 | 9 | expect(result).toMatchObject({ 10 | outputFile: "output.md", 11 | verbose: 2, 12 | }); 13 | }); 14 | 15 | it("should strip codefetch/ prefix from output file", () => { 16 | // Test with codefetch/ prefix 17 | expect(parseArgs(["-o", "codefetch/codebase.md"]).outputFile).toBe( 18 | "codebase.md" 19 | ); 20 | expect(parseArgs(["--output", "codefetch/myfile.md"]).outputFile).toBe( 21 | "myfile.md" 22 | ); 23 | 24 | // Test with ./codefetch/ prefix 25 | expect(parseArgs(["-o", "./codefetch/output.md"]).outputFile).toBe( 26 | "output.md" 27 | ); 28 | 29 | // Test with nested path after codefetch/ 30 | expect(parseArgs(["-o", "codefetch/subfolder/file.md"]).outputFile).toBe( 31 | "subfolder/file.md" 32 | ); 33 | 34 | // Test without codefetch/ prefix (should remain unchanged) 35 | expect(parseArgs(["-o", "myoutput.md"]).outputFile).toBe("myoutput.md"); 36 | expect(parseArgs(["-o", "folder/file.md"]).outputFile).toBe( 37 | "folder/file.md" 38 | ); 39 | 40 | // Test edge case: file named codefetch in another directory 41 | expect(parseArgs(["-o", "other/codefetch/file.md"]).outputFile).toBe( 42 | "other/codefetch/file.md" 43 | ); 44 | }); 45 | 46 | it("should handle extensions correctly", () => { 47 | expect(parseArgs(["-e", "ts,js,png"]).extensions).toEqual([ 48 | ".ts", 49 | ".js", 50 | ".png", 51 | ]); 52 | expect(parseArgs(["-e", ".ts,.js,.png,.txt"]).extensions).toEqual([ 53 | ".ts", 54 | ".js", 55 | ".png", 56 | ".txt", 57 | ]); 58 | 59 | // Should throw for invalid formats 60 | expect(() => parseArgs(["-e", " ts,js "])).toThrow( 61 | "Invalid extension format" 62 | 
); 63 | expect(() => parseArgs(["-e", "ts, js"])).toThrow( 64 | "Invalid extension format" 65 | ); 66 | expect(() => parseArgs(["-e", ".ts, .js"])).toThrow( 67 | "Invalid extension format" 68 | ); 69 | 70 | expect(parseArgs(["--extension", ".ts,.js"]).extensions).toEqual([ 71 | ".ts", 72 | ".js", 73 | ]); 74 | }); 75 | 76 | it("should parse token encoder", () => { 77 | const args = ["--token-encoder", "cl100k"]; 78 | const result = parseArgs(args); 79 | 80 | expect(result.tokenEncoder).toBe("cl100k"); 81 | }); 82 | 83 | it("should throw on invalid token encoder", () => { 84 | const args = ["--token-encoder", "invalid"]; 85 | expect(() => parseArgs(args)).toThrow(); 86 | }); 87 | 88 | it("should handle include/exclude patterns", () => { 89 | const args = [ 90 | "--include-files", 91 | "src/**/*.ts", 92 | "--exclude-files", 93 | "test/**/*.ts", 94 | ]; 95 | const result = parseArgs(args); 96 | 97 | expect(result.includeFiles).toContain("src/**/*.ts"); 98 | expect(result.excludeFiles).toContain("test/**/*.ts"); 99 | }); 100 | 101 | it("should parse disable-line-numbers flag", () => { 102 | const args = ["--disable-line-numbers"]; 103 | const result = parseArgs(args); 104 | 105 | expect(result.disableLineNumbers).toBe(true); 106 | }); 107 | 108 | it("should default disable-line-numbers to false when not specified", () => { 109 | const args = ["-o", "output.md"]; 110 | const result = parseArgs(args); 111 | 112 | expect(result.disableLineNumbers).toBe(false); 113 | }); 114 | 115 | it("should handle multiple options together", () => { 116 | const args = [ 117 | "-o", 118 | "output.md", 119 | "--disable-line-numbers", 120 | "--token-encoder", 121 | "cl100k", 122 | "-v", 123 | "2", 124 | ]; 125 | const result = parseArgs(args); 126 | 127 | expect(result).toMatchObject({ 128 | outputFile: "output.md", 129 | disableLineNumbers: true, 130 | tokenEncoder: "cl100k", 131 | verbose: 2, 132 | }); 133 | }); 134 | }); 135 | 
-------------------------------------------------------------------------------- /packages/cli/test/unit/files.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, expect, beforeEach, afterEach } from "vitest"; 2 | import fs from "node:fs"; 3 | import path from "node:path"; 4 | import { collectFiles } from "../../src/index"; 5 | import ignore from "ignore"; 6 | 7 | const TEST_DIR = path.join(__dirname, "..", "__test__"); 8 | 9 | describe("collectFiles", () => { 10 | beforeEach(() => { 11 | // Create test directory and files 12 | if (!fs.existsSync(TEST_DIR)) { 13 | fs.mkdirSync(TEST_DIR, { recursive: true }); 14 | } 15 | fs.writeFileSync(path.join(TEST_DIR, "test1.ts"), "test content"); 16 | fs.writeFileSync(path.join(TEST_DIR, "test2.js"), "test content"); 17 | fs.writeFileSync(path.join(TEST_DIR, ".gitignore"), "test1.ts"); 18 | }); 19 | 20 | afterEach(() => { 21 | // Clean up test files 22 | if (fs.existsSync(TEST_DIR)) { 23 | fs.rmSync(TEST_DIR, { recursive: true, force: true }); 24 | } 25 | }); 26 | 27 | it("should collect files with extension filter", async () => { 28 | const files = await collectFiles(TEST_DIR, { 29 | ig: ignore(), 30 | extensionSet: new Set([".ts"]), 31 | excludeFiles: null, 32 | includeFiles: null, 33 | excludeDirs: null, 34 | includeDirs: null, 35 | verbose: 0, 36 | }); 37 | 38 | expect(files).toHaveLength(1); 39 | expect(files[0]).toContain("test1.ts"); 40 | }); 41 | 42 | it("should respect include/exclude patterns", async () => { 43 | const files = await collectFiles(TEST_DIR, { 44 | ig: ignore(), 45 | extensionSet: null, 46 | excludeFiles: ["test2*"], 47 | includeFiles: ["test*"], 48 | excludeDirs: null, 49 | includeDirs: null, 50 | verbose: 0, 51 | }); 52 | 53 | expect(files).toHaveLength(1); 54 | expect(files[0]).toContain("test1.ts"); 55 | }); 56 | 57 | it("should respect directory filters", async () => { 58 | // Create a subdirectory 59 | const subDir = 
path.join(TEST_DIR, "subdir"); 60 | fs.mkdirSync(subDir); 61 | fs.writeFileSync(path.join(subDir, "test3.ts"), "test content"); 62 | 63 | const files = await collectFiles(TEST_DIR, { 64 | ig: ignore(), 65 | extensionSet: null, 66 | excludeFiles: null, 67 | includeFiles: null, 68 | excludeDirs: ["subdir"], 69 | includeDirs: null, 70 | verbose: 0, 71 | }); 72 | 73 | // We'll see test1.ts, test2.js, .gitignore 74 | expect(files.some((f: string) => f.endsWith("test1.ts"))).toBe(true); 75 | expect(files.some((f: string) => f.endsWith("test2.js"))).toBe(true); 76 | expect(files.some((f: string) => f.endsWith(".gitignore"))).toBe(true); 77 | // subdir/test3.ts is excluded 78 | expect(files.some((f: string) => f.endsWith("test3.ts"))).toBe(false); 79 | }); 80 | 81 | it("should handle ignore patterns from the ig param", async () => { 82 | const ig = ignore().add("test1.ts"); 83 | const files = await collectFiles(TEST_DIR, { 84 | ig, 85 | extensionSet: null, 86 | excludeFiles: null, 87 | includeFiles: null, 88 | excludeDirs: null, 89 | includeDirs: null, 90 | verbose: 0, 91 | }); 92 | 93 | // test1.ts is ignored, so only test2.js and .gitignore remain 94 | expect(files.length).toBe(2); 95 | expect(files.every((f: string) => !f.includes("test1.ts"))).toBe(true); 96 | }); 97 | 98 | it("should handle directories with special glob characters", async () => { 99 | // Create directories with special characters 100 | const specialDirs = [ 101 | "routes(marketing)", 102 | "test[brackets]", 103 | "test{braces}", 104 | "test*star", 105 | "test?question", 106 | ]; 107 | 108 | for (const dir of specialDirs) { 109 | const dirPath = path.join(TEST_DIR, dir); 110 | fs.mkdirSync(dirPath, { recursive: true }); 111 | fs.writeFileSync(path.join(dirPath, "file.ts"), "test content"); 112 | } 113 | 114 | // Test including a directory with parentheses 115 | const files = await collectFiles(TEST_DIR, { 116 | ig: ignore(), 117 | extensionSet: null, 118 | excludeFiles: null, 119 | includeFiles: null, 
120 | excludeDirs: null, 121 | includeDirs: [path.join(TEST_DIR, "routes(marketing)")], 122 | verbose: 0, 123 | }); 124 | 125 | expect(files).toHaveLength(1); 126 | expect(files[0]).toContain("routes(marketing)"); 127 | expect(files[0]).toContain("file.ts"); 128 | }); 129 | }); 130 | -------------------------------------------------------------------------------- /packages/cli/test/unit/token-counter.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, expect } from "vitest"; 2 | import { countTokens } from "codefetch-sdk"; 3 | 4 | describe("token-counter", () => { 5 | it("counts tokens with the 'simple' encoder by splitting on whitespace", async () => { 6 | const text = "Hello world! This is a test."; 7 | const tokens = await countTokens(text, "simple"); 8 | // Very naive: splitting on whitespace plus punctuation. 9 | // The exact count might differ if your code does it differently. 10 | // Adjust this expectation to match how your actual "simple" logic works. 
11 | expect(typeof tokens).toBe("number"); 12 | expect(tokens).toBeGreaterThan(0); 13 | }); 14 | 15 | it("handles empty strings gracefully", async () => { 16 | const tokens = await countTokens("", "simple"); 17 | expect(tokens).toBe(0); 18 | }); 19 | 20 | // If you have other encoders like 'cl100k', 'p50k', or 'o200k', 21 | // add tests for them as needed, e.g.: 22 | it("counts tokens with cl100k encoder", async () => { 23 | const text = "Hello, I'm using the cl100k encoder test."; 24 | const tokens = await countTokens(text, "cl100k"); 25 | expect(tokens).toBeGreaterThan(0); 26 | }, 10_000); // 10 second timeout 27 | }); 28 | -------------------------------------------------------------------------------- /packages/cli/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.base.json", 3 | "compilerOptions": { 4 | "rootDir": "./", 5 | "outDir": "./dist", 6 | "types": ["node", "vitest/globals"] 7 | }, 8 | "include": ["src/**/*", "test/**/*"], 9 | "exclude": ["node_modules", "dist"] 10 | } -------------------------------------------------------------------------------- /packages/cli/vitest.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from "vitest/config"; 2 | 3 | export default defineConfig({ 4 | test: { 5 | globals: true, 6 | environment: "node", 7 | setupFiles: ["./test/_setup.ts"], 8 | coverage: { 9 | provider: "v8", 10 | reporter: ["text", "json", "html"], 11 | exclude: ["**/node_modules/**", "**/dist/**", "**/test/**", "**/docs/**"], 12 | }, 13 | }, 14 | }); 15 | -------------------------------------------------------------------------------- /packages/mcp/build.config.ts: -------------------------------------------------------------------------------- 1 | import { defineBuildConfig } from 'unbuild' 2 | 3 | export default defineBuildConfig({ 4 | entries: [ 5 | './src/index' 6 | ], 7 | declaration: true, 8 | 
clean: true, 9 | rollup: { 10 | emitCJS: true, 11 | esbuild: { 12 | target: 'node18' 13 | } 14 | } 15 | }) -------------------------------------------------------------------------------- /packages/mcp/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@codefetch/mcp", 3 | "version": "1.5.2", 4 | "description": "Model Context Protocol server for codefetch", 5 | "repository": "regenrek/codefetch", 6 | "license": "MIT", 7 | "type": "module", 8 | "sideEffects": false, 9 | "exports": { 10 | "./package.json": "./package.json", 11 | ".": { 12 | "import": { 13 | "types": "./dist/index.d.mts", 14 | "default": "./dist/index.mjs" 15 | }, 16 | "require": { 17 | "types": "./dist/index.d.cts", 18 | "default": "./dist/index.cjs" 19 | } 20 | } 21 | }, 22 | "main": "./dist/index.cjs", 23 | "module": "./dist/index.mjs", 24 | "types": "./dist/index.d.cts", 25 | "files": [ 26 | "dist" 27 | ], 28 | "scripts": { 29 | "build": "unbuild", 30 | "test": "vitest run", 31 | "test:types": "tsc --noEmit --skipLibCheck" 32 | }, 33 | "dependencies": { 34 | "codefetch-sdk": "workspace:*", 35 | "@modelcontextprotocol/sdk": "^1.13.2" 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /packages/mcp/src/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./server"; -------------------------------------------------------------------------------- /packages/mcp/src/server.ts: -------------------------------------------------------------------------------- 1 | import { Server } from "@modelcontextprotocol/sdk/server/index.js"; 2 | import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; 3 | import { 4 | collectFiles, 5 | generateMarkdown, 6 | getDefaultConfig, 7 | type CodefetchConfig, 8 | } from "codefetch-sdk"; 9 | 10 | // TODO: Implement MCP server functionality 11 | export async function createMcpServer() { 
12 | const server = new Server( 13 | { 14 | name: "codefetch-mcp", 15 | version: "1.5.1", 16 | }, 17 | { 18 | capabilities: { 19 | tools: {}, 20 | }, 21 | } 22 | ); 23 | 24 | // TODO: Register tools using SDK functions 25 | // server.setRequestHandler(...) 26 | 27 | return server; 28 | } 29 | 30 | export async function startMcpServer() { 31 | const server = await createMcpServer(); 32 | const transport = new StdioServerTransport(); 33 | await server.connect(transport); 34 | } 35 | -------------------------------------------------------------------------------- /packages/mcp/test/dummy.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it } from 'vitest'; 2 | 3 | describe('MCP Package Tests', () => { 4 | it('TODO: Tests need to be implemented', () => { 5 | console.log('⚠️ MCP package tests need to be implemented'); 6 | console.log('This is a placeholder test to ensure the test suite runs.'); 7 | // This test passes to avoid failing CI/CD pipelines 8 | // Real tests should be implemented for the MCP server functionality 9 | }); 10 | }); -------------------------------------------------------------------------------- /packages/mcp/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.base.json", 3 | "compilerOptions": { 4 | "rootDir": "./src", 5 | "outDir": "./dist" 6 | }, 7 | "include": ["src/**/*"], 8 | "exclude": ["node_modules", "dist"] 9 | } -------------------------------------------------------------------------------- /packages/mcp/vitest.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from "vitest/config"; 2 | 3 | export default defineConfig({ 4 | test: { 5 | globals: true, 6 | environment: "node", 7 | }, 8 | }); -------------------------------------------------------------------------------- /packages/sdk/.prettierrc: 
-------------------------------------------------------------------------------- 1 | { 2 | "endOfLine": "lf", 3 | "semi": true, 4 | "singleQuote": false, 5 | "tabWidth": 2, 6 | "trailingComma": "es5", 7 | "printWidth": 80 8 | } -------------------------------------------------------------------------------- /packages/sdk/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## Current Version 4 | 5 | ### Major Changes 6 | 7 | - **Cache System Overhaul**: Complete rewrite of the caching system to fix Cloudflare Workers compatibility 8 | - Fixed "Invalid URL" errors when using cache in Cloudflare Workers 9 | - Implemented environment-aware cache factory (CloudflareCache, FileSystemCache, MemoryCache) 10 | - Added proper URL generation for Cloudflare Cache API compliance 11 | - Added cache control options: `noCache`, `cache` strategies, `cacheBaseUrl`, `cacheTTL` 12 | - Implemented content validation and automatic cleanup of invalid entries 13 | - Graceful degradation when cache initialization fails 14 | - For immediate fix use `{ noCache: true }` or configure `{ cacheBaseUrl: 'https://your-domain.com' }` 15 | 16 | - Renamed `streamGitHubTarball` to `fetchGitHubTarball` for clarity 17 | - The function still returns a streaming response but the name better reflects its purpose 18 | - Update imports: `import { fetchGitHubTarball } from 'codefetch-sdk/web'` 19 | 20 | ### Features 21 | 22 | - **Testing Infrastructure**: Integrate Mock Service Worker (MSW) for improved testing 23 | - Add MSW for mocking network requests during tests 24 | - Mock tiktoken tokenizer endpoints to prevent network calls during testing 25 | - Pre-download tokenizer fixtures (`p50k_base.json`, `o200k_base.json`, `cl100k_base.json`) 26 | - Configure global test setup with MSW server 27 | - Significantly improved test reliability and speed 28 | 29 | - **Markdown Streaming**: Add streaming functionality for markdown generation 30 | - 
New `createMarkdownStream` function for streaming markdown content generation 31 | - Efficient memory usage for large codebases 32 | - Real-time markdown generation as files are processed 33 | 34 | - **Cloudflare Worker Enhancements**: 35 | - Add comprehensive Worker-specific TypeScript types 36 | - Introduce performance metrics tracking 37 | - Optimize bundle size and improve Worker compatibility 38 | - Enhanced error handling for Worker environments 39 | 40 | ### Improvements 41 | 42 | - Fix TypeScript strict mode compliance issues 43 | - Update test infrastructure to use Vitest's new mocking syntax 44 | - Add proper cleanup for MSW server in test teardown 45 | - Fix ESLint issues with unused catch variables 46 | - Improve error messages and debugging information 47 | 48 | ### Developer Experience 49 | 50 | - Add comprehensive test fixtures for offline testing 51 | - Improve test isolation and reliability 52 | - Better TypeScript type safety throughout the codebase 53 | - Enhanced documentation for Worker-specific features 54 | 55 | ### Dependencies 56 | 57 | - Add `msw` as development dependency for test mocking 58 | - Update testing infrastructure to support offline testing 59 | 60 | -------------------------------------------------------------------------------- /packages/sdk/build.browser.config.ts: -------------------------------------------------------------------------------- 1 | import { defineBuildConfig } from 'unbuild' 2 | 3 | export default defineBuildConfig({ 4 | entries: [ 5 | './src/browser' 6 | ], 7 | outDir: 'dist-browser', 8 | declaration: true, 9 | clean: true, 10 | rollup: { 11 | emitCJS: false, // Browser doesn't need CJS 12 | output: { 13 | // Inline all code to avoid shared chunks 14 | inlineDynamicImports: true, 15 | manualChunks: undefined, 16 | }, 17 | esbuild: { 18 | target: 'es2020', 19 | platform: 'browser' 20 | } 21 | }, 22 | // Don't create shared chunks 23 | failOnWarn: false, 24 | }) 
-------------------------------------------------------------------------------- /packages/sdk/build.config.ts: -------------------------------------------------------------------------------- 1 | import { defineBuildConfig } from "unbuild"; 2 | 3 | export default defineBuildConfig({ 4 | entries: ["./src/index", "./src/browser", "./src/prompts/index"], 5 | declaration: true, 6 | clean: true, 7 | failOnWarn: false, 8 | externals: [ 9 | "adm-zip", // Keep as external to avoid bundling 10 | ], 11 | rollup: { 12 | emitCJS: true, 13 | esbuild: { 14 | target: "node18", 15 | }, 16 | }, 17 | }); 18 | -------------------------------------------------------------------------------- /packages/sdk/build.worker.config.ts: -------------------------------------------------------------------------------- 1 | import { defineBuildConfig } from "unbuild"; 2 | 3 | export default defineBuildConfig({ 4 | // Worker-specific entry point 5 | entries: ["./src/worker"], 6 | 7 | // Output to separate directory 8 | outDir: "dist-worker", 9 | 10 | // Target browser environment for Workers 11 | rollup: { 12 | emitCJS: false, // Workers only support ESM 13 | esbuild: { 14 | target: "es2022", 15 | platform: "browser", // Important for Worker compatibility 16 | format: "esm", 17 | }, 18 | dts: { 19 | respectExternal: false, 20 | }, 21 | }, 22 | 23 | // Clean output directory 24 | clean: true, 25 | 26 | // Generate declarations 27 | declaration: true, 28 | 29 | // Externals - let nodejs_compat handle these in the Worker 30 | externals: [ 31 | // Node built-ins that nodejs_compat provides 32 | "node:os", 33 | "node:path", 34 | "node:fs", 35 | "node:fs/promises", 36 | "node:crypto", 37 | "node:buffer", 38 | "node:stream", 39 | "node:util", 40 | "node:zlib", // Add zlib to externals (use DecompressionStream instead) 41 | // Never available in Workers 42 | "node:child_process", 43 | ], 44 | 45 | // Fail on warnings to catch issues early 46 | failOnWarn: false, 47 | }); 48 | 
-------------------------------------------------------------------------------- /packages/sdk/examples/worker-github-tarball.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Example: Using streamGitHubTarball in Cloudflare Workers 3 | * 4 | * This demonstrates the optimized GitHub tarball extraction: 5 | * - Native DecompressionStream (0 KB bundle overhead) 6 | * - Custom lightweight TAR parser 7 | * - No nodejs_compat flag needed 8 | * - Pure Web Streams implementation 9 | */ 10 | 11 | import { streamGitHubTarball } from "codefetch-sdk/worker"; 12 | 13 | export default { 14 | async fetch(request: Request): Promise { 15 | const url = new URL(request.url); 16 | 17 | // Example: /extract?owner=microsoft&repo=vscode&ref=main 18 | const owner = url.searchParams.get("owner") || "microsoft"; 19 | const repo = url.searchParams.get("repo") || "vscode"; 20 | const ref = url.searchParams.get("ref") || "main"; 21 | const token = url.searchParams.get("token"); // Optional GitHub token 22 | 23 | try { 24 | // Stream and extract files from GitHub tarball 25 | const files = await streamGitHubTarball(owner, repo, ref, { 26 | token: token || undefined, 27 | extensions: [".ts", ".js", ".json", ".md"], // Filter by extension 28 | excludeDirs: ["node_modules", ".git"], // Exclude directories 29 | maxFiles: 100, // Limit number of files 30 | onProgress: (processed) => { 31 | console.log(`Processed ${processed} files...`); 32 | }, 33 | }); 34 | 35 | // Transform results for response 36 | const results = files.map((file) => ({ 37 | path: file.path, 38 | size: file.content.length, 39 | language: file.language, 40 | preview: 41 | file.content.slice(0, 200) + (file.content.length > 200 ? "..." 
: ""), 42 | })); 43 | 44 | return Response.json( 45 | { 46 | success: true, 47 | repo: `${owner}/${repo}`, 48 | ref, 49 | fileCount: files.length, 50 | totalSize: files.reduce((sum, f) => sum + f.content.length, 0), 51 | files: results.slice(0, 10), // First 10 files 52 | }, 53 | { 54 | headers: { 55 | "Content-Type": "application/json", 56 | "Cache-Control": "public, max-age=3600", 57 | }, 58 | } 59 | ); 60 | } catch (error) { 61 | return Response.json( 62 | { 63 | success: false, 64 | error: error instanceof Error ? error.message : "Unknown error", 65 | }, 66 | { 67 | status: 500, 68 | headers: { "Content-Type": "application/json" }, 69 | } 70 | ); 71 | } 72 | }, 73 | }; 74 | 75 | /** 76 | * Performance characteristics: 77 | * 78 | * - Bundle size: 0 KB extra (uses existing SDK code) 79 | * - Cold start: Minimal (no extra dependencies) 80 | * - Memory: Efficient streaming (doesn't load entire tarball) 81 | * - Speed: Native DecompressionStream performance 82 | * 83 | * The custom TAR parser is optimized for: 84 | * - Streaming large repositories 85 | * - Filtering files during extraction 86 | * - Memory-efficient processing 87 | */ 88 | -------------------------------------------------------------------------------- /packages/sdk/examples/worker.ts: -------------------------------------------------------------------------------- 1 | import { fetchFromWeb } from "codefetch-sdk/worker"; 2 | 3 | export interface Env { 4 | GITHUB_TOKEN?: string; 5 | } 6 | 7 | export default { 8 | async fetch(request: Request, env: Env): Promise { 9 | const url = new URL(request.url); 10 | 11 | // Example 1: Fetch from a website 12 | if (url.pathname === "/fetch-web") { 13 | try { 14 | const targetUrl = url.searchParams.get("url"); 15 | if (!targetUrl) { 16 | return new Response("Missing 'url' query parameter", { status: 400 }); 17 | } 18 | 19 | const result = await fetchFromWeb(targetUrl, { 20 | maxPages: 10, 21 | maxDepth: 1, 22 | verbose: 1, 23 | }); 24 | 25 | return new 
Response(result.markdown, { 26 | headers: { "Content-Type": "text/markdown" }, 27 | }); 28 | } catch (error) { 29 | return new Response(`Error: ${error}`, { status: 500 }); 30 | } 31 | } 32 | 33 | // Example 2: Fetch from GitHub repository 34 | if (url.pathname === "/fetch-github") { 35 | try { 36 | const repo = url.searchParams.get("repo"); 37 | if (!repo) { 38 | return new Response("Missing 'repo' query parameter", { 39 | status: 400, 40 | }); 41 | } 42 | 43 | // GitHub repos can be fetched via fetchFromWeb with the GitHub URL 44 | const githubUrl = `https://github.com/${repo}`; 45 | const result = await fetchFromWeb(githubUrl, { 46 | maxFiles: 50, 47 | extensions: [".ts", ".js", ".md"], 48 | githubToken: env.GITHUB_TOKEN, 49 | }); 50 | 51 | return new Response(result.markdown, { 52 | headers: { "Content-Type": "text/markdown" }, 53 | }); 54 | } catch (error) { 55 | return new Response(`Error: ${error}`, { status: 500 }); 56 | } 57 | } 58 | 59 | return new Response("Codefetch Worker - Use /fetch-web or /fetch-github", { 60 | status: 200, 61 | }); 62 | }, 63 | } satisfies ExportedHandler; 64 | -------------------------------------------------------------------------------- /packages/sdk/examples/wrangler.toml: -------------------------------------------------------------------------------- 1 | name = "codefetch-worker" 2 | main = "src/worker.ts" 3 | compatibility_date = "2025-07-07" 4 | compatibility_flags = ["nodejs_compat"] 5 | 6 | # Environment variables (add your GitHub token) 7 | # [vars] 8 | # GITHUB_TOKEN = "your-github-token" 9 | 10 | # Or use secrets for sensitive data 11 | # wrangler secret put GITHUB_TOKEN 12 | 13 | # Routes (optional) 14 | # routes = [ 15 | # { pattern = "codefetch.example.com/*", zone_name = "example.com" } 16 | # ] 17 | 18 | # Development settings 19 | [dev] 20 | port = 8787 21 | local_protocol = "http" 22 | 23 | # Limits 24 | [[limits]] 25 | cpu_ms = 50 26 | memory_mb = 128 
-------------------------------------------------------------------------------- /packages/sdk/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "codefetch-sdk", 3 | "version": "1.6.3", 4 | "description": "Core SDK for codefetch functionality", 5 | "repository": { 6 | "type": "git", 7 | "url": "git+https://github.com/regenrek/codefetch.git" 8 | }, 9 | "license": "MIT", 10 | "type": "module", 11 | "sideEffects": false, 12 | "exports": { 13 | "./package.json": "./package.json", 14 | ".": { 15 | "browser": { 16 | "types": "./dist-browser/browser.d.mts", 17 | "default": "./dist-browser/browser.mjs" 18 | }, 19 | "import": { 20 | "types": "./dist/index.d.mts", 21 | "default": "./dist/index.mjs" 22 | }, 23 | "require": { 24 | "types": "./dist/index.d.cts", 25 | "default": "./dist/index.cjs" 26 | } 27 | }, 28 | "./browser": { 29 | "types": "./dist-browser/browser.d.mts", 30 | "default": "./dist-browser/browser.mjs" 31 | }, 32 | "./prompts": { 33 | "browser": { 34 | "types": "./dist/prompts/index.d.mts", 35 | "default": "./dist/prompts/index.mjs" 36 | }, 37 | "import": { 38 | "types": "./dist/prompts/index.d.mts", 39 | "default": "./dist/prompts/index.mjs" 40 | }, 41 | "require": { 42 | "types": "./dist/prompts/index.d.cts", 43 | "default": "./dist/prompts/index.cjs" 44 | } 45 | }, 46 | "./worker": { 47 | "worker": "./dist-worker/worker.mjs", 48 | "types": "./dist-worker/worker.d.mts", 49 | "default": "./dist-worker/worker.mjs" 50 | }, 51 | "./server": { 52 | "import": { 53 | "types": "./dist/index.d.mts", 54 | "default": "./dist/index.mjs" 55 | }, 56 | "require": { 57 | "types": "./dist/index.d.cts", 58 | "default": "./dist/index.cjs" 59 | } 60 | }, 61 | "./dist/index.mjs": { 62 | "types": "./dist/index.d.mts", 63 | "default": "./dist/index.mjs" 64 | }, 65 | "./dist/index.cjs": { 66 | "types": "./dist/index.d.cts", 67 | "default": "./dist/index.cjs" 68 | } 69 | }, 70 | "main": "./dist/index.cjs", 71 | "module": 
"./dist/index.mjs", 72 | "types": "./dist/index.d.cts", 73 | "files": [ 74 | "dist", 75 | "dist-browser", 76 | "dist-worker" 77 | ], 78 | "scripts": { 79 | "build": "unbuild", 80 | "build:browser": "unbuild --config build.browser.config.ts", 81 | "build:worker": "unbuild --config build.worker.config.ts", 82 | "build:all": "npm run build && npm run build:browser && npm run build:worker", 83 | "test": "npm run lint && npm run test:types && vitest run --coverage", 84 | "test:coverage": "vitest run --coverage", 85 | "test:integration": "vitest run test/demo-github.test.ts", 86 | "test:watch": "vitest watch", 87 | "test:types": "tsc --noEmit --skipLibCheck", 88 | "lint": "eslint --cache . && prettier -c \"src/**/*\" \"test/**/*\"", 89 | "lint:eslint": "eslint --cache .", 90 | "lint:prettier": "prettier -c src test", 91 | "lint:fix": "eslint --cache . --fix && prettier -c \"src/**/*\" \"test/**/*\" -w", 92 | "prepublishOnly": "npm run build:all" 93 | }, 94 | "dependencies": { 95 | "c12": "^2.0.1", 96 | "consola": "^3.3.3", 97 | "defu": "^6.1.4", 98 | "fast-glob": "^3.3.3", 99 | "ignore": "^7.0.0", 100 | "js-tiktoken": "^1.0.16", 101 | "pathe": "^2.0.1", 102 | "tar@latest": "link:jsr:@std/tar@latest" 103 | }, 104 | "devDependencies": { 105 | "@cloudflare/workers-types": "^4.20250718.0", 106 | "@types/adm-zip": "^0.5.7", 107 | "adm-zip": "^0.5.16", 108 | "eslint": "^9.18.0", 109 | "eslint-config-unjs": "^0.4.2", 110 | "msw": "^2.10.4", 111 | "prettier": "^3.4.2", 112 | "typescript": "^5.7.3", 113 | "unbuild": "^3.5.0", 114 | "vitest": "^2.1.8" 115 | } 116 | } 117 | -------------------------------------------------------------------------------- /packages/sdk/src/browser.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Browser-safe entry point for codefetch-sdk 3 | * 4 | * This module exports only the APIs that work in browser environments: 5 | * - No Node.js built-in modules (fs, path, os, etc.) 
6 | * - No process.env access 7 | * - Only pure JavaScript utilities 8 | */ 9 | 10 | // Core types and result classes (browser-safe) 11 | export { FetchResultImpl } from "./fetch-result.js"; 12 | export type { 13 | FileNode, 14 | FetchResult, 15 | FetchMetadata, 16 | TokenEncoder, 17 | } from "./types.js"; 18 | 19 | // Token counting (browser-safe) 20 | export { countTokens, SUPPORTED_MODELS } from "./token-counter.js"; 21 | 22 | // Markdown utilities (browser-safe, only the content-based version) 23 | export { 24 | generateMarkdownFromContent, 25 | type FileContent, 26 | type MarkdownFromContentOptions, 27 | } from "./markdown-content.js"; 28 | 29 | // Browser-safe utilities 30 | export { detectLanguage } from "./utils-browser.js"; 31 | 32 | // Constants (browser-safe) 33 | export { VALID_PROMPTS, VALID_ENCODERS, VALID_LIMITERS } from "./constants.js"; 34 | 35 | // Prompt templates (browser-safe) 36 | export * from "./prompts/index.js"; 37 | export { prompts } from "./prompts/index.js"; 38 | 39 | // Browser-safe fetch function that only works with URLs 40 | export { fetchFromWebWorker as fetch } from "./web/sdk-web-fetch-worker.js"; 41 | 42 | // Note: The following are NOT exported as they require Node.js APIs: 43 | // - Config loading (requires fs) 44 | // - File collection (requires fs) 45 | // - Tree generation (requires fs) 46 | // - collectFilesAsTree (requires fs) 47 | // - generateMarkdown (requires fs) 48 | // - findProjectRoot (requires fs) 49 | // - Web fetching (requires Node.js modules in current implementation) 50 | // - GitHub API (requires os.tmpdir and process.env) 51 | -------------------------------------------------------------------------------- /packages/sdk/src/cache/factory.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Cache factory that returns the appropriate cache implementation based on runtime environment 3 | */ 4 | 5 | import { CacheInterface, CacheOptions } from "./interface.js"; 6 
| import { CloudflareCache } from "./cloudflare-cache.js"; 7 | import { FileSystemCache } from "./filesystem-cache.js"; 8 | import { MemoryCache } from "./memory-cache.js"; 9 | import { isCloudflareWorker } from "../env.js"; 10 | 11 | /** 12 | * Create a cache instance based on the runtime environment 13 | */ 14 | export function createCache(options?: CacheOptions): CacheInterface { 15 | // Check for Cloudflare Workers environment 16 | if (typeof caches !== "undefined" && (globalThis as any).caches?.default) { 17 | return new CloudflareCache(options); 18 | } 19 | 20 | // Check for Node.js environment 21 | if ( 22 | typeof process !== "undefined" && 23 | process.versions?.node && 24 | !isCloudflareWorker 25 | ) { 26 | return new FileSystemCache(options); 27 | } 28 | 29 | // Browser or unknown environment - use in-memory cache 30 | return new MemoryCache(options); 31 | } 32 | 33 | /** 34 | * Create a cache with explicit type 35 | */ 36 | export function createCacheOfType( 37 | type: "cloudflare" | "filesystem" | "memory", 38 | options?: CacheOptions 39 | ): CacheInterface { 40 | switch (type) { 41 | case "cloudflare": { 42 | return new CloudflareCache(options); 43 | } 44 | case "filesystem": { 45 | return new FileSystemCache(options); 46 | } 47 | case "memory": { 48 | return new MemoryCache(options); 49 | } 50 | default: { 51 | throw new Error(`Unknown cache type: ${type}`); 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /packages/sdk/src/cache/index.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Cache module exports 3 | */ 4 | 5 | export * from "./interface.js"; 6 | export * from "./factory.js"; 7 | export * from "./cloudflare-cache.js"; 8 | export * from "./filesystem-cache.js"; 9 | export * from "./memory-cache.js"; 10 | export * from "./validation.js"; 11 | -------------------------------------------------------------------------------- 
/packages/sdk/src/cache/interface.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Cache interface for universal cache implementation 3 | */ 4 | 5 | export interface CachedResult { 6 | metadata: CacheMetadata; 7 | content: any; 8 | type?: "filesystem" | "memory" | "serialized"; 9 | } 10 | 11 | export interface CacheMetadata { 12 | url: string; 13 | fetchedAt: string; 14 | expiresAt: string; 15 | contentType?: string; 16 | headers?: Record; 17 | } 18 | 19 | export interface CacheOptions { 20 | namespace?: string; 21 | ttl?: number; // in seconds 22 | baseUrl?: string; // For Cloudflare Workers 23 | maxSize?: number; // in bytes 24 | } 25 | 26 | export interface CacheInterface { 27 | /** 28 | * Get a cached value 29 | */ 30 | get(key: string): Promise; 31 | 32 | /** 33 | * Set a cached value 34 | */ 35 | set(key: string, value: any, ttl?: number): Promise; 36 | 37 | /** 38 | * Delete a cached value 39 | */ 40 | delete(key: string): Promise; 41 | 42 | /** 43 | * Clear all cached values 44 | */ 45 | clear(): Promise; 46 | 47 | /** 48 | * Check if a key exists in cache 49 | */ 50 | has(key: string): Promise; 51 | } 52 | 53 | export type CacheStrategy = 54 | | "auto" // Use cache if available (default) 55 | | "force" // Always use cache, fail if not available 56 | | "bypass" // Skip cache completely 57 | | "refresh" // Invalidate cache and fetch fresh 58 | | "validate"; // Check if cache is still valid 59 | -------------------------------------------------------------------------------- /packages/sdk/src/cache/memory-cache.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * In-memory cache implementation for browser environments or as fallback 3 | */ 4 | 5 | import { CacheInterface, CachedResult, CacheOptions } from "./interface.js"; 6 | 7 | export class MemoryCache implements CacheInterface { 8 | private cache: Map = 9 | new Map(); 10 | private options: CacheOptions; 11 | 12 
| constructor(options: CacheOptions = {}) { 13 | this.options = { 14 | namespace: "codefetch", 15 | ttl: 3600, // 1 hour default 16 | maxSize: 50 * 1024 * 1024, // 50MB default for memory 17 | ...options, 18 | }; 19 | } 20 | 21 | async get(key: string): Promise { 22 | const entry = this.cache.get(key); 23 | 24 | if (!entry) { 25 | return null; 26 | } 27 | 28 | // Check if expired 29 | if (entry.expires <= Date.now()) { 30 | this.cache.delete(key); 31 | return null; 32 | } 33 | 34 | return entry.data; 35 | } 36 | 37 | async set(key: string, value: any, ttl?: number): Promise { 38 | const effectiveTtl = ttl || this.options.ttl || 3600; 39 | const now = new Date(); 40 | const expiresAt = new Date(now.getTime() + effectiveTtl * 1000); 41 | 42 | const cachedResult: CachedResult = { 43 | metadata: { 44 | url: key, 45 | fetchedAt: now.toISOString(), 46 | expiresAt: expiresAt.toISOString(), 47 | contentType: "application/json", 48 | }, 49 | content: value, 50 | type: "memory", 51 | }; 52 | 53 | this.cache.set(key, { 54 | data: cachedResult, 55 | expires: expiresAt.getTime(), 56 | }); 57 | 58 | // Clean up expired entries periodically 59 | this.cleanupExpired(); 60 | 61 | // Check memory usage and clean if needed 62 | this.cleanupIfNeeded(); 63 | } 64 | 65 | async delete(key: string): Promise { 66 | this.cache.delete(key); 67 | } 68 | 69 | async clear(): Promise { 70 | this.cache.clear(); 71 | } 72 | 73 | async has(key: string): Promise { 74 | const entry = this.cache.get(key); 75 | 76 | if (!entry) { 77 | return false; 78 | } 79 | 80 | // Check if expired 81 | if (entry.expires <= Date.now()) { 82 | this.cache.delete(key); 83 | return false; 84 | } 85 | 86 | return true; 87 | } 88 | 89 | /** 90 | * Clean up expired entries 91 | */ 92 | private cleanupExpired(): void { 93 | const now = Date.now(); 94 | 95 | for (const [key, entry] of this.cache.entries()) { 96 | if (entry.expires <= now) { 97 | this.cache.delete(key); 98 | } 99 | } 100 | } 101 | 102 | /** 103 | * Estimate 
memory usage and clean if needed 104 | */ 105 | private cleanupIfNeeded(): void { 106 | // This is a rough estimate - actual memory usage may vary 107 | const maxEntries = Math.floor( 108 | (this.options.maxSize || 50_000_000) / 10_240 109 | ); // Assume ~10KB per entry 110 | 111 | if (this.cache.size > maxEntries) { 112 | // Convert to array and sort by expiration time 113 | const entries = [...this.cache.entries()].sort( 114 | (a, b) => a[1].expires - b[1].expires 115 | ); 116 | 117 | // Remove oldest entries until we're under the limit 118 | const entriesToRemove = Math.floor(entries.length * 0.2); // Remove 20% 119 | 120 | for (let i = 0; i < entriesToRemove; i++) { 121 | this.cache.delete(entries[i][0]); 122 | } 123 | } 124 | } 125 | } 126 | -------------------------------------------------------------------------------- /packages/sdk/src/cache/validation.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Content validation for cached entries 3 | */ 4 | 5 | import { stat } from "node:fs/promises"; 6 | import { CachedResult } from "./interface.js"; 7 | import { isCloudflareWorker } from "../env.js"; 8 | 9 | /** 10 | * Validate that cached content is still valid and accessible 11 | */ 12 | export async function validateCachedContent( 13 | cached: CachedResult 14 | ): Promise { 15 | if (!cached || !cached.content) { 16 | return false; 17 | } 18 | 19 | // For file system paths - only validate in Node.js environment 20 | if ( 21 | cached.type === "filesystem" && 22 | cached.content.path && 23 | !isCloudflareWorker 24 | ) { 25 | try { 26 | await stat(cached.content.path); 27 | return true; 28 | } catch { 29 | // Path no longer exists 30 | return false; 31 | } 32 | } 33 | 34 | // For in-memory content 35 | if (cached.type === "memory" && cached.content) { 36 | return true; 37 | } 38 | 39 | // For serialized content 40 | if (cached.type === "serialized" && cached.content) { 41 | return true; 42 | } 43 | 44 | // Check 
expiration 45 | if (cached.metadata?.expiresAt) { 46 | const expiresAt = new Date(cached.metadata.expiresAt); 47 | if (expiresAt <= new Date()) { 48 | return false; 49 | } 50 | } 51 | 52 | return true; 53 | } 54 | 55 | /** 56 | * Generate a cache key from source and options 57 | */ 58 | export function generateCacheKey(source: string, options: any = {}): string { 59 | const parts = [source]; 60 | 61 | if (options) { 62 | // Add relevant options to cache key 63 | if (options.format) parts.push(`format:${options.format}`); 64 | if (options.maxTokens) parts.push(`tokens:${options.maxTokens}`); 65 | if (options.tokenEncoder) parts.push(`encoder:${options.tokenEncoder}`); 66 | if (options.extensions?.length) { 67 | parts.push(`ext:${options.extensions.sort().join(",")}`); 68 | } 69 | if (options.excludeDirs?.length) { 70 | parts.push(`exclude:${options.excludeDirs.sort().join(",")}`); 71 | } 72 | } 73 | 74 | return parts.join("|"); 75 | } 76 | -------------------------------------------------------------------------------- /packages/sdk/src/config-worker.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Worker-safe configuration utilities 3 | * No file system operations, only in-memory config handling 4 | */ 5 | 6 | // Define CodefetchConfig interface for Workers 7 | export interface CodefetchConfig { 8 | format: "markdown" | "json"; 9 | extensions: string[]; 10 | excludeFiles: string[]; 11 | includeFiles: string[]; 12 | excludeDirs: string[]; 13 | includeDirs: string[]; 14 | verbose: number; 15 | projectTree: number; 16 | dryRun: boolean; 17 | maxTokens: number; 18 | tokenEncoder: string; 19 | disableLineNumbers: boolean; 20 | defaultIgnore: boolean; 21 | gitignore: boolean; 22 | tokenLimiter: string; 23 | tokenCountOnly: boolean; 24 | promptFile?: string; 25 | prompt?: string; 26 | templateVars: Record; 27 | } 28 | 29 | /** 30 | * Get default configuration for Workers 31 | */ 32 | export function getDefaultConfig(): 
CodefetchConfig { 33 | return { 34 | format: "markdown", 35 | extensions: [], 36 | excludeFiles: [], 37 | includeFiles: [], 38 | excludeDirs: [], 39 | includeDirs: [], 40 | verbose: 0, 41 | projectTree: 0, 42 | dryRun: false, 43 | maxTokens: 50_000, 44 | tokenEncoder: "cl100k", 45 | disableLineNumbers: false, 46 | defaultIgnore: true, 47 | gitignore: false, // No file system access in Workers 48 | tokenLimiter: "truncated", 49 | tokenCountOnly: false, 50 | templateVars: {}, 51 | }; 52 | } 53 | 54 | /** 55 | * Resolve configuration in Workers (no file system) 56 | */ 57 | export async function resolveCodefetchConfig( 58 | cwd?: string, 59 | overrides?: Partial 60 | ): Promise { 61 | // In Workers, we can't read config files from disk 62 | // Just return defaults merged with overrides 63 | const defaults = getDefaultConfig(); 64 | return { 65 | ...defaults, 66 | ...overrides, 67 | }; 68 | } 69 | 70 | /** 71 | * Merge configuration with CLI arguments 72 | */ 73 | export function mergeWithCliArgs( 74 | config: CodefetchConfig, 75 | args: any 76 | ): CodefetchConfig { 77 | const merged = { ...config }; 78 | 79 | // Override with CLI args if provided 80 | if (args.format !== undefined) merged.format = args.format; 81 | if (args.extensions !== undefined) merged.extensions = args.extensions; 82 | if (args.excludeFiles !== undefined) merged.excludeFiles = args.excludeFiles; 83 | if (args.includeFiles !== undefined) merged.includeFiles = args.includeFiles; 84 | if (args.excludeDirs !== undefined) merged.excludeDirs = args.excludeDirs; 85 | if (args.includeDirs !== undefined) merged.includeDirs = args.includeDirs; 86 | if (args.verbose !== undefined) merged.verbose = args.verbose; 87 | if (args.projectTree !== undefined) merged.projectTree = args.projectTree; 88 | if (args.dryRun !== undefined) merged.dryRun = args.dryRun; 89 | if (args.maxTokens !== undefined) merged.maxTokens = args.maxTokens; 90 | if (args.tokenEncoder !== undefined) merged.tokenEncoder = 
args.tokenEncoder; 91 | if (args.disableLineNumbers !== undefined) 92 | merged.disableLineNumbers = args.disableLineNumbers; 93 | if (args.defaultIgnore !== undefined) 94 | merged.defaultIgnore = args.defaultIgnore; 95 | if (args.tokenLimiter !== undefined) merged.tokenLimiter = args.tokenLimiter; 96 | if (args.tokenCountOnly !== undefined) 97 | merged.tokenCountOnly = args.tokenCountOnly; 98 | if (args.promptFile !== undefined) merged.promptFile = args.promptFile; 99 | if (args.prompt !== undefined) merged.prompt = args.prompt; 100 | 101 | // Merge template vars 102 | if (args.templateVars) { 103 | merged.templateVars = { 104 | ...merged.templateVars, 105 | ...args.templateVars, 106 | }; 107 | } 108 | 109 | return merged; 110 | } 111 | -------------------------------------------------------------------------------- /packages/sdk/src/config.ts: -------------------------------------------------------------------------------- 1 | import { resolve } from "pathe"; 2 | import type { TokenEncoder, TokenLimiter, OutputFormat } from "./types"; 3 | import { defu } from "defu"; 4 | 5 | export interface CodefetchConfig { 6 | outputFile: string; 7 | outputPath: string; 8 | maxTokens: number; 9 | includeFiles?: string[]; 10 | excludeFiles?: string[]; 11 | includeDirs?: string[]; 12 | excludeDirs?: string[]; 13 | verbose: number; 14 | extensions?: string[]; 15 | defaultIgnore: boolean; 16 | gitignore: boolean; 17 | projectTree: number; 18 | tokenEncoder: TokenEncoder; 19 | tokenLimiter: TokenLimiter; 20 | trackedModels?: string[]; 21 | dryRun?: boolean; 22 | disableLineNumbers?: boolean; 23 | tokenCountOnly?: boolean; 24 | defaultPromptFile: string; 25 | defaultChat?: string; 26 | templateVars?: Record; 27 | format?: OutputFormat; 28 | } 29 | 30 | const defaultOutput = "codebase.md"; 31 | 32 | export const getDefaultConfig = (): CodefetchConfig => ({ 33 | outputPath: "codefetch", 34 | outputFile: defaultOutput, 35 | maxTokens: 999_000, // safety 36 | verbose: 1, 37 | 
projectTree: 2, 38 | defaultIgnore: true, 39 | gitignore: true, 40 | tokenEncoder: "simple", 41 | tokenLimiter: "truncated", 42 | trackedModels: ["o3", "gemini-2.5-pro", "claude-sonnet-4", "claude-opus-4"], 43 | dryRun: false, 44 | disableLineNumbers: false, 45 | tokenCountOnly: false, 46 | defaultPromptFile: "default.md", 47 | defaultChat: "https://chat.com", 48 | templateVars: {}, 49 | format: "markdown", 50 | }); 51 | 52 | export async function resolveCodefetchConfig( 53 | config: CodefetchConfig, 54 | cwd: string 55 | ): Promise { 56 | const resolved = { ...config }; 57 | 58 | if (typeof resolved.outputPath === "string") { 59 | resolved.outputPath = resolve(cwd, resolved.outputPath); 60 | } 61 | 62 | // Resolve paths for include/exclude patterns 63 | if (resolved.includeFiles) { 64 | resolved.includeFiles = resolved.includeFiles.map((pattern) => 65 | resolve(cwd, pattern) 66 | ); 67 | } 68 | if (resolved.excludeFiles) { 69 | resolved.excludeFiles = resolved.excludeFiles.map((pattern) => 70 | resolve(cwd, pattern) 71 | ); 72 | } 73 | if (resolved.includeDirs) { 74 | resolved.includeDirs = resolved.includeDirs.map((pattern) => 75 | resolve(cwd, pattern) 76 | ); 77 | } 78 | if (resolved.excludeDirs) { 79 | resolved.excludeDirs = resolved.excludeDirs.map((pattern) => 80 | resolve(cwd, pattern) 81 | ); 82 | } 83 | 84 | return resolved; 85 | } 86 | 87 | // Helper to merge CLI args with config file - improved array handling 88 | export function mergeWithCliArgs( 89 | config: CodefetchConfig, 90 | cliArgs: Partial 91 | ): CodefetchConfig { 92 | const mergeArrays = (a?: T[], b?: T[]): T[] => { 93 | if (!a && !b) return []; 94 | if (!a) return b || []; 95 | if (!b) return a; 96 | return [...new Set([...a, ...b])]; // Deduplicate arrays 97 | }; 98 | 99 | return { 100 | ...config, 101 | ...cliArgs, 102 | includeFiles: mergeArrays(config.includeFiles, cliArgs.includeFiles), 103 | excludeFiles: mergeArrays(config.excludeFiles, cliArgs.excludeFiles), 104 | includeDirs: 
mergeArrays(config.includeDirs, cliArgs.includeDirs), 105 | excludeDirs: mergeArrays(config.excludeDirs, cliArgs.excludeDirs), 106 | }; 107 | } 108 | 109 | // Custom merger that replaces trackedModels instead of merging 110 | export function createCustomConfigMerger() { 111 | return (obj: any, defaults: any) => { 112 | // If obj has trackedModels or prompt, use them instead of merging 113 | const result = defu(obj, defaults); 114 | if (obj && obj.trackedModels) { 115 | result.trackedModels = obj.trackedModels; 116 | } 117 | if (obj && obj.prompt !== undefined) { 118 | result.prompt = obj.prompt; 119 | } 120 | return result; 121 | }; 122 | } 123 | -------------------------------------------------------------------------------- /packages/sdk/src/constants.ts: -------------------------------------------------------------------------------- 1 | export const VALID_PROMPTS = new Set([ 2 | "default", 3 | "fix", 4 | "improve", 5 | "testgen", 6 | "codegen", 7 | ]); 8 | 9 | export const VALID_ENCODERS = new Set(["simple", "p50k", "o200k", "cl100k"]); 10 | 11 | export const VALID_LIMITERS = new Set(["sequential", "truncated"]); 12 | -------------------------------------------------------------------------------- /packages/sdk/src/default-ignore.ts: -------------------------------------------------------------------------------- 1 | export const DEFAULT_IGNORE_PATTERNS = ` 2 | # avoid recursion 3 | codefetch/ 4 | 5 | # Git 6 | .git/** 7 | **/.git/** 8 | .gitignore 9 | .gitattributes 10 | 11 | # Version Control 12 | .git/ 13 | .gitignore 14 | .gitattributes 15 | .svn/ 16 | .hg/ 17 | 18 | # Package Manager Files 19 | package-lock.json 20 | yarn.lock 21 | pnpm-lock.yaml 22 | bun.lockb 23 | .npmrc 24 | .yarnrc 25 | .pnpmrc 26 | .npmignore 27 | 28 | # Project Config 29 | .codefetchignore 30 | .editorconfig 31 | .eslintrc* 32 | .eslintcache 33 | .prettierrc* 34 | .stylelintrc* 35 | .tsbuildinfo 36 | .prettierignore 37 | 38 | # Binary and Image Files 39 | # Images 40 | *.png 41 | 
*.jpg 42 | *.jpeg 43 | *.gif 44 | *.ico 45 | *.webp 46 | *.bmp 47 | *.tiff 48 | *.tif 49 | *.raw 50 | *.cr2 51 | *.nef 52 | *.heic 53 | *.heif 54 | *.avif 55 | *.svg 56 | *.eps 57 | *.ai 58 | *.psd 59 | *.xcf 60 | 61 | # Videos 62 | *.mp4 63 | *.mov 64 | *.avi 65 | *.wmv 66 | *.flv 67 | *.mkv 68 | *.webm 69 | *.m4v 70 | *.mpg 71 | *.mpeg 72 | *.3gp 73 | *.3g2 74 | *.ogv 75 | *.vob 76 | 77 | # Audio 78 | *.mp3 79 | *.wav 80 | *.ogg 81 | *.m4a 82 | *.flac 83 | *.aac 84 | *.wma 85 | *.aiff 86 | *.mid 87 | *.midi 88 | 89 | # Documents and PDFs 90 | *.pdf 91 | *.doc 92 | *.docx 93 | *.xls 94 | *.xlsx 95 | *.ppt 96 | *.pptx 97 | *.odt 98 | *.ods 99 | *.odp 100 | *.pages 101 | *.numbers 102 | *.key 103 | 104 | # Archives and Compressed 105 | *.zip 106 | *.tar 107 | *.gz 108 | *.tgz 109 | *.rar 110 | *.7z 111 | *.bz2 112 | *.xz 113 | *.lz 114 | *.lzma 115 | *.lzo 116 | *.rz 117 | *.lz4 118 | *.zst 119 | *.br 120 | *.cab 121 | *.iso 122 | *.dmg 123 | *.img 124 | 125 | # Binary and Executable 126 | *.exe 127 | *.dll 128 | *.so 129 | *.dylib 130 | *.bin 131 | *.o 132 | *.obj 133 | *.lib 134 | *.a 135 | *.class 136 | *.pyc 137 | *.pyo 138 | *.pyd 139 | *.deb 140 | *.rpm 141 | *.pkg 142 | *.app 143 | *.sys 144 | *.ko 145 | 146 | # Database and Data Files 147 | *.dat 148 | *.db 149 | *.sqlite 150 | *.sqlite3 151 | *.mdb 152 | *.accdb 153 | *.dbf 154 | *.mdf 155 | *.ldf 156 | *.frm 157 | *.ibd 158 | *.idx 159 | *.dmp 160 | *.bak 161 | *.bson 162 | 163 | # Font Files 164 | *.ttf 165 | *.otf 166 | *.woff 167 | *.woff2 168 | *.eot 169 | 170 | # Model and 3D Files 171 | *.fbx 172 | *.obj 173 | *.max 174 | *.blend 175 | *.dae 176 | *.mb 177 | *.ma 178 | *.3ds 179 | *.c4d 180 | *.stl 181 | *.glb 182 | *.gltf 183 | 184 | # IDE and Editor Files 185 | .idea/ 186 | .vscode/ 187 | *.swp 188 | *.swo 189 | *.swn 190 | *.bak 191 | 192 | # Build and Cache 193 | dist/ 194 | build/ 195 | out/ 196 | workspace-data/ 197 | .cache/ 198 | .temp/ 199 | tmp/ 200 | *.min.js 201 | *.min.css 202 | 203 | # 
NXT Files 204 | *.nxt 205 | .nxt/ 206 | .nxt-cache/ 207 | nxt-env.d.ts 208 | nxt.config.* 209 | .nxtrc 210 | .nxt-workspace/ 211 | 212 | # Logs and Debug 213 | *.log 214 | debug.log 215 | npm-debug.log* 216 | yarn-debug.log* 217 | yarn-error.log* 218 | 219 | # Environment and Secrets 220 | .env 221 | .env.* 222 | .env-* 223 | *.env 224 | env.* 225 | *.pem 226 | *.key 227 | *.cert 228 | *.secret 229 | *.secrets 230 | *secret* 231 | *secrets* 232 | *credential* 233 | *credentials* 234 | *password* 235 | *passwords* 236 | *token* 237 | *tokens* 238 | 239 | # Documentation 240 | LICENSE* 241 | LICENCE* 242 | README* 243 | CHANGELOG* 244 | CONTRIBUTING* 245 | 246 | # OS Files 247 | .DS_Store 248 | Thumbs.db 249 | desktop.ini 250 | `.trim(); 251 | -------------------------------------------------------------------------------- /packages/sdk/src/env.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Runtime environment detection for Cloudflare Workers 3 | */ 4 | 5 | /** 6 | * Detect if code is running in a Cloudflare Worker environment 7 | * Workers have WebSocketPair but no __dirname in globalThis 8 | */ 9 | export const isCloudflareWorker = 10 | (globalThis as any).WebSocketPair !== undefined && 11 | !("__dirname" in globalThis); 12 | 13 | /** 14 | * Get environment-specific cache size limit in bytes 15 | */ 16 | export const getCacheSizeLimit = (): number => { 17 | // Workers have ~10MB TmpFS, use 8MB to leave headroom 18 | if (isCloudflareWorker) { 19 | return 8 * 1024 * 1024; // 8 MB 20 | } 21 | // Node.js default: 100 MB 22 | return 100 * 1024 * 1024; 23 | }; 24 | 25 | /** 26 | * Check if git operations are available in current environment 27 | */ 28 | export const isGitAvailable = (): boolean => { 29 | return !isCloudflareWorker; 30 | }; 31 | -------------------------------------------------------------------------------- /packages/sdk/src/errors.ts: 
-------------------------------------------------------------------------------- 1 | /** 2 | * Custom error classes for better error handling and debugging 3 | */ 4 | 5 | /** 6 | * Base error class for all Codefetch errors 7 | */ 8 | export class CodefetchError extends Error { 9 | constructor( 10 | message: string, 11 | public code: string 12 | ) { 13 | super(message); 14 | this.name = "CodefetchError"; 15 | // Maintain proper stack trace in V8 16 | if (Error.captureStackTrace) { 17 | Error.captureStackTrace(this, this.constructor); 18 | } 19 | } 20 | } 21 | 22 | /** 23 | * Error thrown when GitHub API requests fail 24 | */ 25 | export class GitHubError extends CodefetchError { 26 | constructor( 27 | message: string, 28 | public status: number, 29 | public rateLimitRemaining?: number, 30 | public rateLimitReset?: Date 31 | ) { 32 | super(message, "GITHUB_ERROR"); 33 | this.name = "GitHubError"; 34 | } 35 | } 36 | 37 | /** 38 | * Error thrown when token limit is exceeded 39 | */ 40 | export class TokenLimitError extends CodefetchError { 41 | constructor( 42 | public limit: number, 43 | public used: number, 44 | public files: string[] 45 | ) { 46 | super(`Token limit exceeded: ${used}/${limit} tokens used`, "TOKEN_LIMIT"); 47 | this.name = "TokenLimitError"; 48 | } 49 | } 50 | 51 | /** 52 | * Error thrown when parsing files or content fails 53 | */ 54 | export class ParseError extends CodefetchError { 55 | constructor( 56 | message: string, 57 | public filePath: string, 58 | public line?: number, 59 | public column?: number 60 | ) { 61 | super(message, "PARSE_ERROR"); 62 | this.name = "ParseError"; 63 | } 64 | } 65 | 66 | /** 67 | * Error thrown when network requests fail 68 | */ 69 | export class NetworkError extends CodefetchError { 70 | constructor( 71 | message: string, 72 | public url: string, 73 | public cause?: Error 74 | ) { 75 | super(message, "NETWORK_ERROR"); 76 | this.name = "NetworkError"; 77 | } 78 | } 79 | 80 | /** 81 | * Error thrown when 
configuration is invalid 82 | */ 83 | export class ConfigError extends CodefetchError { 84 | constructor( 85 | message: string, 86 | public configPath?: string, 87 | public invalidField?: string 88 | ) { 89 | super(message, "CONFIG_ERROR"); 90 | this.name = "ConfigError"; 91 | } 92 | } 93 | 94 | /** 95 | * Error thrown when cache operations fail 96 | */ 97 | export class CacheError extends CodefetchError { 98 | constructor( 99 | message: string, 100 | public operation: "read" | "write" | "delete", 101 | public key?: string 102 | ) { 103 | super(message, "CACHE_ERROR"); 104 | this.name = "CacheError"; 105 | } 106 | } 107 | 108 | /** 109 | * Error thrown when URL validation fails 110 | */ 111 | export class URLValidationError extends CodefetchError { 112 | constructor( 113 | message: string, 114 | public url: string, 115 | public reason: string 116 | ) { 117 | super(message, "URL_VALIDATION_ERROR"); 118 | this.name = "URLValidationError"; 119 | } 120 | } 121 | 122 | /** 123 | * Type guard to check if an error is a CodefetchError 124 | */ 125 | export function isCodefetchError(error: unknown): error is CodefetchError { 126 | return error instanceof CodefetchError; 127 | } 128 | 129 | /** 130 | * Type guard to check if an error is a GitHubError 131 | */ 132 | export function isGitHubError(error: unknown): error is GitHubError { 133 | return error instanceof GitHubError; 134 | } 135 | 136 | /** 137 | * Type guard to check if an error is a TokenLimitError 138 | */ 139 | export function isTokenLimitError(error: unknown): error is TokenLimitError { 140 | return error instanceof TokenLimitError; 141 | } 142 | 143 | /** 144 | * Helper to wrap unknown errors in a CodefetchError 145 | */ 146 | export function wrapError( 147 | error: unknown, 148 | code: string = "UNKNOWN_ERROR" 149 | ): CodefetchError { 150 | if (error instanceof CodefetchError) { 151 | return error; 152 | } 153 | 154 | if (error instanceof Error) { 155 | const wrappedError = new CodefetchError(error.message, 
code); 156 | wrappedError.stack = error.stack; 157 | return wrappedError; 158 | } 159 | 160 | return new CodefetchError(String(error), code); 161 | } 162 | -------------------------------------------------------------------------------- /packages/sdk/src/fetch-result.ts: -------------------------------------------------------------------------------- 1 | import type { FileNode, FetchMetadata } from "./types"; 2 | 3 | export class FetchResultImpl { 4 | constructor( 5 | public root: FileNode, 6 | public metadata: FetchMetadata 7 | ) {} 8 | 9 | /** 10 | * Get a file node by its path 11 | */ 12 | getFileByPath(path: string): FileNode | null { 13 | // Normalize path (remove leading slash if present) 14 | const normalizedPath = path.startsWith("/") ? path.slice(1) : path; 15 | 16 | function searchNode(node: FileNode, currentPath: string): FileNode | null { 17 | const nodePath = currentPath ? `${currentPath}/${node.name}` : node.name; 18 | 19 | if (node.type === "file" && nodePath === normalizedPath) { 20 | return node; 21 | } 22 | 23 | if (node.type === "directory" && node.children) { 24 | for (const child of node.children) { 25 | const result = searchNode(child, nodePath); 26 | if (result) return result; 27 | } 28 | } 29 | 30 | return null; 31 | } 32 | 33 | // Special case for root 34 | if (normalizedPath === "" || normalizedPath === "/") { 35 | return this.root; 36 | } 37 | 38 | // Search in children 39 | if (this.root.children) { 40 | for (const child of this.root.children) { 41 | const result = searchNode(child, ""); 42 | if (result) return result; 43 | } 44 | } 45 | 46 | return null; 47 | } 48 | 49 | /** 50 | * Get all files as a flat array 51 | */ 52 | getAllFiles(): FileNode[] { 53 | const files: FileNode[] = []; 54 | 55 | function collectFiles(node: FileNode) { 56 | if (node.type === "file") { 57 | files.push(node); 58 | } else if (node.type === "directory" && node.children) { 59 | for (const child of node.children) { 60 | collectFiles(child); 61 | } 62 | } 63 | 
} 64 | 65 | collectFiles(this.root); 66 | return files; 67 | } 68 | 69 | /** 70 | * Convert to markdown format 71 | */ 72 | toMarkdown(): string { 73 | const lines: string[] = []; 74 | 75 | // Add project structure 76 | lines.push("Project Structure:"); 77 | lines.push(this.buildTreeString(this.root)); 78 | lines.push(""); 79 | 80 | // Add file contents 81 | const files = this.getAllFiles(); 82 | for (const file of files) { 83 | if (file.content) { 84 | lines.push(`${file.path}`); 85 | lines.push("```"); 86 | const contentLines = file.content.split("\n"); 87 | for (const [index, line] of contentLines.entries()) { 88 | lines.push(`${index + 1} | ${line}`); 89 | } 90 | lines.push("```"); 91 | lines.push(""); 92 | } 93 | } 94 | 95 | return lines.join("\n"); 96 | } 97 | 98 | /** 99 | * Build tree string representation 100 | */ 101 | private buildTreeString(node: FileNode, prefix = "", isLast = true): string { 102 | const lines: string[] = []; 103 | 104 | if (node.name) { 105 | // Skip empty root name 106 | const connector = isLast ? "└── " : "├── "; 107 | lines.push(prefix + connector + node.name); 108 | } 109 | 110 | if (node.type === "directory" && node.children) { 111 | const extension = node.name ? (isLast ? 
" " : "│ ") : ""; 112 | const newPrefix = prefix + extension; 113 | 114 | for (const [index, child] of node.children.entries()) { 115 | const childIsLast = index === node.children!.length - 1; 116 | lines.push(this.buildTreeString(child, newPrefix, childIsLast)); 117 | } 118 | } 119 | 120 | return lines.join("\n"); 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /packages/sdk/src/fetch.ts: -------------------------------------------------------------------------------- 1 | import { resolve } from "pathe"; 2 | import ignore from "ignore"; 3 | import { readFile } from "node:fs/promises"; 4 | import type { FetchResult, OutputFormat } from "./types"; 5 | import type { CodefetchConfig } from "./config"; 6 | import { collectFiles } from "./files"; 7 | import { collectFilesAsTree } from "./files-tree"; 8 | import { generateMarkdown } from "./markdown"; 9 | import { FetchResultImpl } from "./fetch-result"; 10 | import { DEFAULT_IGNORE_PATTERNS } from "./default-ignore"; 11 | import { CacheStrategy } from "./cache/interface.js"; 12 | 13 | export interface FetchOptions extends Partial { 14 | source?: string; // URL or local path, defaults to cwd 15 | format?: OutputFormat; 16 | 17 | // Cache control options 18 | cache?: boolean | CacheStrategy; 19 | cacheKey?: string; 20 | cacheTTL?: number; // in seconds 21 | cacheNamespace?: string; 22 | 23 | // For Cloudflare Workers 24 | cacheBaseUrl?: string; 25 | 26 | // Bypass cache completely 27 | noCache?: boolean; 28 | } 29 | 30 | export async function fetch( 31 | options: FetchOptions = {} 32 | ): Promise { 33 | // Check if source is a URL 34 | const source = options.source || process.cwd(); 35 | 36 | // URL detection - check for http(s):// or known git providers 37 | const isUrl = 38 | /^https?:\/\//.test(source) || 39 | /^(github\.com|gitlab\.com|bitbucket\.org)/.test(source); 40 | 41 | if (isUrl) { 42 | // Import web functionality 43 | const { fetchFromWeb } = await 
import("./web/sdk-web-fetch"); 44 | 45 | // Ensure proper URL format 46 | let normalizedUrl = source; 47 | if ( 48 | !normalizedUrl.startsWith("http://") && 49 | !normalizedUrl.startsWith("https://") 50 | ) { 51 | // Default to https for URLs without protocol 52 | normalizedUrl = "https://" + normalizedUrl; 53 | } 54 | 55 | // Use the SDK-friendly web fetch function 56 | return await fetchFromWeb(normalizedUrl, options); 57 | } 58 | 59 | const cwd = resolve(source); 60 | const format = options.format || "markdown"; 61 | 62 | // Set up ignore instance 63 | const ig = ignore(); 64 | 65 | // Add default ignore patterns if enabled 66 | if (options.defaultIgnore !== false) { 67 | ig.add(DEFAULT_IGNORE_PATTERNS); 68 | } 69 | 70 | // Add gitignore patterns if enabled and file exists 71 | if (options.gitignore !== false) { 72 | try { 73 | const gitignoreContent = await readFile( 74 | resolve(cwd, ".gitignore"), 75 | "utf8" 76 | ); 77 | ig.add(gitignoreContent); 78 | } catch { 79 | // No gitignore file, continue 80 | } 81 | } 82 | 83 | // Add custom excludes 84 | if (options.excludeFiles) { 85 | ig.add(options.excludeFiles); 86 | } 87 | 88 | // Prepare extension set 89 | const extensionSet = options.extensions 90 | ? new Set( 91 | options.extensions.map((ext: string) => 92 | ext.startsWith(".") ? 
ext : `.${ext}` 93 | ) 94 | ) 95 | : null; 96 | 97 | // Collect files 98 | const files = await collectFiles(cwd, { 99 | ig, 100 | extensionSet, 101 | excludeFiles: options.excludeFiles || null, 102 | includeFiles: options.includeFiles || null, 103 | excludeDirs: options.excludeDirs || null, 104 | includeDirs: options.includeDirs || null, 105 | verbose: options.verbose || 0, 106 | }); 107 | 108 | if (format === "json") { 109 | // Build tree structure 110 | const { root, totalSize, totalTokens } = await collectFilesAsTree( 111 | cwd, 112 | files, 113 | { 114 | tokenEncoder: options.tokenEncoder, 115 | tokenLimit: options.maxTokens, 116 | } 117 | ); 118 | 119 | const metadata = { 120 | totalFiles: files.length, 121 | totalSize, 122 | totalTokens, 123 | fetchedAt: new Date(), 124 | source: cwd, 125 | }; 126 | 127 | return new FetchResultImpl(root, metadata); 128 | } else { 129 | // Generate markdown format 130 | return generateMarkdown(files, { 131 | maxTokens: options.maxTokens || null, 132 | verbose: options.verbose || 0, 133 | projectTree: options.projectTree === undefined ? 
3 : options.projectTree, 134 | tokenEncoder: options.tokenEncoder || "simple", 135 | disableLineNumbers: options.disableLineNumbers || false, 136 | tokenLimiter: options.tokenLimiter || "truncated", 137 | }); 138 | } 139 | } 140 | -------------------------------------------------------------------------------- /packages/sdk/src/files-tree.ts: -------------------------------------------------------------------------------- 1 | import { readFile, stat } from "node:fs/promises"; 2 | import path from "pathe"; 3 | import type { FileNode, TokenEncoder } from "./types"; 4 | import { countTokens } from "./token-counter"; 5 | import { detectLanguage } from "./utils"; 6 | 7 | export async function collectFilesAsTree( 8 | baseDir: string, 9 | files: string[], 10 | options: { 11 | tokenEncoder?: string; 12 | tokenLimit?: number; 13 | } = {} 14 | ): Promise<{ root: FileNode; totalSize: number; totalTokens: number }> { 15 | const root: FileNode = { 16 | name: path.basename(baseDir), 17 | path: "", 18 | type: "directory", 19 | children: [], 20 | }; 21 | 22 | let totalSize = 0; 23 | let totalTokens = 0; 24 | 25 | // Sort files to ensure consistent tree structure 26 | files.sort(); 27 | 28 | for (const filePath of files) { 29 | const relativePath = path.relative(baseDir, filePath); 30 | const pathParts = relativePath.split(path.sep); 31 | 32 | // Navigate/create directory structure 33 | let currentNode = root; 34 | for (let i = 0; i < pathParts.length - 1; i++) { 35 | const dirName = pathParts[i]; 36 | 37 | if (!currentNode.children) { 38 | currentNode.children = []; 39 | } 40 | 41 | let dirNode = currentNode.children.find( 42 | (child) => child.type === "directory" && child.name === dirName 43 | ); 44 | 45 | if (!dirNode) { 46 | dirNode = { 47 | name: dirName, 48 | path: pathParts.slice(0, i + 1).join("/"), 49 | type: "directory", 50 | children: [], 51 | }; 52 | currentNode.children.push(dirNode); 53 | } 54 | 55 | currentNode = dirNode; 56 | } 57 | 58 | // Add file node 59 | try { 
60 | const fileName = pathParts.at(-1)!; 61 | const content = await readFile(filePath, "utf8"); 62 | const stats = await stat(filePath); 63 | const encoder: TokenEncoder = 64 | (options.tokenEncoder as TokenEncoder) || "simple"; 65 | const tokens = await countTokens(content, encoder); 66 | 67 | const fileNode: FileNode = { 68 | name: fileName, 69 | path: relativePath, 70 | type: "file", 71 | content, 72 | language: detectLanguage(fileName), 73 | size: stats.size, 74 | tokens, 75 | lastModified: stats.mtime, 76 | }; 77 | 78 | if (!currentNode.children) { 79 | currentNode.children = []; 80 | } 81 | 82 | currentNode.children.push(fileNode); 83 | totalSize += stats.size; 84 | totalTokens += tokens; 85 | } catch (error) { 86 | console.warn(`Failed to read file ${filePath}:`, error); 87 | } 88 | } 89 | 90 | // Sort children in each directory for consistent output 91 | sortTreeChildren(root); 92 | 93 | return { root, totalSize, totalTokens }; 94 | } 95 | 96 | function sortTreeChildren(node: FileNode) { 97 | if (node.children) { 98 | // Sort: directories first, then files, alphabetically 99 | node.children.sort((a, b) => { 100 | if (a.type !== b.type) { 101 | return a.type === "directory" ? 
-1 : 1; 102 | } 103 | return a.name.localeCompare(b.name); 104 | }); 105 | 106 | // Recursively sort children 107 | for (const child of node.children) { 108 | if (child.type === "directory") { 109 | sortTreeChildren(child); 110 | } 111 | } 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /packages/sdk/src/files.ts: -------------------------------------------------------------------------------- 1 | import path from "pathe"; 2 | import fg from "fast-glob"; 3 | 4 | // Helper function to escape special glob characters in paths 5 | function escapeGlobPath(str: string): string { 6 | // First normalize path separators to forward slashes for fast-glob 7 | const normalized = str.replace(/\\/g, "/"); 8 | // Then escape special glob characters: * ? [ ] { } ( ) ! @ + | 9 | return normalized.replace(/[*?[\]{}()!@+|]/g, (match) => "\\" + match); 10 | } 11 | 12 | export async function collectFiles( 13 | baseDir: string, 14 | options: { 15 | ig: any; 16 | extensionSet: Set | null; 17 | excludeFiles: string[] | null; 18 | includeFiles: string[] | null; 19 | excludeDirs: string[] | null; 20 | includeDirs: string[] | null; 21 | verbose: number; 22 | } 23 | ): Promise { 24 | const { 25 | ig, 26 | extensionSet, 27 | excludeFiles, 28 | includeFiles, 29 | excludeDirs, 30 | includeDirs, 31 | verbose, 32 | } = options; 33 | 34 | function logVerbose(message: string, level: number) { 35 | if (verbose >= level) { 36 | console.log(message); 37 | } 38 | } 39 | 40 | // Build glob patterns 41 | const patterns: string[] = []; 42 | 43 | // Handle include directories 44 | if (includeDirs?.length) { 45 | patterns.push(...includeDirs.map((dir) => `${escapeGlobPath(dir)}/**/*`)); 46 | } else { 47 | patterns.push("**/*"); 48 | } 49 | 50 | // Handle exclude directories 51 | const ignore = [ 52 | ...(excludeDirs?.map((dir) => `${escapeGlobPath(dir)}/**`) || []), 53 | ...(excludeFiles?.map((file) => file.replace(/\\/g, "/")) || []), 54 | ]; 55 | 56 | 
// Handle file extensions 57 | if (extensionSet) { 58 | const exts = [...extensionSet]; 59 | patterns.length = 0; // Clear patterns if we have specific extensions 60 | if (includeDirs?.length) { 61 | for (const dir of includeDirs) { 62 | for (const ext of exts) { 63 | patterns.push(`${escapeGlobPath(dir)}/**/*${ext}`); 64 | } 65 | } 66 | } else { 67 | for (const ext of exts) { 68 | patterns.push(`**/*${ext}`); 69 | } 70 | } 71 | } 72 | 73 | // Handle include files 74 | if (includeFiles?.length) { 75 | patterns.length = 0; // Clear patterns if we have specific files 76 | // Normalize path separators in include files for fast-glob 77 | patterns.push(...includeFiles.map((file) => file.replace(/\\/g, "/"))); 78 | } 79 | 80 | logVerbose(`Scanning with patterns: ${patterns.join(", ")}`, 2); 81 | logVerbose(`Ignoring: ${ignore.join(", ")}`, 2); 82 | 83 | const entries = await fg(patterns, { 84 | cwd: baseDir.replace(/\\/g, "/"), 85 | dot: true, 86 | absolute: true, 87 | ignore, 88 | onlyFiles: true, 89 | suppressErrors: true, 90 | followSymbolicLinks: true, 91 | caseSensitiveMatch: true, 92 | }); 93 | 94 | // Apply gitignore patterns 95 | return entries.filter((entry) => { 96 | const relativePath = path.relative(process.cwd(), entry); 97 | return !ig.ignores(relativePath); 98 | }); 99 | } 100 | -------------------------------------------------------------------------------- /packages/sdk/src/index.ts: -------------------------------------------------------------------------------- 1 | // Core exports 2 | export * from "./files"; 3 | export * from "./files-tree"; 4 | export * from "./markdown"; 5 | export * from "./token-counter"; 6 | export * from "./config"; 7 | export * from "./types"; 8 | export * from "./tree"; 9 | export * from "./template-parser"; 10 | export * from "./utils"; 11 | export * from "./utils/path"; 12 | export * from "./constants"; 13 | export * from "./model-db"; 14 | export * from "./default-ignore"; 15 | export * from "./fetch-result"; 16 | export * 
from "./fetch"; 17 | 18 | // Prompt templates 19 | export * from "./prompts/codegen"; 20 | export * from "./prompts/fix"; 21 | export * from "./prompts/improve"; 22 | export * from "./prompts/testgen"; 23 | 24 | // Web module 25 | export * from "./web/index"; 26 | -------------------------------------------------------------------------------- /packages/sdk/src/model-db.ts: -------------------------------------------------------------------------------- 1 | const MODELDB_URL = 2 | "https://raw.githubusercontent.com/regenrek/codefetch/main/modeldb.json"; 3 | 4 | export interface ModelInfo { 5 | max_tokens: number; 6 | max_input_tokens: number; 7 | max_output_tokens: number; 8 | litellm_provider: string; 9 | } 10 | 11 | export interface ModelDb { 12 | [key: string]: ModelInfo; 13 | } 14 | 15 | export async function getLocalModels(): Promise { 16 | return { 17 | o3: { 18 | max_tokens: 100_000, 19 | max_input_tokens: 200_000, 20 | max_output_tokens: 100_000, 21 | litellm_provider: "openai", 22 | }, 23 | "gemini-2.5-pro": { 24 | max_tokens: 65_535, 25 | max_input_tokens: 1_048_576, 26 | max_output_tokens: 65_535, 27 | litellm_provider: "gemini", 28 | }, 29 | "claude-sonnet-4": { 30 | max_tokens: 64_000, 31 | max_input_tokens: 200_000, 32 | max_output_tokens: 64_000, 33 | litellm_provider: "anthropic", 34 | }, 35 | "claude-opus-4": { 36 | max_tokens: 32_000, 37 | max_input_tokens: 200_000, 38 | max_output_tokens: 32_000, 39 | litellm_provider: "anthropic", 40 | }, 41 | }; 42 | } 43 | 44 | async function loadModelDb(): Promise { 45 | const response = await fetch(MODELDB_URL).catch(() => null); 46 | if (!response?.ok) return {}; 47 | 48 | const rawData = await response.json().catch(() => ({})); 49 | return rawData as ModelDb; 50 | } 51 | 52 | export async function fetchModels(trackedModels: string[]): Promise<{ 53 | modelDb: ModelDb; 54 | modelInfo: string; 55 | }> { 56 | const localModels = await getLocalModels(); 57 | const missingModels = trackedModels.filter((model) => 
!localModels[model]); 58 | 59 | if (missingModels.length === 0) { 60 | const modelInfo = formatModelInfo(trackedModels, localModels); 61 | return { modelDb: localModels, modelInfo }; 62 | } 63 | 64 | const remoteData = await loadModelDb(); 65 | const remoteModels: ModelDb = {}; 66 | 67 | for (const modelName of missingModels) { 68 | const model = remoteData[modelName]; 69 | if ( 70 | model?.max_tokens && 71 | model.max_input_tokens && 72 | model.max_output_tokens && 73 | model.litellm_provider 74 | ) { 75 | remoteModels[modelName] = { 76 | max_tokens: model.max_tokens, 77 | max_input_tokens: model.max_input_tokens, 78 | max_output_tokens: model.max_output_tokens, 79 | litellm_provider: model.litellm_provider, 80 | }; 81 | } 82 | } 83 | 84 | const modelDb = { ...remoteModels, ...localModels }; 85 | const modelInfo = formatModelInfo(trackedModels, modelDb); 86 | 87 | return { modelDb, modelInfo }; 88 | } 89 | 90 | export function formatModelInfo( 91 | trackedModels: string[], 92 | modelDb: ModelDb 93 | ): string { 94 | const rows = trackedModels.map((modelName) => { 95 | const model = modelDb[modelName] || {}; 96 | const tokens = model.max_input_tokens 97 | ? model.max_input_tokens.toLocaleString() 98 | : "Unknown"; 99 | return `│ ${modelName.padEnd(30)} │ ${tokens.padEnd(15)} │`; 100 | }); 101 | 102 | const header = "│ Model Name │ Max Tokens │"; 103 | const separator = "├────────────────────────────────┼────────────────┤"; 104 | const topBorder = "┌────────────────────────────────┬────────────────┐"; 105 | const bottomBorder = "└────────────────────────────────┴────────────────┘"; 106 | 107 | return [topBorder, header, separator, ...rows, bottomBorder].join("\n"); 108 | } 109 | -------------------------------------------------------------------------------- /packages/sdk/src/prompts/codegen.ts: -------------------------------------------------------------------------------- 1 | export default `You are a senior developer. 
You produce optimized, maintainable code that follows best practices. 2 | 3 | Your task is to write code according to my instructions for the current codebase. 4 | 5 | instructions: 6 | 7 | {{MESSAGE}} 8 | 9 | 10 | Rules: 11 | - Keep your suggestions concise and focused. Avoid unnecessary explanations or fluff. 12 | - Your output should be a series of specific, actionable changes. 13 | 14 | When approaching this task: 15 | 1. Carefully review the provided code. 16 | 2. Identify the area thats raising this issue or error and provide a fix. 17 | 3. Consider best practices for the specific programming language used. 18 | 19 | For each suggested change, provide: 20 | 1. A short description of the change (one line maximum). 21 | 2. The modified code block. 22 | 23 | Use the following format for your output: 24 | 25 | [Short Description] 26 | \`\`\`[language]:[path/to/file] 27 | [code block] 28 | \`\`\` 29 | 30 | Begin fixing the codebase provide your solutions. 31 | 32 | My current codebase: 33 | 34 | {{CURRENT_CODEBASE}} 35 | 36 | `; 37 | -------------------------------------------------------------------------------- /packages/sdk/src/prompts/fix.ts: -------------------------------------------------------------------------------- 1 | export default `You are a senior developer. You produce optimized, maintainable code that follows best practices. 2 | 3 | Your task is to review the current codebase and fix the current issues. 4 | 5 | Current Issue: 6 | 7 | {{MESSAGE}} 8 | 9 | 10 | Rules: 11 | - Keep your suggestions concise and focused. Avoid unnecessary explanations or fluff. 12 | - Your output should be a series of specific, actionable changes. 13 | 14 | When approaching this task: 15 | 1. Carefully review the provided code. 16 | 2. Identify the area thats raising this issue or error and provide a fix. 17 | 3. Consider best practices for the specific programming language used. 18 | 19 | For each suggested change, provide: 20 | 1. 
A short description of the change (one line maximum). 21 | 2. The modified code block. 22 | 23 | Use the following format for your output: 24 | 25 | [Short Description] 26 | \`\`\`[language]:[path/to/file] 27 | [code block] 28 | \`\`\` 29 | 30 | Begin fixing the codebase provide your solutions. 31 | 32 | My current codebase: 33 | 34 | {{CURRENT_CODEBASE}} 35 | 36 | `; 37 | -------------------------------------------------------------------------------- /packages/sdk/src/prompts/improve.ts: -------------------------------------------------------------------------------- 1 | export default `You are a senior software architect. You produce optimized, maintainable code that follows best practices. 2 | 3 | Your task is to review the current codebase and suggest improvements or optimizations. 4 | 5 | Rules: 6 | - Keep your suggestions concise and focused. Avoid unnecessary explanations or fluff. 7 | - Your output should be a series of specific, actionable changes. 8 | 9 | When approaching this task: 10 | 1. Carefully review the provided code. 11 | 2. Identify areas that could be improved in terms of efficiency, readability, or maintainability. 12 | 3. Consider best practices for the specific programming language used. 13 | 4. Think about potential optimizations that could enhance performance. 14 | 5. Look for opportunities to refactor or restructure the code for better organization. 15 | 16 | For each suggested change, provide: 17 | 1. A short description of the change (one line maximum). 18 | 2. The modified code block. 19 | 20 | Use the following format for your output: 21 | 22 | [Short Description] 23 | \`\`\`[language]:[path/to/file] 24 | [code block] 25 | \`\`\` 26 | 27 | Begin your analysis and provide your suggestions now. 
28 | 29 | My current codebase: 30 | 31 | {{CURRENT_CODEBASE}} 32 | 33 | `; 34 | -------------------------------------------------------------------------------- /packages/sdk/src/prompts/index.ts: -------------------------------------------------------------------------------- 1 | export { default as codegenPrompt } from "./codegen.js"; 2 | export { default as fixPrompt } from "./fix.js"; 3 | export { default as improvePrompt } from "./improve.js"; 4 | export { default as testgenPrompt } from "./testgen.js"; 5 | 6 | // Import all prompts for the prompts object 7 | import codegenPrompt from "./codegen.js"; 8 | import fixPrompt from "./fix.js"; 9 | import improvePrompt from "./improve.js"; 10 | import testgenPrompt from "./testgen.js"; 11 | 12 | // Export as a single object for convenience 13 | export const prompts = { 14 | codegen: codegenPrompt, 15 | fix: fixPrompt, 16 | improve: improvePrompt, 17 | testgen: testgenPrompt, 18 | }; 19 | 20 | // Default export for convenient importing 21 | export default prompts; 22 | -------------------------------------------------------------------------------- /packages/sdk/src/prompts/testgen.ts: -------------------------------------------------------------------------------- 1 | export default `You are a senior test developer. You produce optimized, maintainable code that follows best practices. 2 | 3 | Your task is to review the current codebase and create and improve missing tests for the codebase. 4 | 5 | Additional instructions: 6 | 7 | {{MESSAGE}} 8 | 9 | 10 | Rules: 11 | - Keep your suggestions concise and focused. Avoid unnecessary explanations or fluff. 12 | - Your output should be a series of specific, actionable changes. 13 | 14 | When approaching this task: 15 | 1. Carefully review the provided code. 16 | 2. Identify the area thats raising this issue or error and provide a fix. 17 | 3. Consider best practices for the specific programming language used. 18 | 19 | For each suggested change, provide: 20 | 1. 
A short description of the change (one line maximum). 21 | 2. The modified code block. 22 | 23 | Use the following format for your output: 24 | 25 | [Short Description] 26 | \`\`\`[language]:[path/to/file] 27 | [code block] 28 | \`\`\` 29 | 30 | Begin fixing the codebase provide your solutions. 31 | 32 | My current codebase: 33 | 34 | {{CURRENT_CODEBASE}} 35 | 36 | `; 37 | -------------------------------------------------------------------------------- /packages/sdk/src/template-parser.ts: -------------------------------------------------------------------------------- 1 | import { resolve } from "pathe"; 2 | import { existsSync } from "node:fs"; 3 | import { readFile } from "node:fs/promises"; 4 | import { VALID_PROMPTS } from "./constants"; 5 | 6 | // Update the PromptModule type to match the actual structure 7 | type PromptModule = { 8 | default: string; 9 | }; 10 | 11 | const builtInPrompts: Record Promise> = { 12 | fix: () => import("./prompts/fix"), 13 | improve: () => import("./prompts/improve"), 14 | codegen: () => import("./prompts/codegen"), 15 | testgen: () => import("./prompts/testgen"), 16 | }; 17 | 18 | export async function processPromptTemplate( 19 | template: string, 20 | codebase: string, 21 | vars: Record 22 | ): Promise { 23 | let result = template; 24 | 25 | // Process all other variables 26 | for (const [key, value] of Object.entries(vars)) { 27 | result = result.replace(new RegExp(`{{${key}}}`, "g"), value); 28 | } 29 | 30 | // Always process CURRENT_CODEBASE first 31 | result = result.replace(/{{CURRENT_CODEBASE}}/g, codebase); 32 | 33 | console.log("result", result); 34 | 35 | return result; 36 | } 37 | 38 | export async function resolvePrompt( 39 | promptFile: string 40 | ): Promise { 41 | console.log("promptFile", promptFile); 42 | 43 | // Check built-in prompts 44 | if (VALID_PROMPTS.has(promptFile)) { 45 | try { 46 | const mod = await builtInPrompts[promptFile]?.(); 47 | return mod?.default; // Now just return the string 48 | } catch { 
49 | console.error(`Built-in prompt "${promptFile}" not found`); 50 | return ""; 51 | } 52 | } 53 | 54 | if (promptFile.endsWith(".md") || promptFile.endsWith(".txt")) { 55 | const defaultPath = resolve(promptFile); 56 | if (!existsSync(defaultPath)) { 57 | return ""; 58 | } 59 | return await readFile(defaultPath, "utf8"); 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /packages/sdk/src/token-counter.ts: -------------------------------------------------------------------------------- 1 | import { Tiktoken, type TiktokenBPE } from "js-tiktoken/lite"; 2 | import type { TokenEncoder } from "./types"; 3 | 4 | const tokenizerCache = new Map(); 5 | 6 | const getTokenizer = async (encoder: TokenEncoder): Promise => { 7 | if (tokenizerCache.has(encoder)) { 8 | return tokenizerCache.get(encoder)!; 9 | } 10 | 11 | // A simplified map of encoders to their JSON definition files 12 | const encoderFiles: Record = { 13 | p50k: "p50k_base.json", 14 | o200k: "o200k_base.json", 15 | cl100k: "cl100k_base.json", 16 | }; 17 | 18 | const fileName = encoderFiles[encoder]; 19 | if (!fileName) { 20 | throw new Error(`Unsupported token encoder: ${encoder}`); 21 | } 22 | 23 | const response = await fetch(`https://tiktoken.pages.dev/js/${fileName}`); 24 | if (!response.ok) { 25 | throw new Error( 26 | `Failed to fetch tokenizer file for ${encoder}: ${response.statusText}` 27 | ); 28 | } 29 | 30 | const rank = (await response.json()) as TiktokenBPE; 31 | const tokenizer = new Tiktoken(rank); 32 | tokenizerCache.set(encoder, tokenizer); 33 | return tokenizer; 34 | }; 35 | 36 | const estimateTokens = (text: string): number => { 37 | return text.split(/[\s\p{P}]+/u).filter(Boolean).length; 38 | }; 39 | 40 | const getTokenCount = async ( 41 | text: string, 42 | encoder: TokenEncoder 43 | ): Promise => { 44 | if (!text) return 0; 45 | 46 | if (encoder === "simple") { 47 | return estimateTokens(text); 48 | } 49 | 50 | const tiktoken = await 
getTokenizer(encoder); 51 | return tiktoken.encode(text).length; 52 | }; 53 | 54 | export const countTokens = async ( 55 | text: string, 56 | encoder: TokenEncoder 57 | ): Promise => { 58 | if (!encoder || !text) return 0; 59 | return getTokenCount(text, encoder); 60 | }; 61 | 62 | export const SUPPORTED_MODELS = { 63 | simple: ["*"], 64 | p50k: ["text-davinci-003", "text-davinci-002", "code-davinci-002"], 65 | o200k: [ 66 | "gpt-4o-2024-11-20", 67 | "gpt-4o-2024-08-06", 68 | "gpt-4o-2024-05-13", 69 | "gpt-4o-mini-2024-07-18", 70 | ], 71 | cl100k: ["gpt-4", "gpt-3.5-turbo", "gpt-35-turbo"], 72 | } as const; 73 | -------------------------------------------------------------------------------- /packages/sdk/src/tree.ts: -------------------------------------------------------------------------------- 1 | import fs from "node:fs"; 2 | import { join, basename } from "pathe"; 3 | 4 | function generateTree( 5 | dir: string, 6 | level: number, 7 | prefix = "", 8 | isLast = true, 9 | maxLevel = 2, 10 | currentLevel = 0 11 | ): string { 12 | if (currentLevel >= maxLevel) return ""; 13 | 14 | let tree = 15 | currentLevel === 0 16 | ? "" 17 | : `${prefix}${isLast ? "└── " : "├── "}${basename(dir)}\n`; 18 | 19 | const files = fs.readdirSync(dir); 20 | const filteredFiles = files.filter( 21 | (file) => !file.startsWith(".") && file !== "node_modules" 22 | ); 23 | 24 | for (const [index, file] of filteredFiles.entries()) { 25 | const filePath = join(dir, file); 26 | const isDirectory = fs.statSync(filePath).isDirectory(); 27 | const newPrefix = 28 | currentLevel === 0 ? "" : prefix + (isLast ? " " : "│ "); 29 | const isLastItem = index === filteredFiles.length - 1; 30 | 31 | if (isDirectory) { 32 | tree += generateTree( 33 | filePath, 34 | level + 1, 35 | newPrefix, 36 | isLastItem, 37 | maxLevel, 38 | currentLevel + 1 39 | ); 40 | } else if (currentLevel < maxLevel) { 41 | tree += `${newPrefix}${isLastItem ? 
"└── " : "├── "}${file}\n`; 42 | } 43 | } 44 | 45 | return tree; 46 | } 47 | 48 | export function generateProjectTree(baseDir: string, maxLevel = 2): string { 49 | return ( 50 | "Project Structure:\n" + generateTree(baseDir, 1, "", true, maxLevel, 0) 51 | ); 52 | } 53 | -------------------------------------------------------------------------------- /packages/sdk/src/type-guards.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Type guards and branded types for improved type safety 3 | */ 4 | 5 | // Branded types for additional type safety 6 | export type GitHubToken = string & { __brand: "GitHubToken" }; 7 | export type RepoPath = `${string}/${string}` & { __brand: "RepoPath" }; 8 | export type GitHubUrl = `https://github.com/${RepoPath}` & { 9 | __brand: "GitHubUrl"; 10 | }; 11 | export type SemVer = `${number}.${number}.${number}` & { __brand: "SemVer" }; 12 | 13 | /** 14 | * Type guard to check if a string is a valid GitHub URL 15 | */ 16 | export function isValidGitHubUrl(url: string): url is GitHubUrl { 17 | const pattern = /^https:\/\/github\.com\/[\w-]+\/[\w.-]+(?:\/.*)?$/; 18 | return pattern.test(url); 19 | } 20 | 21 | /** 22 | * Type guard to check if a string is a valid repository path 23 | */ 24 | export function isValidRepoPath(path: string): path is RepoPath { 25 | const pattern = /^[\w-]+\/[\w.-]+$/; 26 | return pattern.test(path); 27 | } 28 | 29 | /** 30 | * Type guard to check if a token is a valid GitHub token format 31 | */ 32 | export function isValidGitHubToken(token: string): token is GitHubToken { 33 | // GitHub tokens can be: 34 | // - Classic: 40 character hex 35 | // - Fine-grained: ghp_ prefix 36 | // - Installation: ghs_ prefix 37 | // - OAuth: gho_ prefix 38 | const patterns = [ 39 | /^[a-f0-9]{40}$/, // Classic token 40 | /^ghp_[a-zA-Z0-9]{36}$/, // Fine-grained personal access token 41 | /^ghs_[a-zA-Z0-9]{36}$/, // GitHub App installation access token 42 | /^gho_[a-zA-Z0-9]{36}$/, // 
OAuth access token 43 | ]; 44 | 45 | return patterns.some((pattern) => pattern.test(token)); 46 | } 47 | 48 | /** 49 | * Type guard to check if a string is a valid semantic version 50 | */ 51 | export function isValidSemVer(version: string): version is SemVer { 52 | const pattern = /^\d+\.\d+\.\d+$/; 53 | return pattern.test(version); 54 | } 55 | 56 | /** 57 | * Create a branded GitHub token (with validation) 58 | */ 59 | export function createGitHubToken(token: string): GitHubToken { 60 | if (!isValidGitHubToken(token)) { 61 | throw new Error(`Invalid GitHub token format`); 62 | } 63 | return token as GitHubToken; 64 | } 65 | 66 | /** 67 | * Create a branded repo path (with validation) 68 | */ 69 | export function createRepoPath(owner: string, repo: string): RepoPath { 70 | const path = `${owner}/${repo}`; 71 | if (!isValidRepoPath(path)) { 72 | throw new Error(`Invalid repository path: ${path}`); 73 | } 74 | return path as RepoPath; 75 | } 76 | 77 | /** 78 | * Create a branded GitHub URL (with validation) 79 | */ 80 | export function createGitHubUrl(owner: string, repo: string): GitHubUrl { 81 | const url = `https://github.com/${owner}/${repo}`; 82 | if (!isValidGitHubUrl(url)) { 83 | throw new Error(`Invalid GitHub URL: ${url}`); 84 | } 85 | return url as GitHubUrl; 86 | } 87 | 88 | /** 89 | * Type guard for non-nullable values 90 | */ 91 | export function isNotNull(value: T | null | undefined): value is T { 92 | return value !== null && value !== undefined; 93 | } 94 | 95 | /** 96 | * Type guard for arrays 97 | */ 98 | export function isArray(value: unknown): value is T[] { 99 | return Array.isArray(value); 100 | } 101 | 102 | /** 103 | * Type guard for objects 104 | */ 105 | export function isObject(value: unknown): value is Record { 106 | return typeof value === "object" && value !== null && !Array.isArray(value); 107 | } 108 | 109 | /** 110 | * Type guard for strings 111 | */ 112 | export function isString(value: unknown): value is string { 113 | return 
typeof value === "string"; 114 | } 115 | 116 | /** 117 | * Type guard for numbers 118 | */ 119 | export function isNumber(value: unknown): value is number { 120 | return typeof value === "number" && !Number.isNaN(value); 121 | } 122 | 123 | /** 124 | * Assert that a value is defined (throws if not) 125 | */ 126 | export function assertDefined( 127 | value: T | null | undefined, 128 | message?: string 129 | ): asserts value is T { 130 | if (value === null || value === undefined) { 131 | throw new Error(message || "Value is null or undefined"); 132 | } 133 | } 134 | 135 | /** 136 | * Assert that a condition is true (throws if not) 137 | */ 138 | export function assert( 139 | condition: unknown, 140 | message?: string 141 | ): asserts condition { 142 | if (!condition) { 143 | throw new Error(message || "Assertion failed"); 144 | } 145 | } 146 | 147 | /** 148 | * Exhaustive check for switch statements 149 | */ 150 | export function exhaustiveCheck(value: never): never { 151 | throw new Error(`Unhandled case: ${value}`); 152 | } 153 | -------------------------------------------------------------------------------- /packages/sdk/src/types.ts: -------------------------------------------------------------------------------- 1 | export type TokenEncoder = "simple" | "p50k" | "o200k" | "cl100k"; 2 | export type TokenLimiter = "sequential" | "truncated"; 3 | 4 | export interface FileNode { 5 | name: string; 6 | path: string; 7 | type: "file" | "directory"; 8 | content?: string; // Only for files 9 | language?: string; // Detected language 10 | size?: number; // File size in bytes 11 | tokens?: number; // Token count 12 | lastModified?: Date; 13 | children?: FileNode[]; // Only for directories 14 | } 15 | 16 | export interface PerformanceMetrics { 17 | fetchDuration: number; 18 | parseFiles: number; 19 | tokenCountDuration: number; 20 | totalDuration: number; 21 | memoryUsed?: number; 22 | } 23 | 24 | export interface FetchMetadata { 25 | totalFiles: number; 26 | totalSize: 
number; 27 | totalTokens: number; 28 | fetchedAt: Date; 29 | source: string; // URL or local path 30 | gitProvider?: string; 31 | gitOwner?: string; 32 | gitRepo?: string; 33 | gitRef?: string; 34 | metrics?: PerformanceMetrics; 35 | } 36 | 37 | export interface FetchResult { 38 | root: FileNode; 39 | metadata: FetchMetadata; 40 | } 41 | 42 | export type OutputFormat = "markdown" | "json"; 43 | -------------------------------------------------------------------------------- /packages/sdk/src/utils-browser.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Browser-safe utility functions 3 | * These functions do not depend on any Node.js APIs 4 | */ 5 | 6 | export const detectLanguage = (fileName: string): string => { 7 | const ext = fileName.split(".").pop()?.toLowerCase(); 8 | 9 | const languageMap: Record = { 10 | // JavaScript/TypeScript 11 | js: "javascript", 12 | jsx: "javascript", 13 | ts: "typescript", 14 | tsx: "typescript", 15 | mjs: "javascript", 16 | cjs: "javascript", 17 | mts: "typescript", 18 | cts: "typescript", 19 | 20 | // Web 21 | html: "html", 22 | htm: "html", 23 | css: "css", 24 | scss: "scss", 25 | sass: "sass", 26 | less: "less", 27 | 28 | // Config 29 | json: "json", 30 | yaml: "yaml", 31 | yml: "yaml", 32 | toml: "toml", 33 | xml: "xml", 34 | ini: "ini", 35 | conf: "conf", 36 | 37 | // Programming languages 38 | py: "python", 39 | java: "java", 40 | c: "c", 41 | cpp: "cpp", 42 | cs: "csharp", 43 | go: "go", 44 | rs: "rust", 45 | php: "php", 46 | rb: "ruby", 47 | swift: "swift", 48 | kt: "kotlin", 49 | scala: "scala", 50 | r: "r", 51 | lua: "lua", 52 | dart: "dart", 53 | 54 | // Shell 55 | sh: "bash", 56 | bash: "bash", 57 | zsh: "bash", 58 | fish: "fish", 59 | ps1: "powershell", 60 | 61 | // Documentation 62 | md: "markdown", 63 | mdx: "markdown", 64 | rst: "restructuredtext", 65 | tex: "latex", 66 | 67 | // Other 68 | sql: "sql", 69 | dockerfile: "dockerfile", 70 | makefile: "makefile", 71 | 
cmake: "cmake", 72 | gradle: "gradle", 73 | vim: "vim", 74 | vue: "vue", 75 | svelte: "svelte", 76 | }; 77 | 78 | // Special cases for files without extensions 79 | const fileNameLower = fileName.toLowerCase(); 80 | if (fileNameLower === "dockerfile") return "dockerfile"; 81 | if (fileNameLower === "makefile") return "makefile"; 82 | if (fileNameLower === "cmakelists.txt") return "cmake"; 83 | 84 | return languageMap[ext || ""] || "text"; 85 | }; 86 | -------------------------------------------------------------------------------- /packages/sdk/src/utils.ts: -------------------------------------------------------------------------------- 1 | import { existsSync } from "node:fs"; 2 | import { parse, join, dirname } from "pathe"; 3 | 4 | export const findProjectRoot = (startDir: string): string => { 5 | let currentDir = startDir; 6 | while (currentDir !== parse(currentDir).root) { 7 | if (existsSync(join(currentDir, "package.json"))) { 8 | return currentDir; 9 | } 10 | currentDir = dirname(currentDir); 11 | } 12 | return startDir; 13 | }; 14 | 15 | export const detectLanguage = (fileName: string): string => { 16 | const ext = fileName.split(".").pop()?.toLowerCase(); 17 | 18 | const languageMap: Record = { 19 | // JavaScript/TypeScript 20 | js: "javascript", 21 | jsx: "javascript", 22 | ts: "typescript", 23 | tsx: "typescript", 24 | mjs: "javascript", 25 | cjs: "javascript", 26 | mts: "typescript", 27 | cts: "typescript", 28 | 29 | // Web 30 | html: "html", 31 | htm: "html", 32 | css: "css", 33 | scss: "scss", 34 | sass: "sass", 35 | less: "less", 36 | 37 | // Config 38 | json: "json", 39 | yaml: "yaml", 40 | yml: "yaml", 41 | toml: "toml", 42 | xml: "xml", 43 | ini: "ini", 44 | conf: "conf", 45 | 46 | // Programming languages 47 | py: "python", 48 | java: "java", 49 | c: "c", 50 | cpp: "cpp", 51 | cs: "csharp", 52 | go: "go", 53 | rs: "rust", 54 | php: "php", 55 | rb: "ruby", 56 | swift: "swift", 57 | kt: "kotlin", 58 | scala: "scala", 59 | r: "r", 60 | lua: "lua", 
61 | dart: "dart", 62 | 63 | // Shell 64 | sh: "bash", 65 | bash: "bash", 66 | zsh: "bash", 67 | fish: "fish", 68 | ps1: "powershell", 69 | 70 | // Documentation 71 | md: "markdown", 72 | mdx: "markdown", 73 | rst: "restructuredtext", 74 | tex: "latex", 75 | 76 | // Other 77 | sql: "sql", 78 | dockerfile: "dockerfile", 79 | makefile: "makefile", 80 | cmake: "cmake", 81 | gradle: "gradle", 82 | vim: "vim", 83 | vue: "vue", 84 | svelte: "svelte", 85 | }; 86 | 87 | // Special cases for files without extensions 88 | const fileNameLower = fileName.toLowerCase(); 89 | if (fileNameLower === "dockerfile") return "dockerfile"; 90 | if (fileNameLower === "makefile") return "makefile"; 91 | if (fileNameLower === "cmakelists.txt") return "cmake"; 92 | 93 | return languageMap[ext || ""] || "text"; 94 | }; 95 | 96 | /** 97 | * Create a simple hash from a string 98 | * Uses a basic hashing algorithm suitable for cache keys 99 | */ 100 | export function createHash(str: string): string { 101 | let hash = 0; 102 | for (let i = 0; i < str.length; i++) { 103 | const char = str.codePointAt(i) || 0; 104 | hash = (hash << 5) - hash + char; 105 | hash = hash & hash; // Convert to 32-bit integer 106 | } 107 | 108 | // Convert to hexadecimal string 109 | return Math.abs(hash).toString(16); 110 | } 111 | -------------------------------------------------------------------------------- /packages/sdk/src/utils/path.ts: -------------------------------------------------------------------------------- 1 | import { sep } from "node:path"; 2 | 3 | /** 4 | * Normalize path separators to forward slashes for cross-platform compatibility 5 | * This is especially useful in tests where we need consistent path representations 6 | */ 7 | export function normalizePathSeparators(path: string): string { 8 | return path.replace(/\\/g, "/"); 9 | } 10 | 11 | /** 12 | * Convert path to use platform-specific separators 13 | */ 14 | export function toPlatformPath(path: string): string { 15 | return 
path.replace(/[/\\]/g, sep); 16 | } 17 | -------------------------------------------------------------------------------- /packages/sdk/src/web/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./url-handler.js"; 2 | export * from "./cache.js"; 3 | export * from "./types.js"; 4 | export { handleWebFetch } from "./web-fetch.js"; 5 | export { fetchFromWeb } from "./sdk-web-fetch.js"; 6 | export * from "./github-api.js"; 7 | -------------------------------------------------------------------------------- /packages/sdk/src/web/types.ts: -------------------------------------------------------------------------------- 1 | export interface WebFetchConfig { 2 | url: string; 3 | cacheTTL?: number; 4 | branch?: string; 5 | noCache?: boolean; 6 | noApi?: boolean; 7 | githubToken?: string; 8 | } 9 | 10 | export interface GitCloneOptions { 11 | branch?: string; 12 | depth?: number; 13 | singleBranch?: boolean; 14 | } 15 | 16 | export interface CrawlOptions { 17 | maxDepth?: number; 18 | maxPages?: number; 19 | ignoreRobots?: boolean; 20 | ignoreCors?: boolean; 21 | followRedirects?: boolean; 22 | userAgent?: string; 23 | } 24 | 25 | export interface CrawlResult { 26 | url: string; 27 | content: string; 28 | links: string[]; 29 | depth: number; 30 | error?: string; 31 | } 32 | -------------------------------------------------------------------------------- /packages/sdk/src/web/web-fetch.ts: -------------------------------------------------------------------------------- 1 | import { join } from "node:path"; 2 | import type { ConsolaInstance } from "consola"; 3 | import { countTokens } from "../index.js"; 4 | import { parseURL, validateURL } from "./url-handler.js"; 5 | import { fetchFromWeb } from "./sdk-web-fetch.js"; 6 | import { WebCache } from "./cache.js"; 7 | 8 | export async function handleWebFetch( 9 | args: any, 10 | logger: ConsolaInstance, 11 | config?: any 12 | ): Promise { 13 | // Set a safety timeout for 
the entire operation 14 | const safetyTimeout = setTimeout( 15 | () => { 16 | logger.error("Operation timed out after 10 minutes"); 17 | process.exit(1); 18 | }, 19 | 10 * 60 * 1000 20 | ); // 10 minutes 21 | 22 | // Validate URL 23 | const validation = validateURL(args.url); 24 | if (!validation.valid) { 25 | logger.error(`Invalid URL: ${validation.error}`); 26 | process.exit(1); 27 | } 28 | 29 | // Parse URL for output filename 30 | const parsedUrl = parseURL(args.url); 31 | if (!parsedUrl) { 32 | logger.error("Failed to parse URL"); 33 | process.exit(1); 34 | } 35 | 36 | // Use provided config or args as fallback 37 | if (!config) { 38 | config = args; 39 | } 40 | 41 | // Prepare options for the SDK 42 | const fetchOptions = { 43 | format: config.format, 44 | verbose: config.verbose, 45 | tokenEncoder: config.tokenEncoder, 46 | maxTokens: config.maxTokens, 47 | extensions: config.extensions, 48 | excludeFiles: config.excludeFiles, 49 | includeFiles: config.includeFiles, 50 | excludeDirs: config.excludeDirs, 51 | includeDirs: config.includeDirs, 52 | projectTree: config.projectTree, 53 | disableLineNumbers: config.disableLineNumbers, 54 | tokenLimiter: config.tokenLimiter, 55 | templateVars: config.templateVars, 56 | // Web-specific options 57 | cacheTTL: args.cacheTTL, 58 | maxDepth: args.maxDepth, 59 | maxPages: args.maxPages, 60 | branch: args.branch, 61 | noCache: args.noCache, 62 | ignoreRobots: args.ignoreRobots, 63 | ignoreCors: args.ignoreCors, 64 | noApi: args.noApi, 65 | githubToken: args.githubToken, 66 | }; 67 | 68 | // Use the SDK's fetchFromWeb function 69 | const output = await fetchFromWeb(args.url, fetchOptions); 70 | 71 | // Calculate total tokens 72 | const totalTokens = 73 | typeof output === "string" 74 | ? 
await countTokens(output, config.tokenEncoder || "cl100k") 75 | : output.metadata.totalTokens; 76 | 77 | const originalCwd = process.cwd(); 78 | 79 | // Token count is already calculated above 80 | if (config.tokenCountOnly) { 81 | console.log(totalTokens); 82 | return; 83 | } 84 | 85 | // Output results 86 | if (args.dryRun) { 87 | if (typeof output === "string") { 88 | logger.log(output); 89 | } else { 90 | // For JSON format in dry-run, output the JSON 91 | console.log(JSON.stringify(output, null, 2)); 92 | } 93 | } else { 94 | if (typeof output === "string") { 95 | // Write markdown 96 | const outputFileName = 97 | args.outputFile || 98 | `${parsedUrl.domain.replace(/\./g, "-")}-analysis.md`; 99 | const outputPath = join(originalCwd, outputFileName); 100 | await import("node:fs").then((fs) => 101 | fs.promises.writeFile(outputPath, output) 102 | ); 103 | logger.success(`Output written to ${outputPath}`); 104 | } else { 105 | // Write JSON 106 | const outputFileName = 107 | args.outputFile || 108 | `${parsedUrl.domain.replace(/\./g, "-")}-analysis.json`; 109 | const outputPath = join(originalCwd, outputFileName); 110 | await import("node:fs").then((fs) => 111 | fs.promises.writeFile(outputPath, JSON.stringify(output, null, 2)) 112 | ); 113 | logger.success(`Output written to ${outputPath}`); 114 | } 115 | logger.info(`Total tokens: ${totalTokens.toLocaleString()}`); 116 | } 117 | 118 | // Show cache stats 119 | if (config.verbose >= 2) { 120 | const cache = new WebCache({ 121 | ttlHours: args.cacheTTL || 1, 122 | }); 123 | await cache.init(); 124 | const stats = await cache.getStats(); 125 | logger.info( 126 | `Cache stats: ${stats.entryCount} entries, ${stats.sizeMB.toFixed(2)}MB` 127 | ); 128 | } 129 | 130 | // Clear the safety timeout 131 | clearTimeout(safetyTimeout); 132 | } 133 | -------------------------------------------------------------------------------- /packages/sdk/src/worker.ts: 
--------------------------------------------------------------------------------
/**
 * Cloudflare Worker-compatible entry point for codefetch-sdk.
 *
 * Only APIs that function under Worker constraints are re-exported here:
 * no local file system, no child_process (git clone), and only ephemeral
 * TmpFS storage (10MB).
 */

// --- Configuration & constants (Worker-safe) ---
export {
  getDefaultConfig,
  mergeWithCliArgs,
  resolveCodefetchConfig,
  type CodefetchConfig,
} from "./config-worker.js";
export { VALID_ENCODERS, VALID_LIMITERS, VALID_PROMPTS } from "./constants.js";
export { countTokens } from "./token-counter.js";

// --- Markdown generation from in-memory content (no fs access) ---
export {
  generateMarkdownFromContent,
  type FileContent,
  type MarkdownFromContentOptions,
} from "./markdown-content.js";

// --- Web fetching (Worker variant exported under the standard name) ---
export { fetchFromWebWorker as fetchFromWeb } from "./web/sdk-web-fetch-worker.js";
export type { CrawlOptions, CrawlResult, WebFetchConfig } from "./web/types.js";
export type { FetchOptions } from "./fetch.js";

// --- Result types ---
export type {
  FetchMetadata,
  FetchResult,
  FileNode,
  OutputFormat,
  PerformanceMetrics,
  TokenEncoder,
  TokenLimiter,
} from "./types.js";
export { FetchResultImpl } from "./fetch-result.js";

// --- Prompt templates ---
export * from "./prompts/index.js";

// --- Tree manipulation helpers ---
export {
  calculateTreeMetrics,
  filesToTree,
  filterTree,
  findNodeByPath,
  sortTree,
  treeToFiles,
  walkTree,
} from "./tree-utils.js";

// --- Runtime environment detection ---
export { getCacheSizeLimit, isCloudflareWorker } from "./env.js";

// --- Misc utilities ---
export { detectLanguage } from "./utils-browser.js";

// --- Error classes and guards ---
export {
  CacheError,
  CodefetchError,
  ConfigError,
  GitHubError,
  NetworkError,
  ParseError,
  TokenLimitError,
  URLValidationError,
  isCodefetchError,
  isGitHubError,
  isTokenLimitError,
  wrapError,
} from "./errors.js";

// --- Streaming APIs ---
export {
  collectStream,
  createMarkdownStream,
  createTransformStream,
  filterStream,
  mapStream,
  streamGitHubFiles,
  type StreamOptions,
} from "./streaming.js";

// --- HTML conversion & GitHub tarballs (native DecompressionStream) ---
export { htmlToMarkdown } from "./web/html-to-markdown.js";
export { fetchGitHubTarball } from "./web/github-tarball.js";

// --- Enhanced cache integration ---
export {
  clearCache,
  createCacheStorage,
  deleteFromCache,
  fetchFromWebCached,
  withCache,
  type CacheOptions,
  type CacheStorage,
} from "./cache-enhanced.js";

// --- Type guards and branded types ---
export {
  assert,
  assertDefined,
  createGitHubToken,
  createGitHubUrl,
  createRepoPath,
  exhaustiveCheck,
  isArray,
  isNotNull,
  isNumber,
  isObject,
  isString,
  isValidGitHubToken,
  isValidGitHubUrl,
  isValidRepoPath,
  isValidSemVer,
  type GitHubToken,
  type GitHubUrl,
  type RepoPath,
  type SemVer,
} from "./type-guards.js";

// Note: The following are NOT exported as they require Node.js APIs:
// - collectFiles (requires fs)
// - generateMarkdown (requires fs for file reading)
// - generateProjectTree (requires fs)
// - collectFilesAsTree (requires fs)
// - File operations from local filesystem
// - Git clone operations 140 | -------------------------------------------------------------------------------- /packages/sdk/test/browser-exports.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, test, expect } from "vitest"; 2 | import * as browserExports from "../src/browser"; 3 | 4 | describe("Browser Exports", () => { 5 | test("should export all required browser-safe items", () => { 6 | // Classes and functions 7 | expect(browserExports.FetchResultImpl).toBeDefined(); 8 | expect(browserExports.countTokens).toBeDefined(); 9 | expect(browserExports.generateMarkdownFromContent).toBeDefined(); 10 | expect(browserExports.detectLanguage).toBeDefined(); 11 | 12 | // Constants 13 | expect(browserExports.SUPPORTED_MODELS).toBeDefined(); 14 | expect(browserExports.VALID_PROMPTS).toBeDefined(); 15 | expect(browserExports.VALID_ENCODERS).toBeDefined(); 16 | expect(browserExports.VALID_LIMITERS).toBeDefined(); 17 | 18 | // Prompts 19 | expect(browserExports.prompts).toBeDefined(); 20 | expect(browserExports.prompts.codegen).toBeDefined(); 21 | expect(browserExports.prompts.fix).toBeDefined(); 22 | expect(browserExports.prompts.improve).toBeDefined(); 23 | expect(browserExports.prompts.testgen).toBeDefined(); 24 | expect(browserExports.codegenPrompt).toBeDefined(); 25 | expect(browserExports.fixPrompt).toBeDefined(); 26 | expect(browserExports.improvePrompt).toBeDefined(); 27 | expect(browserExports.testgenPrompt).toBeDefined(); 28 | }); 29 | 30 | test("should not export Node.js dependent functions", () => { 31 | // These should NOT be exported in browser build 32 | expect((browserExports as any).generateMarkdown).toBeUndefined(); 33 | expect((browserExports as any).generateProjectTree).toBeUndefined(); 34 | expect((browserExports as any).collectFilesAsTree).toBeUndefined(); 35 | expect((browserExports as any).findProjectRoot).toBeUndefined(); 36 | expect((browserExports as any).fetchFromWeb).toBeUndefined(); 
37 | expect((browserExports as any).GitHubApiClient).toBeUndefined(); 38 | }); 39 | 40 | test("FetchResultImpl should work with browser-safe data", () => { 41 | const fileNode = { 42 | name: "test.js", 43 | path: "test.js", 44 | type: "file" as const, 45 | content: "console.log('hello');", 46 | }; 47 | 48 | const metadata = { 49 | fetchedAt: new Date(), 50 | source: "https://example.com", 51 | totalFiles: 1, 52 | totalSize: 21, 53 | totalTokens: 5, 54 | }; 55 | 56 | const result = new browserExports.FetchResultImpl(fileNode, metadata); 57 | 58 | expect(result.root).toEqual(fileNode); 59 | expect(result.metadata).toEqual(metadata); 60 | expect(result.getAllFiles()).toHaveLength(1); 61 | expect(result.toMarkdown()).toContain("console.log('hello');"); 62 | }); 63 | 64 | test("countTokens should work in browser context", async () => { 65 | const text = "Hello, world!"; 66 | const tokenCount = await browserExports.countTokens(text, "simple"); 67 | expect(tokenCount).toBeGreaterThan(0); 68 | }); 69 | 70 | test("detectLanguage should work correctly", () => { 71 | expect(browserExports.detectLanguage("test.js")).toBe("javascript"); 72 | expect(browserExports.detectLanguage("style.css")).toBe("css"); 73 | expect(browserExports.detectLanguage("README.md")).toBe("markdown"); 74 | expect(browserExports.detectLanguage("unknown.xyz")).toBe("text"); 75 | }); 76 | }); 77 | -------------------------------------------------------------------------------- /packages/sdk/test/cache-enhanced.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, expect, vi } from "vitest"; 2 | import { createCacheStorage, withCache } from "../src/cache-enhanced.js"; 3 | 4 | describe("Enhanced Cache Smoke Tests", () => { 5 | it("should create cache storage from Cache API", () => { 6 | const mockCache = { 7 | match: vi.fn(), 8 | put: vi.fn(), 9 | delete: vi.fn(), 10 | }; 11 | 12 | const storage = createCacheStorage(mockCache as any); 13 | 14 | 
expect(storage).toBeDefined(); 15 | expect(storage.type).toBe("cache-api"); 16 | expect(storage.instance).toBe(mockCache); 17 | }); 18 | 19 | it("should create cache storage from KV namespace", () => { 20 | const mockKV = { 21 | get: vi.fn(), 22 | put: vi.fn(), 23 | delete: vi.fn(), 24 | list: vi.fn(), 25 | getWithMetadata: vi.fn(), 26 | }; 27 | 28 | const storage = createCacheStorage(mockKV as any); 29 | 30 | expect(storage).toBeDefined(); 31 | expect(storage.type).toBe("kv"); 32 | expect(storage.instance).toBe(mockKV); 33 | }); 34 | 35 | it("should create cached function with decorator", async () => { 36 | let callCount = 0; 37 | const expensiveFunction = async (input: string) => { 38 | callCount++; 39 | return `Result: ${input}`; 40 | }; 41 | 42 | const cachedFunction = withCache( 43 | expensiveFunction, 44 | (input) => `cache-key-${input}`, 45 | 60 46 | ); 47 | 48 | // First call 49 | const result1 = await cachedFunction("test"); 50 | expect(result1).toBe("Result: test"); 51 | expect(callCount).toBe(1); 52 | 53 | // Note: In a real environment with cache, second call would use cache 54 | // In test environment without global cache, it will call function again 55 | const result2 = await cachedFunction("test"); 56 | expect(result2).toBe("Result: test"); 57 | 58 | // Different input should call function 59 | const result3 = await cachedFunction("different"); 60 | expect(result3).toBe("Result: different"); 61 | }); 62 | 63 | it("should handle cache options interface", () => { 64 | const options = { 65 | cacheKey: "test-key", 66 | ttl: 3600, 67 | cacheBehavior: "default" as const, 68 | namespace: "test-namespace", 69 | }; 70 | 71 | expect(options.cacheKey).toBe("test-key"); 72 | expect(options.ttl).toBe(3600); 73 | expect(options.cacheBehavior).toBe("default"); 74 | expect(options.namespace).toBe("test-namespace"); 75 | }); 76 | }); 77 | -------------------------------------------------------------------------------- /packages/sdk/test/cache/interface.test.ts: 
--------------------------------------------------------------------------------
/**
 * Tests for the cache interface contract.
 */

import { describe, it, expect } from "vitest";
import type {
  CacheInterface,
  CachedResult,
  CacheMetadata,
} from "../../src/cache/interface.js";

describe("Cache Interface", () => {
  // Builds an in-memory CacheInterface implementation backed by a Map.
  // The map is exposed as `storage` so later tests can inspect entry counts.
  function createMockCache(): CacheInterface & {
    storage: Map<string, CachedResult>;
  } {
    const entries = new Map<string, CachedResult>();

    return {
      storage: entries,

      async get(key: string): Promise<CachedResult | null> {
        const hit = entries.get(key);
        if (!hit) return null;

        // Evict lazily on read once the entry has passed its expiry time.
        const expiry = hit.metadata?.expiresAt;
        if (expiry && new Date(expiry).getTime() <= Date.now()) {
          entries.delete(key);
          return null;
        }

        return hit;
      },

      async set(key: string, value: any, ttl = 3600): Promise<void> {
        const createdAt = new Date();
        const expiresAt = new Date(createdAt.getTime() + ttl * 1000);

        const metadata: CacheMetadata = {
          url: key,
          fetchedAt: createdAt.toISOString(),
          expiresAt: expiresAt.toISOString(),
        };

        entries.set(key, {
          metadata,
          content: value,
          type: "memory",
        });
      },

      async delete(key: string): Promise<void> {
        entries.delete(key);
      },

      async clear(): Promise<void> {
        entries.clear();
      },

      async has(key: string): Promise<boolean> {
        return (await this.get(key)) !== null;
      },
    };
  }

  it("should implement all required methods", () => {
    const cache = createMockCache();

    expect(cache.get).toBeDefined();
    expect(cache.set).toBeDefined();
    expect(cache.delete).toBeDefined();
    expect(cache.clear).toBeDefined();
expect(cache.has).toBeDefined(); 81 | }); 82 | 83 | it("should store and retrieve values", async () => { 84 | const cache = createMockCache(); 85 | const testData = { foo: "bar", count: 42 }; 86 | 87 | await cache.set("test-key", testData); 88 | const result = await cache.get("test-key"); 89 | 90 | expect(result).not.toBeNull(); 91 | expect(result?.content).toEqual(testData); 92 | expect(result?.metadata.url).toBe("test-key"); 93 | }); 94 | 95 | it("should respect TTL", async () => { 96 | const cache = createMockCache(); 97 | 98 | // Set with 1 second TTL 99 | await cache.set("test-key", "value", 1); 100 | 101 | // Should exist immediately 102 | expect(await cache.has("test-key")).toBe(true); 103 | 104 | // Wait for expiration 105 | await new Promise((resolve) => setTimeout(resolve, 1100)); 106 | 107 | // Should be expired 108 | expect(await cache.has("test-key")).toBe(false); 109 | expect(await cache.get("test-key")).toBeNull(); 110 | }); 111 | 112 | it("should delete entries", async () => { 113 | const cache = createMockCache(); 114 | 115 | await cache.set("test-key", "value"); 116 | expect(await cache.has("test-key")).toBe(true); 117 | 118 | await cache.delete("test-key"); 119 | expect(await cache.has("test-key")).toBe(false); 120 | }); 121 | 122 | it("should clear all entries", async () => { 123 | const cache = createMockCache(); 124 | 125 | await cache.set("key1", "value1"); 126 | await cache.set("key2", "value2"); 127 | await cache.set("key3", "value3"); 128 | 129 | expect(cache.storage.size).toBe(3); 130 | 131 | await cache.clear(); 132 | 133 | expect(cache.storage.size).toBe(0); 134 | expect(await cache.has("key1")).toBe(false); 135 | expect(await cache.has("key2")).toBe(false); 136 | expect(await cache.has("key3")).toBe(false); 137 | }); 138 | }); 139 | -------------------------------------------------------------------------------- /packages/sdk/test/demo-github.test.ts: -------------------------------------------------------------------------------- 1 | 
import { describe, test, expect } from "vitest"; 2 | import { fetch } from "../src/index"; 3 | import { FetchResultImpl } from "../src/fetch-result"; 4 | 5 | describe("GitHub Repository Demo", () => { 6 | test("fetch sindresorhus/is-plain-obj repository", async () => { 7 | console.log( 8 | "\n🔍 Fetching https://github.com/sindresorhus/is-plain-obj...\n" 9 | ); 10 | 11 | // Fetch as JSON 12 | const result = (await fetch({ 13 | source: "https://github.com/sindresorhus/is-plain-obj", 14 | format: "json", 15 | })) as FetchResultImpl; 16 | 17 | // Display metadata 18 | console.log("📊 Repository Metadata:"); 19 | console.log(` Total Files: ${result.metadata.totalFiles}`); 20 | console.log( 21 | ` Total Size: ${(result.metadata.totalSize / 1024).toFixed(2)} KB` 22 | ); 23 | console.log(` Total Tokens: ${result.metadata.totalTokens}`); 24 | console.log(` Source: ${result.metadata.source}`); 25 | console.log(` Fetched At: ${result.metadata.fetchedAt.toISOString()}\n`); 26 | 27 | // List all files 28 | console.log("📁 Files in repository:"); 29 | const allFiles = result.getAllFiles(); 30 | for (const file of allFiles) { 31 | console.log( 32 | ` - ${file.path} (${file.size} bytes, ${file.tokens} tokens)` 33 | ); 34 | } 35 | 36 | // Show main file content 37 | const indexFile = result.getFileByPath("index.js"); 38 | if (indexFile?.content) { 39 | console.log("\n📄 Content of index.js:"); 40 | console.log("─".repeat(50)); 41 | console.log(indexFile.content); 42 | console.log("─".repeat(50)); 43 | } 44 | 45 | // Assertions 46 | expect(result).toBeDefined(); 47 | expect(result.metadata.totalFiles).toBeGreaterThan(0); 48 | expect(allFiles.some((f) => f.name === "index.js")).toBe(true); 49 | expect(allFiles.some((f) => f.name === "package.json")).toBe(true); 50 | }, 30_000); 51 | 52 | test("generate markdown for sindresorhus/is-plain-obj", async () => { 53 | console.log("\n📝 Generating markdown for the repository...\n"); 54 | 55 | const markdown = (await fetch({ 56 | source: 
"https://github.com/sindresorhus/is-plain-obj", 57 | format: "markdown", 58 | projectTree: 2, 59 | maxTokens: 5000, 60 | })) as string; 61 | 62 | console.log("📄 Generated Markdown (first 1000 characters):"); 63 | console.log("─".repeat(50)); 64 | console.log(markdown.slice(0, 1000)); 65 | console.log("..."); 66 | console.log("─".repeat(50)); 67 | console.log(`\nTotal markdown length: ${markdown.length} characters`); 68 | 69 | // Assertions 70 | expect(markdown).toBeDefined(); 71 | expect(markdown).toContain("Project Structure:"); 72 | expect(markdown).toContain("index.js"); 73 | expect(markdown).toContain("export default function isPlainObject"); 74 | }, 30_000); 75 | 76 | test("fetch with filtering options", async () => { 77 | console.log( 78 | "\n🎯 Fetching with filters (only .js files, exclude tests)...\n" 79 | ); 80 | 81 | const result = (await fetch({ 82 | source: "https://github.com/sindresorhus/is-plain-obj", 83 | format: "json", 84 | extensions: [".js"], 85 | excludeFiles: ["test.js", "*.test.js", "benchmark.js"], 86 | })) as FetchResultImpl; 87 | 88 | const allFiles = result.getAllFiles(); 89 | console.log("📁 Filtered files:"); 90 | for (const file of allFiles) { 91 | console.log(` - ${file.path}`); 92 | } 93 | 94 | // Assertions 95 | expect(allFiles.every((f) => f.name.endsWith(".js"))).toBe(true); 96 | expect(allFiles.some((f) => f.name === "test.js")).toBe(false); 97 | expect(allFiles.some((f) => f.name === "benchmark.js")).toBe(false); 98 | expect(allFiles.some((f) => f.name === "index.js")).toBe(true); 99 | }, 30_000); 100 | }); 101 | -------------------------------------------------------------------------------- /packages/sdk/test/errors.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, expect } from "vitest"; 2 | import { 3 | CodefetchError, 4 | GitHubError, 5 | TokenLimitError, 6 | ParseError, 7 | NetworkError, 8 | ConfigError, 9 | CacheError, 10 | URLValidationError, 11 | 
isCodefetchError, 12 | isGitHubError, 13 | isTokenLimitError, 14 | wrapError, 15 | } from "../src/errors.js"; 16 | 17 | describe("Error Classes Smoke Tests", () => { 18 | it("should create CodefetchError", () => { 19 | const error = new CodefetchError("Test error", "TEST_CODE"); 20 | 21 | expect(error).toBeInstanceOf(Error); 22 | expect(error).toBeInstanceOf(CodefetchError); 23 | expect(error.message).toBe("Test error"); 24 | expect(error.code).toBe("TEST_CODE"); 25 | expect(error.name).toBe("CodefetchError"); 26 | }); 27 | 28 | it("should create GitHubError", () => { 29 | const error = new GitHubError("API failed", 404, 100); 30 | 31 | expect(error).toBeInstanceOf(GitHubError); 32 | expect(error.status).toBe(404); 33 | expect(error.rateLimitRemaining).toBe(100); 34 | }); 35 | 36 | it("should create TokenLimitError", () => { 37 | const error = new TokenLimitError(1000, 1500, ["file1.ts", "file2.ts"]); 38 | 39 | expect(error).toBeInstanceOf(TokenLimitError); 40 | expect(error.limit).toBe(1000); 41 | expect(error.used).toBe(1500); 42 | expect(error.files).toHaveLength(2); 43 | }); 44 | 45 | it("should create ParseError", () => { 46 | const error = new ParseError("Parse failed", "test.ts", 10, 5); 47 | 48 | expect(error).toBeInstanceOf(ParseError); 49 | expect(error.filePath).toBe("test.ts"); 50 | expect(error.line).toBe(10); 51 | expect(error.column).toBe(5); 52 | }); 53 | 54 | it("should create NetworkError", () => { 55 | const error = new NetworkError("Connection failed", "https://example.com"); 56 | 57 | expect(error).toBeInstanceOf(NetworkError); 58 | expect(error.url).toBe("https://example.com"); 59 | }); 60 | 61 | it("should create ConfigError", () => { 62 | const error = new ConfigError("Invalid config", "config.json", "maxTokens"); 63 | 64 | expect(error).toBeInstanceOf(ConfigError); 65 | expect(error.configPath).toBe("config.json"); 66 | expect(error.invalidField).toBe("maxTokens"); 67 | }); 68 | 69 | it("should create CacheError", () => { 70 | const error = 
new CacheError("Cache write failed", "write", "test-key"); 71 | 72 | expect(error).toBeInstanceOf(CacheError); 73 | expect(error.operation).toBe("write"); 74 | expect(error.key).toBe("test-key"); 75 | }); 76 | 77 | it("should create URLValidationError", () => { 78 | const error = new URLValidationError( 79 | "Invalid URL", 80 | "not-a-url", 81 | "Invalid format" 82 | ); 83 | 84 | expect(error).toBeInstanceOf(URLValidationError); 85 | expect(error.url).toBe("not-a-url"); 86 | expect(error.reason).toBe("Invalid format"); 87 | }); 88 | 89 | it("should check error types with type guards", () => { 90 | const codefetchError = new CodefetchError("Test", "TEST"); 91 | const githubError = new GitHubError("Test", 404); 92 | const tokenError = new TokenLimitError(100, 200, []); 93 | 94 | expect(isCodefetchError(codefetchError)).toBe(true); 95 | expect(isGitHubError(githubError)).toBe(true); 96 | expect(isTokenLimitError(tokenError)).toBe(true); 97 | 98 | expect(isGitHubError(codefetchError)).toBe(false); 99 | expect(isTokenLimitError(githubError)).toBe(false); 100 | }); 101 | 102 | it("should wrap unknown errors", () => { 103 | const originalError = new Error("Original error"); 104 | const wrapped = wrapError(originalError); 105 | 106 | expect(wrapped).toBeInstanceOf(CodefetchError); 107 | expect(wrapped.message).toBe("Original error"); 108 | 109 | const stringError = wrapError("String error"); 110 | expect(stringError).toBeInstanceOf(CodefetchError); 111 | expect(stringError.message).toBe("String error"); 112 | }); 113 | }); 114 | -------------------------------------------------------------------------------- /packages/sdk/test/fetch-json.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, expect } from "vitest"; 2 | import { fetch } from "../src/fetch"; 3 | import { FetchResultImpl } from "../src/fetch-result"; 4 | import { join } from "pathe"; 5 | 6 | describe("fetch with JSON format", () => { 7 | const 
fixturesDir = join(__dirname, "fixtures", "sample-project"); 8 | 9 | it("should return FetchResult when format is json", async () => { 10 | const result = await fetch({ 11 | source: fixturesDir, 12 | format: "json", 13 | extensions: [".js", ".json"], 14 | }); 15 | 16 | expect(result).toBeInstanceOf(FetchResultImpl); 17 | 18 | if (result instanceof FetchResultImpl) { 19 | expect(result.root).toBeDefined(); 20 | expect(result.root.type).toBe("directory"); 21 | expect(result.metadata).toBeDefined(); 22 | expect(result.metadata.totalFiles).toBeGreaterThan(0); 23 | } 24 | }); 25 | 26 | it("should return markdown string when format is markdown", async () => { 27 | const result = await fetch({ 28 | source: fixturesDir, 29 | format: "markdown", 30 | extensions: [".js", ".json"], 31 | }); 32 | 33 | expect(typeof result).toBe("string"); 34 | expect(result).toContain("Project Structure:"); 35 | }); 36 | 37 | it("should access files by path", async () => { 38 | const result = await fetch({ 39 | source: fixturesDir, 40 | format: "json", 41 | extensions: [".js", ".json"], 42 | }); 43 | 44 | if (result instanceof FetchResultImpl) { 45 | const packageJson = result.getFileByPath("package.json"); 46 | expect(packageJson).toBeDefined(); 47 | expect(packageJson?.type).toBe("file"); 48 | expect(packageJson?.content).toBeDefined(); 49 | } 50 | }); 51 | 52 | it("should get all files as flat array", async () => { 53 | const result = await fetch({ 54 | source: fixturesDir, 55 | format: "json", 56 | extensions: [".js", ".json"], 57 | }); 58 | 59 | if (result instanceof FetchResultImpl) { 60 | const allFiles = result.getAllFiles(); 61 | expect(Array.isArray(allFiles)).toBe(true); 62 | expect(allFiles.length).toBeGreaterThan(0); 63 | expect(allFiles.every((f) => f.type === "file")).toBe(true); 64 | } 65 | }); 66 | 67 | it("should convert FetchResult to markdown", async () => { 68 | const result = await fetch({ 69 | source: fixturesDir, 70 | format: "json", 71 | extensions: [".js", ".json"], 
72 | }); 73 | 74 | if (result instanceof FetchResultImpl) { 75 | const markdown = result.toMarkdown(); 76 | expect(typeof markdown).toBe("string"); 77 | expect(markdown).toContain("Project Structure:"); 78 | expect(markdown).toContain("package.json"); 79 | } 80 | }); 81 | }); 82 | -------------------------------------------------------------------------------- /packages/sdk/test/fixtures/sample-project/index.js: -------------------------------------------------------------------------------- 1 | // Sample JavaScript file for testing 2 | function greet(name) { 3 | return `Hello, ${name}!`; 4 | } 5 | 6 | module.exports = { greet }; 7 | -------------------------------------------------------------------------------- /packages/sdk/test/fixtures/sample-project/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sample-project", 3 | "version": "1.0.0", 4 | "description": "A sample project for testing", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "MIT" 11 | } 12 | -------------------------------------------------------------------------------- /packages/sdk/test/github-api.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, test, expect, vi, beforeEach } from "vitest"; 2 | import { GitHubApiClient } from "../src/web/github-api"; 3 | 4 | describe("GitHub API Client", () => { 5 | let client: GitHubApiClient; 6 | const mockLogger = { 7 | info: vi.fn(), 8 | warn: vi.fn(), 9 | debug: vi.fn(), 10 | error: vi.fn(), 11 | }; 12 | 13 | beforeEach(() => { 14 | vi.clearAllMocks(); 15 | client = new GitHubApiClient( 16 | "testowner", 17 | "testrepo", 18 | mockLogger as any, 19 | {} 20 | ); 21 | }); 22 | 23 | describe("downloadZipArchive", () => { 24 | test("should handle missing Content-Length header gracefully", async () => { 25 | // Mock fetch response without 
Content-Length header 26 | const mockResponse = { 27 | ok: true, 28 | headers: { 29 | get: (name: string) => (name === "content-length" ? null : null), 30 | }, 31 | arrayBuffer: async () => new ArrayBuffer(1024), 32 | }; 33 | 34 | globalThis.fetch = vi.fn().mockResolvedValue(mockResponse); 35 | 36 | // Should not throw error 37 | const result = await client.downloadZipArchive(); 38 | 39 | expect(result).toBeInstanceOf(Buffer); 40 | expect(result.length).toBe(1024); 41 | 42 | // Should log warning 43 | expect(mockLogger.warn).toHaveBeenCalledWith( 44 | expect.stringContaining("Content-Length header") 45 | ); 46 | }); 47 | 48 | test("should log size when Content-Length header is present", async () => { 49 | // Mock fetch response with Content-Length header 50 | const mockResponse = { 51 | ok: true, 52 | headers: { 53 | get: (name: string) => (name === "content-length" ? "1048576" : null), // 1MB 54 | }, 55 | arrayBuffer: async () => new ArrayBuffer(1_048_576), 56 | }; 57 | 58 | globalThis.fetch = vi.fn().mockResolvedValue(mockResponse); 59 | 60 | const result = await client.downloadZipArchive(); 61 | 62 | expect(result).toBeInstanceOf(Buffer); 63 | expect(mockLogger.info).toHaveBeenCalledWith("Archive size: 1.00 MB"); 64 | }); 65 | 66 | test("should throw error for failed requests", async () => { 67 | const mockResponse = { 68 | ok: false, 69 | status: 404, 70 | statusText: "Not Found", 71 | }; 72 | 73 | globalThis.fetch = vi.fn().mockResolvedValue(mockResponse); 74 | 75 | await expect(client.downloadZipArchive()).rejects.toThrow( 76 | "Failed to download archive: 404 Not Found" 77 | ); 78 | }); 79 | }); 80 | 81 | describe("checkAccess", () => { 82 | test("should return accessible for public repository", async () => { 83 | const mockResponse = { 84 | ok: true, 85 | json: async () => ({ 86 | default_branch: "main", 87 | size: 1000, 88 | private: false, 89 | }), 90 | }; 91 | 92 | globalThis.fetch = vi.fn().mockResolvedValue(mockResponse); 93 | 94 | const result = await 
client.checkAccess(); 95 | 96 | expect(result).toEqual({ 97 | accessible: true, 98 | isPrivate: false, 99 | defaultBranch: "main", 100 | }); 101 | }); 102 | 103 | test("should handle 404 errors", async () => { 104 | const mockResponse = { 105 | ok: false, 106 | status: 404, 107 | }; 108 | 109 | globalThis.fetch = vi.fn().mockResolvedValue(mockResponse); 110 | 111 | const result = await client.checkAccess(); 112 | 113 | expect(result).toEqual({ 114 | accessible: false, 115 | isPrivate: true, 116 | defaultBranch: "main", 117 | }); 118 | }); 119 | }); 120 | }); 121 | -------------------------------------------------------------------------------- /packages/sdk/test/local-worker-test.js: -------------------------------------------------------------------------------- 1 | // Quick local test of Worker functionality 2 | import { 3 | fetchFromWeb, 4 | isCloudflareWorker, 5 | getCacheSizeLimit, 6 | countTokens, 7 | htmlToMarkdown, 8 | } from "../dist-worker/worker.mjs"; 9 | 10 | console.log("=== Cloudflare Worker SDK Test ===\n"); 11 | 12 | // Test 1: Environment detection 13 | console.log("1. Environment Detection:"); 14 | console.log(` isCloudflareWorker: ${isCloudflareWorker}`); 15 | console.log(` Cache size limit: ${getCacheSizeLimit() / 1024 / 1024}MB`); 16 | console.log(` ✓ Environment detection working\n`); 17 | 18 | // Test 2: HTML to Markdown 19 | console.log("2. HTML to Markdown:"); 20 | const html = "

Test

This is a test

"; 21 | const markdown = htmlToMarkdown(html); 22 | console.log(` Input: ${html}`); 23 | console.log(` Output: ${markdown.trim()}`); 24 | console.log(` ✓ HTML conversion working\n`); 25 | 26 | // Test 3: Token counting 27 | console.log("3. Token Counting:"); 28 | const text = "Hello, World! This is a test."; 29 | countTokens(text, "cl100k") 30 | .then((tokens) => { 31 | console.log(` Text: "${text}"`); 32 | console.log(` Tokens: ${tokens}`); 33 | console.log(` ✓ Token counting working\n`); 34 | 35 | // Test 4: Web fetching (requires network) 36 | console.log("4. Web Fetching (example.com):"); 37 | return fetchFromWeb("https://example.com", { 38 | maxPages: 1, 39 | maxDepth: 0, 40 | verbose: 0, 41 | }); 42 | }) 43 | .then((result) => { 44 | // fetchFromWeb can return either a string (markdown) or FetchResult object 45 | const markdown = typeof result === "string" ? result : result.markdown; 46 | const hasFiles = typeof result === "object" && result.files; 47 | 48 | console.log(` Result type: ${typeof result}`); 49 | if (hasFiles) { 50 | console.log(` Files fetched: ${result.files.length}`); 51 | } 52 | console.log(` Markdown length: ${markdown.length} chars`); 53 | console.log( 54 | ` Contains "Example Domain": ${markdown.includes("Example Domain")}` 55 | ); 56 | console.log(` ✓ Web fetching working\n`); 57 | 58 | console.log("=== All tests passed! 
==="); 59 | }) 60 | .catch((error) => { 61 | console.error(" ✗ Error:", error.message); 62 | console.error(error.stack); 63 | process.exit(1); 64 | }); 65 | -------------------------------------------------------------------------------- /packages/sdk/test/mocks/server.ts: -------------------------------------------------------------------------------- 1 | import { setupServer } from "msw/node"; 2 | import { http, HttpResponse } from "msw"; 3 | import fs from "node:fs/promises"; 4 | import path from "node:path"; 5 | import { fileURLToPath } from "node:url"; 6 | 7 | // Convert __dirname to be compatible with ES Modules 8 | const __filename = fileURLToPath(import.meta.url); 9 | const __dirname = path.dirname(__filename); 10 | 11 | const handlers = [ 12 | http.get("https://tiktoken.pages.dev/js/:fileName", async ({ params }) => { 13 | const { fileName } = params; 14 | const filePath = path.resolve( 15 | __dirname, 16 | `../fixtures/tiktoken/${fileName}` 17 | ); 18 | 19 | try { 20 | const fileContent = await fs.readFile(filePath); 21 | return new HttpResponse(fileContent, { 22 | headers: { 23 | "Content-Type": "application/json", 24 | }, 25 | }); 26 | } catch { 27 | return new HttpResponse(null, { status: 404 }); 28 | } 29 | }), 30 | http.get("https://codeload.github.com/:owner/:repo/tar.gz/:ref", () => { 31 | // This is a placeholder for a more sophisticated mock if needed 32 | return new HttpResponse(null, { status: 200 }); 33 | }), 34 | 35 | http.get("https://api.github.com/repos/:owner/:repo", () => { 36 | // This is a placeholder for a more sophisticated mock if needed 37 | return new HttpResponse( 38 | JSON.stringify({ 39 | full_name: "test/repo", 40 | stargazers_count: 123, 41 | }), 42 | { 43 | headers: { 44 | "Content-Type": "application/json", 45 | }, 46 | } 47 | ); 48 | }), 49 | ]; 50 | 51 | export const server = setupServer(...handlers); 52 | -------------------------------------------------------------------------------- /packages/sdk/test/setup.ts: 
-------------------------------------------------------------------------------- 1 | import { beforeAll, afterEach, afterAll } from "vitest"; 2 | import { server } from "./mocks/server.js"; 3 | 4 | beforeAll(() => server.listen({ onUnhandledRequest: "error" })); 5 | afterAll(() => server.close()); 6 | afterEach(() => server.resetHandlers()); 7 | -------------------------------------------------------------------------------- /packages/sdk/test/streaming.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, expect } from "vitest"; 2 | import { 3 | createMarkdownStream, 4 | createTransformStream, 5 | collectStream, 6 | filterStream, 7 | mapStream, 8 | } from "../src/streaming.js"; 9 | import type { FileContent } from "../src/markdown-content.js"; 10 | 11 | describe("Streaming API Smoke Tests", () => { 12 | const mockFiles: FileContent[] = [ 13 | { path: "file1.ts", content: "console.log('hello');" }, 14 | { path: "file2.ts", content: "export default {};" }, 15 | ]; 16 | 17 | async function* generateMockFiles(): AsyncGenerator { 18 | for (const file of mockFiles) { 19 | yield file; 20 | } 21 | } 22 | 23 | it("should create markdown stream", async () => { 24 | const stream = createMarkdownStream(generateMockFiles()); 25 | 26 | expect(stream).toBeInstanceOf(ReadableStream); 27 | 28 | // Basic test that stream can be read 29 | const reader = stream.getReader(); 30 | const { value, done } = await reader.read(); 31 | expect(done).toBe(false); 32 | expect(value).toBeDefined(); 33 | reader.releaseLock(); 34 | }); 35 | 36 | it("should create transform stream", async () => { 37 | const transform = createTransformStream((file) => file.path); 38 | 39 | expect(transform).toBeInstanceOf(TransformStream); 40 | expect(transform.readable).toBeDefined(); 41 | expect(transform.writable).toBeDefined(); 42 | }); 43 | 44 | it("should collect stream into array", async () => { 45 | const collected = await 
collectStream(generateMockFiles()); 46 | 47 | expect(collected).toHaveLength(2); 48 | expect(collected[0].path).toBe("file1.ts"); 49 | expect(collected[1].path).toBe("file2.ts"); 50 | }); 51 | 52 | it("should filter stream items", async () => { 53 | const filtered = filterStream(generateMockFiles(), (file) => 54 | file.path.includes("file1") 55 | ); 56 | 57 | const result = await collectStream(filtered); 58 | expect(result).toHaveLength(1); 59 | expect(result[0].path).toBe("file1.ts"); 60 | }); 61 | 62 | it("should map stream items", async () => { 63 | const mapped = mapStream(generateMockFiles(), (file) => file.path); 64 | 65 | const result = await collectStream(mapped); 66 | expect(result).toEqual(["file1.ts", "file2.ts"]); 67 | }); 68 | 69 | it("should handle async filter predicate", async () => { 70 | const filtered = filterStream(generateMockFiles(), async (file) => { 71 | // Simulate async operation 72 | await new Promise((resolve) => setTimeout(resolve, 1)); 73 | return file.path.includes("file2"); 74 | }); 75 | 76 | const result = await collectStream(filtered); 77 | expect(result).toHaveLength(1); 78 | expect(result[0].path).toBe("file2.ts"); 79 | }); 80 | 81 | it("should handle async map function", async () => { 82 | const mapped = mapStream(generateMockFiles(), async (file) => { 83 | // Simulate async operation 84 | await new Promise((resolve) => setTimeout(resolve, 1)); 85 | return file.path.toUpperCase(); 86 | }); 87 | 88 | const result = await collectStream(mapped); 89 | expect(result).toEqual(["FILE1.TS", "FILE2.TS"]); 90 | }); 91 | }); 92 | -------------------------------------------------------------------------------- /packages/sdk/test/tree-utils.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, expect } from "vitest"; 2 | import { 3 | filesToTree, 4 | treeToFiles, 5 | findNodeByPath, 6 | walkTree, 7 | calculateTreeMetrics, 8 | sortTree, 9 | filterTree, 10 | } from 
"../src/tree-utils.js"; 11 | import type { FileContent } from "../src/markdown-content.js"; 12 | 13 | describe("Tree Utils Smoke Tests", () => { 14 | const mockFiles: FileContent[] = [ 15 | { path: "src/index.ts", content: "export default {}" }, 16 | { path: "src/utils/helper.ts", content: "export function help() {}" }, 17 | { path: "README.md", content: "# Test Project" }, 18 | ]; 19 | 20 | it("should convert files to tree structure", () => { 21 | const tree = filesToTree(mockFiles); 22 | 23 | expect(tree).toBeDefined(); 24 | expect(tree.type).toBe("directory"); 25 | expect(tree.children).toBeDefined(); 26 | expect(tree.children?.length).toBeGreaterThan(0); 27 | }); 28 | 29 | it("should convert tree back to files", () => { 30 | const tree = filesToTree(mockFiles); 31 | const files = treeToFiles(tree); 32 | 33 | expect(files).toHaveLength(mockFiles.length); 34 | expect(files[0].path).toBeDefined(); 35 | expect(files[0].content).toBeDefined(); 36 | }); 37 | 38 | it("should find node by path", () => { 39 | const tree = filesToTree(mockFiles); 40 | const node = findNodeByPath(tree, "src"); 41 | 42 | expect(node).toBeDefined(); 43 | expect(node?.type).toBe("directory"); 44 | }); 45 | 46 | it("should walk tree with callback", () => { 47 | const tree = filesToTree(mockFiles); 48 | let nodeCount = 0; 49 | 50 | walkTree(tree, () => { 51 | nodeCount++; 52 | }); 53 | 54 | expect(nodeCount).toBeGreaterThan(0); 55 | }); 56 | 57 | it("should calculate tree metrics", () => { 58 | const tree = filesToTree(mockFiles); 59 | const metrics = calculateTreeMetrics(tree); 60 | 61 | expect(metrics.totalFiles).toBe(3); 62 | expect(metrics.totalSize).toBeGreaterThan(0); 63 | expect(metrics.totalTokens).toBeDefined(); 64 | }); 65 | 66 | it("should sort tree nodes", () => { 67 | const tree = filesToTree(mockFiles); 68 | const sorted = sortTree(tree); 69 | 70 | expect(sorted).toBeDefined(); 71 | expect(sorted.children).toBeDefined(); 72 | }); 73 | 74 | it("should filter tree based on 
predicate", () => { 75 | const tree = filesToTree(mockFiles); 76 | const filtered = filterTree(tree, (node) => !node.path.includes("utils")); 77 | 78 | expect(filtered).toBeDefined(); 79 | }); 80 | }); 81 | -------------------------------------------------------------------------------- /packages/sdk/test/type-guards.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, expect } from "vitest"; 2 | import { 3 | isValidGitHubUrl, 4 | isValidRepoPath, 5 | isValidGitHubToken, 6 | isValidSemVer, 7 | createGitHubToken, 8 | createRepoPath, 9 | createGitHubUrl, 10 | isNotNull, 11 | isArray, 12 | isObject, 13 | isString, 14 | isNumber, 15 | assertDefined, 16 | assert, 17 | } from "../src/type-guards.js"; 18 | 19 | describe("Type Guards Smoke Tests", () => { 20 | it("should validate GitHub URLs", () => { 21 | expect(isValidGitHubUrl("https://github.com/owner/repo")).toBe(true); 22 | expect(isValidGitHubUrl("https://github.com/owner/repo/tree/main")).toBe( 23 | true 24 | ); 25 | expect(isValidGitHubUrl("https://example.com/owner/repo")).toBe(false); 26 | expect(isValidGitHubUrl("not-a-url")).toBe(false); 27 | }); 28 | 29 | it("should validate repo paths", () => { 30 | expect(isValidRepoPath("owner/repo")).toBe(true); 31 | expect(isValidRepoPath("owner-name/repo-name")).toBe(true); 32 | expect(isValidRepoPath("owner/repo/extra")).toBe(false); 33 | expect(isValidRepoPath("justowner")).toBe(false); 34 | }); 35 | 36 | it("should validate GitHub tokens", () => { 37 | // Classic token (40 char hex) 38 | expect(isValidGitHubToken("a".repeat(40))).toBe(true); 39 | 40 | // Fine-grained PAT 41 | expect(isValidGitHubToken("ghp_" + "a".repeat(36))).toBe(true); 42 | 43 | // Invalid tokens 44 | expect(isValidGitHubToken("invalid-token")).toBe(false); 45 | expect(isValidGitHubToken("ghp_short")).toBe(false); 46 | }); 47 | 48 | it("should validate semantic versions", () => { 49 | expect(isValidSemVer("1.2.3")).toBe(true); 50 | 
expect(isValidSemVer("0.0.1")).toBe(true); 51 | expect(isValidSemVer("1.2")).toBe(false); 52 | expect(isValidSemVer("v1.2.3")).toBe(false); 53 | }); 54 | 55 | it("should create branded types", () => { 56 | expect(() => createGitHubToken("a".repeat(40))).not.toThrow(); 57 | expect(() => createGitHubToken("invalid")).toThrow(); 58 | 59 | expect(() => createRepoPath("owner", "repo")).not.toThrow(); 60 | expect(() => createRepoPath("", "repo")).toThrow(); 61 | 62 | expect(() => createGitHubUrl("owner", "repo")).not.toThrow(); 63 | }); 64 | 65 | it("should check basic type guards", () => { 66 | expect(isNotNull("value")).toBe(true); 67 | expect(isNotNull(null)).toBe(false); 68 | expect(isNotNull(undefined)).toBe(false); 69 | 70 | expect(isArray([1, 2, 3])).toBe(true); 71 | expect(isArray("not array")).toBe(false); 72 | 73 | expect(isObject({ key: "value" })).toBe(true); 74 | expect(isObject([])).toBe(false); 75 | expect(isObject(null)).toBe(false); 76 | 77 | expect(isString("hello")).toBe(true); 78 | expect(isString(123)).toBe(false); 79 | 80 | expect(isNumber(123)).toBe(true); 81 | expect(isNumber("123")).toBe(false); 82 | expect(isNumber(Number.NaN)).toBe(false); 83 | }); 84 | 85 | it("should assert defined values", () => { 86 | expect(() => assertDefined("value")).not.toThrow(); 87 | expect(() => assertDefined(null)).toThrow(); 88 | expect(() => assertDefined(undefined)).toThrow(); 89 | }); 90 | 91 | it("should assert conditions", () => { 92 | expect(() => assert(true)).not.toThrow(); 93 | expect(() => assert(1 === 1)).not.toThrow(); 94 | expect(() => assert(false)).toThrow(); 95 | expect(() => assert(null)).toThrow(); 96 | }); 97 | }); 98 | -------------------------------------------------------------------------------- /packages/sdk/test/verify-worker-build.js: -------------------------------------------------------------------------------- 1 | // Verify Worker build without network dependencies 2 | import * as worker from "../dist-worker/worker.mjs"; 3 | 4 | 
console.log("=== Worker Build Verification ===\n"); 5 | 6 | // Check all expected exports 7 | const expectedExports = [ 8 | "fetchFromWeb", 9 | "countTokens", 10 | "htmlToMarkdown", 11 | "isCloudflareWorker", 12 | "getCacheSizeLimit", 13 | "generateMarkdown", 14 | "VALID_ENCODERS", 15 | "VALID_LIMITERS", 16 | "VALID_PROMPTS", 17 | ]; 18 | 19 | console.log("Checking exports:"); 20 | let allExportsFound = true; 21 | for (const exp of expectedExports) { 22 | const found = exp in worker; 23 | console.log(` ${found ? "✓" : "✗"} ${exp}`); 24 | if (!found) allExportsFound = false; 25 | } 26 | 27 | // Check that Node-specific exports are NOT included 28 | console.log("\nChecking excluded exports (should NOT be present):"); 29 | const excludedExports = ["collectFiles", "fetchFiles"]; 30 | let noExcludedExports = true; 31 | for (const exp of excludedExports) { 32 | const found = exp in worker; 33 | console.log(` ${found ? "✗" : "✓"} ${exp} is not exported`); 34 | if (found) noExcludedExports = false; 35 | } 36 | 37 | // Test basic functionality 38 | console.log("\nBasic functionality tests:"); 39 | 40 | // 1. Environment detection 41 | console.log( 42 | ` Environment: ${worker.isCloudflareWorker ? "Worker" : "Node.js"}` 43 | ); 44 | console.log(` Cache limit: ${worker.getCacheSizeLimit() / 1024 / 1024}MB`); 45 | 46 | // 2. Constants 47 | console.log(` Valid encoders: ${[...worker.VALID_ENCODERS].join(", ")}`); 48 | 49 | // 3. HTML conversion 50 | const html = "
<h1>Test</h1>
"; 51 | const markdown = worker.htmlToMarkdown(html); 52 | console.log(` HTML->MD: "${html}" => "${markdown.trim()}"`); 53 | 54 | // 4. Token counting (async) 55 | worker 56 | .countTokens("test", "cl100k") 57 | .then((tokens) => { 58 | console.log(` Token count: ${tokens} tokens for "test"`); 59 | 60 | // Summary 61 | console.log("\n=== Summary ==="); 62 | if (allExportsFound && noExcludedExports) { 63 | console.log("✓ All checks passed!"); 64 | console.log("✓ Worker build is ready for deployment"); 65 | } else { 66 | console.log("✗ Some checks failed"); 67 | process.exit(1); 68 | } 69 | }) 70 | .catch((error_) => { 71 | console.error(" ✗ Token counting error:", error_.message); 72 | process.exit(1); 73 | }); 74 | -------------------------------------------------------------------------------- /packages/sdk/test/worker-test-example.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Test Cloudflare Worker using codefetch-sdk/worker 3 | */ 4 | 5 | import { fetchFromWeb, isCloudflareWorker } from "../src/worker"; 6 | 7 | interface ExportedHandler { 8 | fetch(request: Request, env: T): Promise | Response; 9 | } 10 | 11 | export interface Env { 12 | GITHUB_TOKEN?: string; 13 | } 14 | 15 | export default { 16 | async fetch(request: Request, _env: Env): Promise { 17 | const url = new URL(request.url); 18 | 19 | // Health check 20 | if (url.pathname === "/health") { 21 | return new Response( 22 | JSON.stringify({ 23 | status: "ok", 24 | isCloudflareWorker, 25 | hasWebAPIs: typeof caches !== "undefined", 26 | }), 27 | { 28 | headers: { "Content-Type": "application/json" }, 29 | } 30 | ); 31 | } 32 | 33 | // Test fetching a small public repository 34 | if (url.pathname === "/test") { 35 | try { 36 | const result = await fetchFromWeb( 37 | "https://github.com/octocat/Hello-World", 38 | { 39 | extensions: [".md"], 40 | verbose: 1, 41 | // Using type assertion for noCache option 42 | } as any 43 | ); 44 | 45 | // Check if result 
is a string (markdown) 46 | return typeof result === "string" 47 | ? new Response(result, { 48 | headers: { "Content-Type": "text/markdown" }, 49 | }) 50 | : new Response(JSON.stringify(result), { 51 | headers: { "Content-Type": "application/json" }, 52 | }); 53 | } catch (error) { 54 | return new Response( 55 | JSON.stringify({ 56 | error: error instanceof Error ? error.message : String(error), 57 | stack: error instanceof Error ? error.stack : undefined, 58 | }), 59 | { 60 | status: 500, 61 | headers: { "Content-Type": "application/json" }, 62 | } 63 | ); 64 | } 65 | } 66 | 67 | return new Response("Codefetch Worker Test", { status: 200 }); 68 | }, 69 | } satisfies ExportedHandler; 70 | -------------------------------------------------------------------------------- /packages/sdk/test/worker.test.ts: -------------------------------------------------------------------------------- 1 | import { expect, test, describe } from "vitest"; 2 | import { isCloudflareWorker, getCacheSizeLimit } from "../src/env.js"; 3 | 4 | describe("Worker environment detection", () => { 5 | test("should detect non-Worker environment in tests", () => { 6 | // In test environment, we're not in a Worker 7 | expect(isCloudflareWorker).toBe(false); 8 | }); 9 | 10 | test("should return appropriate cache limits", () => { 11 | const limit = getCacheSizeLimit(); 12 | // In non-Worker environment, should be 100MB 13 | expect(limit).toBe(100 * 1024 * 1024); 14 | }); 15 | }); 16 | 17 | describe("Worker exports", () => { 18 | test("should export Worker-safe APIs", async () => { 19 | // Dynamically import to test the exports 20 | const workerModule = await import("../src/worker.js"); 21 | 22 | // Check core exports exist 23 | expect(workerModule.fetchFromWeb).toBeDefined(); 24 | expect(workerModule.countTokens).toBeDefined(); 25 | expect(workerModule.htmlToMarkdown).toBeDefined(); 26 | expect(workerModule.generateMarkdownFromContent).toBeDefined(); 27 | 28 | // Check that Node-specific exports are NOT 
included 29 | expect((workerModule as any).collectFiles).toBeUndefined(); 30 | expect((workerModule as any).fetchFiles).toBeUndefined(); 31 | }); 32 | }); 33 | -------------------------------------------------------------------------------- /packages/sdk/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.base.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "moduleResolution": "bundler", 6 | "target": "ES2022" 7 | }, 8 | "include": ["src/**/*", "test/**/*", "vitest.config.ts"], 9 | "exclude": [ 10 | "node_modules", 11 | "dist", 12 | "dist-worker", 13 | "coverage", 14 | "test/worker-runtime-test.ts" 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /packages/sdk/vitest.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from "vitest/config"; 2 | 3 | export default defineConfig({ 4 | test: { 5 | globals: true, 6 | environment: "node", 7 | setupFiles: ["./test/setup.ts"], 8 | coverage: { 9 | provider: "v8", 10 | reporter: ["text", "json", "html", "lcov"], 11 | exclude: [ 12 | "node_modules/**", 13 | "dist/**", 14 | "dist-worker/**", 15 | "test/**", 16 | "examples/**", 17 | "**/*.d.ts", 18 | "**/*.test.ts", 19 | "**/*.spec.ts", 20 | "build.*.config.ts", 21 | "vitest.config.ts", 22 | ], 23 | include: ["src/**/*.ts"], 24 | all: true, 25 | thresholds: { 26 | lines: 35, 27 | functions: 35, 28 | branches: 35, 29 | statements: 35, 30 | }, 31 | }, 32 | testTimeout: 30_000, 33 | hookTimeout: 30_000, 34 | }, 35 | }); 36 | -------------------------------------------------------------------------------- /playground/.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore all files in output directories 2 | output/ 3 | 4 | # Keep the .gitkeep files to maintain folder structure 5 | !output/**/.gitkeep 
-------------------------------------------------------------------------------- /playground/README.md: -------------------------------------------------------------------------------- 1 | # Codefetch SDK Playground 2 | 3 | This playground demonstrates real-world programmatic usage of the `codefetch-sdk` package. 4 | 5 | ## Setup 6 | 7 | Before running the examples, make sure to: 8 | 9 | 1. Build the SDK: 10 | 11 | ```bash 12 | cd ../packages/sdk 13 | npm install 14 | npm run build 15 | ``` 16 | 17 | 2. Install playground dependencies: 18 | ```bash 19 | cd playground 20 | npm install 21 | ``` 22 | 23 | ## Examples 24 | 25 | ### 1. Analyze GitHub Repository (`analyze-github-repo.js`) 26 | 27 | Clones a GitHub repository and analyzes its codebase using the SDK. 28 | 29 | ```bash 30 | npm run analyze-github 31 | ``` 32 | 33 | Features: 34 | 35 | - Clones repository to temp directory 36 | - Collects files with specific extensions 37 | - Counts tokens in source files 38 | - Generates markdown documentation 39 | - Creates analysis prompts 40 | 41 | ### 2. Generate Project Documentation (`generate-docs.js`) 42 | 43 | Generates comprehensive documentation for any TypeScript/JavaScript project. 44 | 45 | ```bash 46 | npm run generate-docs 47 | # or with a specific path: 48 | node generate-docs.js /path/to/project 49 | ``` 50 | 51 | Features: 52 | 53 | - Scans project for source files 54 | - Generates API documentation 55 | - Creates component documentation 56 | - Produces full project overview 57 | - Generates README template 58 | 59 | ### 3. Code Analysis for AI (`code-analyzer.js`) 60 | 61 | Prepares code for AI analysis with optimized prompts for different tasks. 62 | 63 | ```bash 64 | npm run analyze-code 65 | # or with options: 66 | node code-analyzer.js /path/to/code review 67 | node code-analyzer.js . refactor 68 | node code-analyzer.js . test 69 | node code-analyzer.js . 
document 70 | ``` 71 | 72 | Features: 73 | 74 | - Smart file selection based on token limits 75 | - Task-specific prompt generation 76 | - Optimized prompts for GPT-4 and Claude 77 | - Token usage analysis 78 | - Multiple analysis tasks (review, refactor, test, document) 79 | 80 | ## SDK Features Demonstrated 81 | 82 | - **File Collection**: Using `collectFiles()` with ignore patterns 83 | - **Token Counting**: Using `countTokens()` with different encoders 84 | - **Markdown Generation**: Using `generateMarkdown()` with various options 85 | - **Template Processing**: Using `processPromptTemplate()` for dynamic content 86 | - **Project Analysis**: Using `findProjectRoot()` and other utilities 87 | 88 | ## Output Files 89 | 90 | The examples generate output files in organized directories: 91 | 92 | ``` 93 | output/ 94 | ├── github-analysis/ # Repository analysis results 95 | ├── documentation/ # Project documentation files 96 | └── ai-prompts/ # AI-optimized prompts 97 | ``` 98 | 99 | Generated files include: 100 | 101 | - **GitHub Analysis**: `codefetch-analysis-{timestamp}.md` 102 | - **Documentation**: `PROJECT_DOCUMENTATION-{timestamp}.md`, `README_TEMPLATE-{timestamp}.md` 103 | - **AI Prompts**: `gpt4-{task}-{timestamp}.md`, `claude-{task}-{timestamp}.md` 104 | 105 | **Note:** The `output/` directory is ignored by git to keep the repository clean. Each user will generate their own output files when running the examples. 
106 | 107 | To list all generated output files: 108 | 109 | ```bash 110 | npm run list-outputs 111 | ``` 112 | 113 | ## Requirements 114 | 115 | - Node.js 18+ 116 | - Git (for GitHub repo analysis) 117 | - The SDK must be built first: `cd ../packages/sdk && npm run build` 118 | -------------------------------------------------------------------------------- /playground/list-outputs.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | /** 4 | * List all generated output files in the playground output directories 5 | */ 6 | 7 | import { readdir, stat, access } from 'node:fs/promises'; 8 | import { join } from 'node:path'; 9 | 10 | const playgroundDir = new URL('.', import.meta.url).pathname; 11 | const outputDir = join(playgroundDir, 'output'); 12 | 13 | console.log('📁 Generated files in output directories:\n'); 14 | 15 | async function listFilesInDir(dir, category) { 16 | const files = []; 17 | try { 18 | await access(dir); 19 | const dirFiles = await readdir(dir); 20 | 21 | for (const file of dirFiles) { 22 | if (file.endsWith('.md')) { 23 | const filePath = join(dir, file); 24 | const stats = await stat(filePath); 25 | files.push({ 26 | name: file, 27 | path: filePath, 28 | size: stats.size, 29 | modified: stats.mtime, 30 | category 31 | }); 32 | } 33 | } 34 | } catch (error) { 35 | // Directory doesn't exist yet 36 | } 37 | return files; 38 | } 39 | 40 | try { 41 | const outputFolders = [ 42 | { path: join(outputDir, 'github-analysis'), category: '🔍 GitHub Analysis' }, 43 | { path: join(outputDir, 'documentation'), category: '📚 Documentation' }, 44 | { path: join(outputDir, 'ai-prompts'), category: '🤖 AI Prompts' } 45 | ]; 46 | 47 | const allFiles = []; 48 | 49 | for (const folder of outputFolders) { 50 | const files = await listFilesInDir(folder.path, folder.category); 51 | allFiles.push(...files); 52 | } 53 | 54 | if (allFiles.length === 0) { 55 | console.log('No generated output files found 
yet.'); 56 | console.log('\nRun the following commands to generate outputs:'); 57 | console.log(' npm run analyze-github'); 58 | console.log(' npm run generate-docs'); 59 | console.log(' npm run analyze-code'); 60 | } else { 61 | allFiles.sort((a, b) => b.modified - a.modified); 62 | 63 | console.log(`Found ${allFiles.length} output files:\n`); 64 | 65 | // Group by category 66 | const categories = {}; 67 | for (const file of allFiles) { 68 | if (!categories[file.category]) { 69 | categories[file.category] = []; 70 | } 71 | categories[file.category].push(file); 72 | } 73 | 74 | for (const [category, files] of Object.entries(categories)) { 75 | console.log(`${category}:`); 76 | for (const file of files) { 77 | const sizeKB = (file.size / 1024).toFixed(2); 78 | const date = file.modified.toLocaleString(); 79 | console.log(` 📄 ${file.name}`); 80 | console.log(` Size: ${sizeKB} KB`); 81 | console.log(` Modified: ${date}`); 82 | } 83 | console.log(); 84 | } 85 | } 86 | } catch (error) { 87 | console.error('❌ Error:', error.message); 88 | } -------------------------------------------------------------------------------- /playground/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "codefetch-playground", 3 | "version": "1.0.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "codefetch-playground", 9 | "version": "1.0.0", 10 | "dependencies": { 11 | "ignore": "^7.0.5" 12 | } 13 | }, 14 | "node_modules/ignore": { 15 | "version": "7.0.5", 16 | "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", 17 | "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", 18 | "engines": { 19 | "node": ">= 4" 20 | } 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /playground/package.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "name": "codefetch-playground", 3 | "version": "1.0.0", 4 | "type": "module", 5 | "description": "Playground for testing codefetch-sdk", 6 | "scripts": { 7 | "test": "node test-sdk.js", 8 | "analyze-github": "node analyze-github-repo.js", 9 | "generate-docs": "node generate-docs.js", 10 | "analyze-code": "node code-analyzer.js", 11 | "list-outputs": "node list-outputs.js" 12 | }, 13 | "dependencies": { 14 | "ignore": "^7.0.5" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /playground/quick-web-test.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | /** 4 | * Quick test of web fetching feature 5 | */ 6 | 7 | import { execSync } from 'node:child_process'; 8 | 9 | console.log('🌐 Quick Web Fetch Test\n'); 10 | 11 | // Test URLs 12 | const tests = [ 13 | 'https://github.com/sindresorhus/is-plain-obj', 14 | 'https://github.com/chalk/chalk --branch main', 15 | 'https://github.com/yargs/yargs --token-count-only' 16 | ]; 17 | 18 | for (const test of tests) { 19 | console.log(`\n📝 Testing: ${test}`); 20 | console.log('─'.repeat(50)); 21 | 22 | try { 23 | const cmd = `npx codefetch --url ${test} --dry-run`; 24 | execSync(cmd, { 25 | cwd: '../packages/cli', 26 | stdio: 'inherit' 27 | }); 28 | } catch (error) { 29 | console.error('Error:', error.message); 30 | } 31 | } 32 | 33 | console.log('\n✨ Done!'); -------------------------------------------------------------------------------- /playground/test-sdk.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import { 4 | collectFiles, 5 | countTokens, 6 | generateMarkdown, 7 | findProjectRoot, 8 | processPromptTemplate, 9 | DEFAULT_IGNORE_PATTERNS, 10 | VALID_PROMPTS, 11 | VALID_ENCODERS, 12 | VALID_LIMITERS, 13 | SUPPORTED_MODELS, 14 | } from 
"../packages/sdk/dist/index.mjs"; 15 | import ignore from "ignore"; 16 | import { resolve, join } from "path"; 17 | import { existsSync, readFileSync } from "fs"; 18 | 19 | console.log("🧪 Testing codefetch-sdk functionality\n"); 20 | 21 | // Test 1: Constants 22 | console.log("1️⃣ Testing Constants:"); 23 | console.log("Valid Prompts:", VALID_PROMPTS); 24 | console.log("Valid Encoders:", VALID_ENCODERS); 25 | console.log("Valid Limiters:", VALID_LIMITERS); 26 | console.log( 27 | "Supported Models sample:", 28 | Array.from(SUPPORTED_MODELS).slice(0, 3) 29 | ); 30 | console.log( 31 | "Default Ignore Patterns (first 5 lines):", 32 | DEFAULT_IGNORE_PATTERNS.split("\n").slice(0, 5).join("\n") 33 | ); 34 | console.log("✅ Constants loaded successfully\n"); 35 | 36 | // Test 2: findProjectRoot 37 | console.log("2️⃣ Testing findProjectRoot:"); 38 | const testDir = process.cwd(); 39 | const projectRoot = findProjectRoot(testDir); 40 | console.log("Current dir:", testDir); 41 | console.log("Project root:", projectRoot); 42 | console.log("✅ findProjectRoot works\n"); 43 | 44 | // Test 3: Token counting 45 | console.log("3️⃣ Testing countTokens:"); 46 | const testText = "Hello world! 
This is a test of the token counter."; 47 | try { 48 | const simpleTokens = await countTokens(testText, "simple"); 49 | console.log(`Simple encoder: "${testText}" = ${simpleTokens} tokens`); 50 | 51 | const cl100kTokens = await countTokens(testText, "cl100k"); 52 | console.log(`cl100k encoder: "${testText}" = ${cl100kTokens} tokens`); 53 | console.log("✅ Token counting works\n"); 54 | } catch (error) { 55 | console.error("❌ Token counting failed:", error.message); 56 | } 57 | 58 | // Test 4: Collect files 59 | console.log("4️⃣ Testing collectFiles:"); 60 | try { 61 | const ig = ignore().add( 62 | DEFAULT_IGNORE_PATTERNS.split("\n").filter( 63 | (line) => line && !line.startsWith("#") 64 | ) 65 | ); 66 | ig.add("node_modules/"); // Ignore node_modules 67 | 68 | // Collect JS/TS files in current directory 69 | const files = await collectFiles(".", { 70 | ig, 71 | extensionSet: new Set([".js", ".ts"]), 72 | verbose: 0, 73 | }); 74 | 75 | console.log(`Found ${files.length} JS/TS files`); 76 | files.slice(0, 3).forEach((file) => console.log(` - ${file}`)); 77 | console.log("✅ File collection works\n"); 78 | } catch (error) { 79 | console.error("❌ File collection failed:", error); 80 | } 81 | 82 | // Test 5: Prompt template processing 83 | console.log("5️⃣ Testing processPromptTemplate:"); 84 | try { 85 | const template = `Hello {{NAME}}! 
86 | Your message: {{MESSAGE}} 87 | Current codebase: 88 | {{CURRENT_CODEBASE}}`; 89 | 90 | const codebase = "Test codebase content here"; 91 | const vars = { 92 | NAME: "Developer", 93 | MESSAGE: "Testing the SDK", 94 | }; 95 | 96 | const processed = await processPromptTemplate(template, codebase, vars); 97 | console.log("Template processed:"); 98 | console.log(processed); 99 | console.log("✅ Template processing works\n"); 100 | } catch (error) { 101 | console.error("❌ Template processing failed:", error); 102 | } 103 | 104 | // Test 6: Generate markdown 105 | console.log("6️⃣ Testing generateMarkdown:"); 106 | try { 107 | const currentFile = new URL(import.meta.url).pathname; // ES module way to get current file 108 | const testFiles = [currentFile]; 109 | const markdown = await generateMarkdown(testFiles, { 110 | maxTokens: 1000, 111 | verbose: 0, 112 | projectTree: 2, 113 | tokenEncoder: "simple", 114 | disableLineNumbers: false, 115 | tokenLimiter: "truncated", 116 | }); 117 | 118 | console.log("Generated markdown preview (first 200 chars):"); 119 | console.log(markdown.substring(0, 200) + "..."); 120 | console.log(`Total length: ${markdown.length} characters`); 121 | console.log("✅ Markdown generation works\n"); 122 | } catch (error) { 123 | console.error("❌ Markdown generation failed:", error.message); 124 | } 125 | 126 | console.log("🎉 SDK testing complete!"); 127 | -------------------------------------------------------------------------------- /pnpm-workspace.yaml: -------------------------------------------------------------------------------- 1 | packages: 2 | - 'packages/*' -------------------------------------------------------------------------------- /public/cover.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/regenrek/codefetch/3a1d1de72fd11db14bcf709e7eeae729f57854b1/public/cover.jpeg -------------------------------------------------------------------------------- 
/public/tokenlimiter.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/regenrek/codefetch/3a1d1de72fd11db14bcf709e7eeae729f57854b1/public/tokenlimiter.png -------------------------------------------------------------------------------- /test-url.js: -------------------------------------------------------------------------------- 1 | import { parseURL } from './packages/sdk/dist/index.mjs'; 2 | 3 | try { 4 | const result = parseURL('https://example.com'); 5 | console.log('Result:', result); 6 | } catch (e) { 7 | console.log('Error:', e.message); 8 | } 9 | 10 | try { 11 | const result = parseURL('https://github.com/user/repo'); 12 | console.log('GitHub URL parsed successfully:', result); 13 | } catch (e) { 14 | console.log('Error:', e.message); 15 | } -------------------------------------------------------------------------------- /tsconfig.base.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2022", 4 | "module": "ESNext", 5 | "moduleResolution": "node", 6 | "lib": ["ES2022"], 7 | "esModuleInterop": true, 8 | "allowSyntheticDefaultImports": true, 9 | "strict": true, 10 | "skipLibCheck": true, 11 | "forceConsistentCasingInFileNames": true, 12 | "resolveJsonModule": true, 13 | "declaration": true, 14 | "declarationMap": true, 15 | "sourceMap": true, 16 | "noEmit": true 17 | }, 18 | "exclude": ["node_modules", "dist", "build", "coverage"] 19 | } --------------------------------------------------------------------------------