├── .gitignore
├── LICENSE
├── README.md
├── bun.lock
├── demo.png
├── package.json
├── src
├── anthropic-api-types.ts
├── anthropic-proxy.ts
├── claude-config.ts
├── convert-anthropic-messages.ts
├── convert-to-anthropic-stream.ts
├── convert-to-language-model-prompt.ts
├── data-content.ts
├── detect-mimetype.ts
├── invalid-data-content-error.ts
├── json-schema.ts
├── main.ts
└── split-data-url.ts
└── tsconfig.json
/.gitignore:
--------------------------------------------------------------------------------
1 | # dependencies (bun install)
2 | node_modules
3 |
4 | # output
5 | out
6 | dist
7 | *.tgz
8 |
9 | # code coverage
10 | coverage
11 | *.lcov
12 |
13 | # logs
14 | logs
15 | *.log
16 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
17 |
18 | # dotenv environment variable files
19 | .env
20 | .env.development.local
21 | .env.test.local
22 | .env.production.local
23 | .env.local
24 |
25 | # caches
26 | .eslintcache
27 | .cache
28 | *.tsbuildinfo
29 |
30 | # IntelliJ based IDEs
31 | .idea
32 |
33 | # Finder (MacOS) folder config
34 | .DS_Store
35 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License
2 |
3 | Copyright (c) 2025 Coder Technologies Inc.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of
6 | this software and associated documentation files (the "Software"), to deal in
7 | the Software without restriction, including without limitation the rights to
8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
9 | the Software, and to permit persons to whom the Software is furnished to do so,
10 | subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # anyclaude
2 |
3 | 
4 |
5 | Use Claude Code with OpenAI, Google, xAI, and other providers.
6 |
7 | - Extremely simple setup - just a basic command wrapper
8 | - Uses the AI SDK for simple support of new providers
9 | - Works with Claude Code GitHub Actions
10 |
11 |
12 |
13 | ## Get Started
14 |
15 | ```sh
16 | # Use your favorite package manager (bun, pnpm, and npm are supported)
17 | $ pnpm install -g anyclaude
18 |
19 | # anyclaude is a wrapper for the Claude CLI
20 | # `openai/`, `google/`, `xai/`, and `anthropic/` are supported
21 | $ anyclaude --model openai/o3
22 | ```
23 |
24 | Switch models in the Claude UI with `/model openai/o3`.
25 |
26 | ## FAQ
27 |
28 | ### What providers are supported?
29 |
30 | See [the providers](./src/main.ts#L17) for the implementation.
31 |
32 | - `GOOGLE_API_KEY` supports `google/*` models.
33 | - `OPENAI_API_KEY` supports `openai/*` models.
34 | - `XAI_API_KEY` supports `xai/*` models.
35 |
36 | Set a custom OpenAI endpoint with `OPENAI_API_URL` to use OpenRouter.
37 |
38 | ### How does this work?
39 |
40 | Claude Code has added support for customizing the Anthropic endpoint with `ANTHROPIC_BASE_URL`.
41 |
42 | anyclaude spawns a simple HTTP server that translates between Anthropic's format and the [AI SDK](https://github.com/vercel/ai) format, enabling support for any [AI SDK](https://github.com/vercel/ai) provider (e.g., Google, OpenAI, etc.)
43 |
44 | ### Do other models work better in Claude Code?
45 |
46 | Not really, but it's fun to experiment with them.
47 |
48 | `ANTHROPIC_MODEL` and `ANTHROPIC_SMALL_FAST_MODEL` are supported with the `/` syntax.
49 |
--------------------------------------------------------------------------------
/bun.lock:
--------------------------------------------------------------------------------
1 | {
2 | "lockfileVersion": 1,
3 | "workspaces": {
4 | "": {
5 | "name": "openclaude",
6 | "devDependencies": {
7 | "@ai-sdk/anthropic": "^1.2.12",
8 | "@ai-sdk/azure": "^1.3.23",
9 | "@ai-sdk/google": "^1.2.18",
10 | "@ai-sdk/openai": "^1.3.22",
11 | "@ai-sdk/xai": "^1.2.16",
12 | "@types/bun": "latest",
13 | "@types/json-schema": "^7.0.15",
14 | "ai": "^4.3.16",
15 | "json-schema": "^0.4.0",
16 | },
17 | "peerDependencies": {
18 | "typescript": "^5",
19 | },
20 | },
21 | },
22 | "packages": {
23 | "@ai-sdk/anthropic": ["@ai-sdk/anthropic@1.2.12", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8" }, "peerDependencies": { "zod": "^3.0.0" } }, "sha512-YSzjlko7JvuiyQFmI9RN1tNZdEiZxc+6xld/0tq/VkJaHpEzGAb1yiNxxvmYVcjvfu/PcvCxAAYXmTYQQ63IHQ=="],
24 |
25 | "@ai-sdk/azure": ["@ai-sdk/azure@1.3.23", "", { "dependencies": { "@ai-sdk/openai": "1.3.22", "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8" }, "peerDependencies": { "zod": "^3.0.0" } }, "sha512-vpsaPtU24RBVk/IMM5UylR/N4RtAuL2NZLWc7LJ3tvMTHu6pI46a7w+1qIwR3F6yO9ehWR8qvfLaBefJNFxaVw=="],
26 |
27 | "@ai-sdk/google": ["@ai-sdk/google@1.2.18", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8" }, "peerDependencies": { "zod": "^3.0.0" } }, "sha512-8B70+i+uB12Ae6Sn6B9Oc6W0W/XorGgc88Nx0pyUrcxFOdytHBaAVhTPqYsO3LLClfjYN8pQ9GMxd5cpGEnUcA=="],
28 |
29 | "@ai-sdk/openai": ["@ai-sdk/openai@1.3.22", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8" }, "peerDependencies": { "zod": "^3.0.0" } }, "sha512-QwA+2EkG0QyjVR+7h6FE7iOu2ivNqAVMm9UJZkVxxTk5OIq5fFJDTEI/zICEMuHImTTXR2JjsL6EirJ28Jc4cw=="],
30 |
31 | "@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@0.2.14", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8" }, "peerDependencies": { "zod": "^3.0.0" } }, "sha512-icjObfMCHKSIbywijaoLdZ1nSnuRnWgMEMLgwoxPJgxsUHMx0aVORnsLUid4SPtdhHI3X2masrt6iaEQLvOSFw=="],
32 |
33 | "@ai-sdk/provider": ["@ai-sdk/provider@1.1.3", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-qZMxYJ0qqX/RfnuIaab+zp8UAeJn/ygXXAffR5I4N0n1IrvA6qBsjc8hXLmBiMV2zoXlifkacF7sEFnYnjBcqg=="],
34 |
35 | "@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@2.2.8", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "nanoid": "^3.3.8", "secure-json-parse": "^2.7.0" }, "peerDependencies": { "zod": "^3.23.8" } }, "sha512-fqhG+4sCVv8x7nFzYnFo19ryhAa3w096Kmc3hWxMQfW/TubPOmt3A6tYZhl4mUfQWWQMsuSkLrtjlWuXBVSGQA=="],
36 |
37 | "@ai-sdk/react": ["@ai-sdk/react@1.2.12", "", { "dependencies": { "@ai-sdk/provider-utils": "2.2.8", "@ai-sdk/ui-utils": "1.2.11", "swr": "^2.2.5", "throttleit": "2.1.0" }, "peerDependencies": { "react": "^18 || ^19 || ^19.0.0-rc", "zod": "^3.23.8" }, "optionalPeers": ["zod"] }, "sha512-jK1IZZ22evPZoQW3vlkZ7wvjYGYF+tRBKXtrcolduIkQ/m/sOAVcVeVDUDvh1T91xCnWCdUGCPZg2avZ90mv3g=="],
38 |
39 | "@ai-sdk/ui-utils": ["@ai-sdk/ui-utils@1.2.11", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8", "zod-to-json-schema": "^3.24.1" }, "peerDependencies": { "zod": "^3.23.8" } }, "sha512-3zcwCc8ezzFlwp3ZD15wAPjf2Au4s3vAbKsXQVyhxODHcmu0iyPO2Eua6D/vicq/AUm/BAo60r97O6HU+EI0+w=="],
40 |
41 | "@ai-sdk/xai": ["@ai-sdk/xai@1.2.16", "", { "dependencies": { "@ai-sdk/openai-compatible": "0.2.14", "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8" }, "peerDependencies": { "zod": "^3.0.0" } }, "sha512-UOZT8td9PWwMi2dF9a0U44t/Oltmf6QmIJdSvrOcLG4mvpRc1UJn6YJaR0HtXs3YnW6SvY1zRdIDrW4GFpv4NA=="],
42 |
43 | "@opentelemetry/api": ["@opentelemetry/api@1.9.0", "", {}, "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg=="],
44 |
45 | "@types/bun": ["@types/bun@1.2.14", "", { "dependencies": { "bun-types": "1.2.14" } }, "sha512-VsFZKs8oKHzI7zwvECiAJ5oSorWndIWEVhfbYqZd4HI/45kzW7PN2Rr5biAzvGvRuNmYLSANY+H59ubHq8xw7Q=="],
46 |
47 | "@types/diff-match-patch": ["@types/diff-match-patch@1.0.36", "", {}, "sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg=="],
48 |
49 | "@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="],
50 |
51 | "@types/node": ["@types/node@22.15.21", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-EV/37Td6c+MgKAbkcLG6vqZ2zEYHD7bvSrzqqs2RIhbA6w3x+Dqz8MZM3sP6kGTeLrdoOgKZe+Xja7tUB2DNkQ=="],
52 |
53 | "ai": ["ai@4.3.16", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8", "@ai-sdk/react": "1.2.12", "@ai-sdk/ui-utils": "1.2.11", "@opentelemetry/api": "1.9.0", "jsondiffpatch": "0.6.0" }, "peerDependencies": { "react": "^18 || ^19 || ^19.0.0-rc", "zod": "^3.23.8" }, "optionalPeers": ["react"] }, "sha512-KUDwlThJ5tr2Vw0A1ZkbDKNME3wzWhuVfAOwIvFUzl1TPVDFAXDFTXio3p+jaKneB+dKNCvFFlolYmmgHttG1g=="],
54 |
55 | "bun-types": ["bun-types@1.2.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-Kuh4Ub28ucMRWeiUUWMHsT9Wcbr4H3kLIO72RZZElSDxSu7vpetRvxIUDUaW6QtaIeixIpm7OXtNnZPf82EzwA=="],
56 |
57 | "chalk": ["chalk@5.4.1", "", {}, "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w=="],
58 |
59 | "dequal": ["dequal@2.0.3", "", {}, "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA=="],
60 |
61 | "diff-match-patch": ["diff-match-patch@1.0.5", "", {}, "sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw=="],
62 |
63 | "json-schema": ["json-schema@0.4.0", "", {}, "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA=="],
64 |
65 | "jsondiffpatch": ["jsondiffpatch@0.6.0", "", { "dependencies": { "@types/diff-match-patch": "^1.0.36", "chalk": "^5.3.0", "diff-match-patch": "^1.0.5" }, "bin": { "jsondiffpatch": "bin/jsondiffpatch.js" } }, "sha512-3QItJOXp2AP1uv7waBkao5nCvhEv+QmJAd38Ybq7wNI74Q+BBmnLn4EDKz6yI9xGAIQoUF87qHt+kc1IVxB4zQ=="],
66 |
67 | "nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
68 |
69 | "react": ["react@19.1.0", "", {}, "sha512-FS+XFBNvn3GTAWq26joslQgWNoFu08F4kl0J4CgdNKADkdSGXQyTCnKteIAJy96Br6YbpEU1LSzV5dYtjMkMDg=="],
70 |
71 | "secure-json-parse": ["secure-json-parse@2.7.0", "", {}, "sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw=="],
72 |
73 | "swr": ["swr@2.3.3", "", { "dependencies": { "dequal": "^2.0.3", "use-sync-external-store": "^1.4.0" }, "peerDependencies": { "react": "^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-dshNvs3ExOqtZ6kJBaAsabhPdHyeY4P2cKwRCniDVifBMoG/SVI7tfLWqPXriVspf2Rg4tPzXJTnwaihIeFw2A=="],
74 |
75 | "throttleit": ["throttleit@2.1.0", "", {}, "sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw=="],
76 |
77 | "typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="],
78 |
79 | "undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="],
80 |
81 | "use-sync-external-store": ["use-sync-external-store@1.5.0", "", { "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-Rb46I4cGGVBmjamjphe8L/UnvJD+uPPtTkNvX5mZgqdbavhI4EbgIWJiIHXJ8bc/i9EQGPRh4DwEURJ552Do0A=="],
82 |
83 | "zod": ["zod@3.25.30", "", {}, "sha512-VolhdEtu6TJr/fzGuHA/SZ5ixvXqA6ADOG9VRcQ3rdOKmF5hkmcJbyaQjUH5BgmpA9gej++zYRX7zjSmdReIwA=="],
84 |
85 | "zod-to-json-schema": ["zod-to-json-schema@3.24.5", "", { "peerDependencies": { "zod": "^3.24.1" } }, "sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g=="],
86 | }
87 | }
88 |
--------------------------------------------------------------------------------
/demo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/coder/anyclaude/8639be076619d77a0751223949dd2eebb5fdc774/demo.png
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "anyclaude",
3 | "version": "1.0.5",
4 | "author": {
5 | "name": "coder",
6 | "email": "support@coder.com",
7 | "url": "https://coder.com"
8 | },
9 | "repository": {
10 | "type": "git",
11 | "url": "https://github.com/coder/anyclaude"
12 | },
13 | "bin": {
14 | "anyclaude": "./dist/main.js"
15 | },
16 | "devDependencies": {
17 | "@ai-sdk/anthropic": "^1.2.12",
18 | "@ai-sdk/azure": "^1.3.23",
19 | "@ai-sdk/google": "^1.2.18",
20 | "@ai-sdk/openai": "^1.3.22",
21 | "@ai-sdk/xai": "^1.2.16",
22 | "@types/bun": "latest",
23 | "@types/json-schema": "^7.0.15",
24 | "ai": "^4.3.16",
25 | "json-schema": "^0.4.0"
26 | },
27 | "peerDependencies": {
28 | "typescript": "^5"
29 | },
30 | "description": "Run Claude Code with OpenAI, Google, xAI, and others.",
31 | "license": "MIT",
32 | "scripts": {
33 | "build": "bun build --target node --outfile dist/main.js ./src/main.ts --format cjs && sed -i '0,/^/s//#!\\/usr\\/bin\\/env node\\n/' ./dist/main.js"
34 | }
35 | }
--------------------------------------------------------------------------------
/src/anthropic-api-types.ts:
--------------------------------------------------------------------------------
1 | import type { JSONSchema7 } from "@ai-sdk/provider";
2 | import type { FinishReason } from "ai";
3 |
4 | export type AnthropicMessagesPrompt = {
5 | system: Array | undefined;
6 | messages: AnthropicMessage[];
7 | };
8 |
9 | export type AnthropicMessage = AnthropicUserMessage | AnthropicAssistantMessage;
10 |
11 | export type AnthropicCacheControl = { type: "ephemeral" };
12 |
13 | export interface AnthropicUserMessage {
14 | role: "user";
15 | content: Array<
16 | | AnthropicTextContent
17 | | AnthropicImageContent
18 | | AnthropicDocumentContent
19 | | AnthropicToolResultContent
20 | >;
21 | }
22 |
23 | export interface AnthropicAssistantMessage {
24 | role: "assistant";
25 | content: Array<
26 | | AnthropicTextContent
27 | | AnthropicThinkingContent
28 | | AnthropicRedactedThinkingContent
29 | | AnthropicToolCallContent
30 | >;
31 | }
32 |
33 | export interface AnthropicTextContent {
34 | type: "text";
35 | text: string;
36 | cache_control: AnthropicCacheControl | undefined;
37 | }
38 |
39 | export interface AnthropicThinkingContent {
40 | type: "thinking";
41 | thinking: string;
42 | signature: string;
43 | cache_control: AnthropicCacheControl | undefined;
44 | }
45 |
46 | export interface AnthropicRedactedThinkingContent {
47 | type: "redacted_thinking";
48 | data: string;
49 | cache_control: AnthropicCacheControl | undefined;
50 | }
51 |
52 | type AnthropicContentSource =
53 | | {
54 | type: "base64";
55 | media_type: string;
56 | data: string;
57 | }
58 | | {
59 | type: "url";
60 | url: string;
61 | };
62 |
63 | export interface AnthropicImageContent {
64 | type: "image";
65 | source: AnthropicContentSource;
66 | cache_control: AnthropicCacheControl | undefined;
67 | }
68 |
69 | export interface AnthropicDocumentContent {
70 | type: "document";
71 | source: AnthropicContentSource;
72 | cache_control: AnthropicCacheControl | undefined;
73 | }
74 |
75 | export interface AnthropicToolCallContent {
76 | type: "tool_use";
77 | id: string;
78 | name: string;
79 | input: unknown;
80 | cache_control: AnthropicCacheControl | undefined;
81 | }
82 |
83 | export interface AnthropicToolResultContent {
84 | type: "tool_result";
85 | tool_use_id: string;
86 | content: string | Array;
87 | is_error: boolean | undefined;
88 | cache_control: AnthropicCacheControl | undefined;
89 | }
90 |
91 | export type AnthropicTool =
92 | | {
93 | name: string;
94 | description: string | undefined;
95 | input_schema: JSONSchema7;
96 | }
97 | | {
98 | name: string;
99 | type: "computer_20250124" | "computer_20241022";
100 | display_width_px: number;
101 | display_height_px: number;
102 | display_number: number;
103 | }
104 | | {
105 | name: string;
106 | type: "text_editor_20250124" | "text_editor_20241022";
107 | }
108 | | {
109 | name: string;
110 | type: "bash_20250124" | "bash_20241022";
111 | };
112 |
113 | export type AnthropicToolChoice =
114 | | { type: "auto" | "any" }
115 | | { type: "tool"; name: string };
116 |
117 | export type AnthropicStreamUsage = {
118 | input_tokens: number;
119 | output_tokens: number;
120 | };
121 |
122 | export type AnthropicStreamChunk =
123 | | {
124 | type: "message_start";
125 | message: AnthropicAssistantMessage & {
126 | id: string;
127 | model: string;
128 | stop_reason: string | null;
129 | stop_sequence: string | null;
130 | usage: AnthropicStreamUsage;
131 | };
132 | }
133 | | {
134 | type: "content_block_start";
135 | index: number;
136 | content_block:
137 | | {
138 | type: "text";
139 | text: string;
140 | }
141 | | {
142 | type: "tool_use";
143 | id: string;
144 | name: string;
145 | input: any;
146 | };
147 | }
148 | | {
149 | type: "content_block_delta";
150 | index: number;
151 | delta:
152 | | {
153 | type: "text_delta";
154 | text: string;
155 | }
156 | | {
157 | type: "input_json_delta";
158 | partial_json: string;
159 | };
160 | }
161 | | {
162 | type: "content_block_stop";
163 | index: number;
164 | }
165 | | {
166 | type: "message_delta";
167 | delta: {
168 | stop_reason: string;
169 | stop_sequence: string | null;
170 | };
171 | usage: AnthropicStreamUsage;
172 | }
173 | | {
174 | type: "message_stop";
175 | }
176 | | {
177 | type: "error";
178 | error: {
179 | type: "api_error";
180 | message: string;
181 | };
182 | };
183 |
184 | export type AnthropicMessagesRequest = {
185 | model: string;
186 | max_tokens: number;
187 | messages: AnthropicMessage[];
188 | temperature: number;
189 | metadata: {
190 | user_id: string;
191 | };
192 | system?: Array;
193 | tools?: Array<{
194 | name: string;
195 | description: string | undefined;
196 | input_schema: JSONSchema7;
197 | }>;
198 | stream: boolean;
199 | };
200 |
201 | export function mapAnthropicStopReason(finishReason: FinishReason): string {
202 | switch (finishReason) {
203 | case "stop":
204 | return "end_turn";
205 | case "tool-calls":
206 | return "tool_use";
207 | case "length":
208 | return "max_tokens";
209 | default:
210 | return "unknown";
211 | }
212 | }
213 |
--------------------------------------------------------------------------------
/src/anthropic-proxy.ts:
--------------------------------------------------------------------------------
1 | import type { ProviderV1 } from "@ai-sdk/provider";
2 | import { jsonSchema, streamText, type Tool } from "ai";
3 | import * as http from "http";
4 | import * as https from "https";
5 | import type { AnthropicMessagesRequest } from "./anthropic-api-types";
6 | import { mapAnthropicStopReason } from "./anthropic-api-types";
7 | import {
8 | convertFromAnthropicMessages,
9 | convertToAnthropicMessagesPrompt,
10 | } from "./convert-anthropic-messages";
11 | import { convertToAnthropicStream } from "./convert-to-anthropic-stream";
12 | import { convertToLanguageModelMessage } from "./convert-to-language-model-prompt";
13 | import { providerizeSchema } from "./json-schema";
14 |
15 | export type CreateAnthropicProxyOptions = {
16 | providers: Record;
17 | port?: number;
18 | };
19 |
20 | // createAnthropicProxy creates a proxy server that accepts
21 | // Anthropic Message API requests and proxies them through
22 | // the appropriate provider - converting the results back
23 | // to the Anthropic Message API format.
24 | export const createAnthropicProxy = ({
25 | port,
26 | providers,
27 | }: CreateAnthropicProxyOptions): string => {
28 | const proxy = http
29 | .createServer((req, res) => {
30 | if (!req.url) {
31 | res.writeHead(400, {
32 | "Content-Type": "application/json",
33 | });
34 | res.end(
35 | JSON.stringify({
36 | error: "No URL provided",
37 | })
38 | );
39 | return;
40 | }
41 |
42 | const proxyToAnthropic = (body?: AnthropicMessagesRequest) => {
43 | delete req.headers["host"];
44 |
45 | const proxy = https.request(
46 | {
47 | host: "api.anthropic.com",
48 | path: req.url,
49 | method: req.method,
50 | headers: req.headers,
51 | },
52 | (proxiedRes) => {
53 | res.writeHead(proxiedRes.statusCode ?? 500, proxiedRes.headers);
54 | proxiedRes.pipe(res, {
55 | end: true,
56 | });
57 | }
58 | );
59 | if (body) {
60 | proxy.end(JSON.stringify(body));
61 | } else {
62 | req.pipe(proxy, {
63 | end: true,
64 | });
65 | }
66 | };
67 |
68 | if (!req.url.startsWith("/v1/messages")) {
69 | proxyToAnthropic();
70 | return;
71 | }
72 |
73 | (async () => {
74 | const body = await new Promise(
75 | (resolve, reject) => {
76 | let body = "";
77 | req.on("data", (chunk) => {
78 | body += chunk;
79 | });
80 | req.on("end", () => {
81 | resolve(JSON.parse(body));
82 | });
83 | req.on("error", (err) => {
84 | reject(err);
85 | });
86 | }
87 | );
88 |
89 | const modelParts = body.model.split("/");
90 |
91 | let providerName: string;
92 | let model: string;
93 | if (modelParts.length === 1) {
94 | // If the user has the Anthropic provider configured,
95 | // proxy all requests through there instead.
96 | if (providers.anthropic) {
97 | providerName = "anthropic";
98 | model = modelParts[0]!;
99 | } else {
100 | // If they don't have it configured, just use
101 | // the normal Anthropic API.
102 | proxyToAnthropic(body);
103 | }
104 | return;
105 | } else {
106 | providerName = modelParts[0]!;
107 | model = modelParts[1]!;
108 | }
109 |
110 | const provider = providers[providerName];
111 | if (!provider) {
112 | throw new Error(`Unknown provider: ${providerName}`);
113 | }
114 |
115 | const coreMessages = convertFromAnthropicMessages(body.messages);
116 | let system: string | undefined;
117 | if (body.system && body.system.length > 0) {
118 | system = body.system.map((s) => s.text).join("\n");
119 | }
120 |
121 | const tools = body.tools?.reduce((acc, tool) => {
122 | acc[tool.name] = {
123 | description: tool.name,
124 | parameters: jsonSchema(
125 | providerizeSchema(providerName, tool.input_schema)
126 | ),
127 | };
128 | return acc;
129 | }, {} as Record);
130 |
131 | const stream = streamText({
132 | model: provider.languageModel(model),
133 | system,
134 | tools,
135 | messages: coreMessages,
136 | maxTokens: body.max_tokens,
137 | temperature: body.temperature,
138 |
139 | onFinish: ({ response, usage, finishReason }) => {
140 | // If the body is already being streamed,
141 | // we don't need to do any conversion here.
142 | if (body.stream) {
143 | return;
144 | }
145 |
146 | // There should only be one message.
147 | const message = response.messages[0];
148 | if (!message) {
149 | throw new Error("No message found");
150 | }
151 |
152 | const prompt = convertToAnthropicMessagesPrompt({
153 | prompt: [convertToLanguageModelMessage(message, {})],
154 | sendReasoning: true,
155 | warnings: [],
156 | });
157 | const promptMessage = prompt.prompt.messages[0];
158 | if (!promptMessage) {
159 | throw new Error("No prompt message found");
160 | }
161 |
162 | res.writeHead(200, { "Content-Type": "application/json" }).end(
163 | JSON.stringify({
164 | id: message.id,
165 | type: "message",
166 | role: promptMessage.role,
167 | content: promptMessage.content,
168 | model: body.model,
169 | stop_reason: mapAnthropicStopReason(finishReason),
170 | stop_sequence: null,
171 | usage: {
172 | input_tokens: usage.promptTokens,
173 | output_tokens: usage.completionTokens,
174 | },
175 | })
176 | );
177 | },
178 | onError: ({ error }) => {
179 | res
180 | .writeHead(400, {
181 | "Content-Type": "application/json",
182 | })
183 | .end(
184 | JSON.stringify({
185 | type: "error",
186 | error: error instanceof Error ? error.message : error,
187 | })
188 | );
189 | },
190 | });
191 |
192 | if (!body.stream) {
193 | await stream.consumeStream();
194 | return;
195 | }
196 |
197 | res.on("error", () => {
198 | // In NodeJS, this needs to be handled.
199 | // We already send the error to the client.
200 | });
201 |
202 | await convertToAnthropicStream(stream.fullStream).pipeTo(
203 | new WritableStream({
204 | write(chunk) {
205 | res.write(
206 | `event: ${chunk.type}\ndata: ${JSON.stringify(chunk)}\n\n`
207 | );
208 | },
209 | close() {
210 | res.end();
211 | },
212 | })
213 | );
214 | })().catch((err) => {
215 | res.writeHead(500, {
216 | "Content-Type": "application/json",
217 | });
218 | res.end(
219 | JSON.stringify({
220 | error: "Internal server error: " + err.message,
221 | })
222 | );
223 | });
224 | })
225 | .listen(port ?? 0);
226 |
227 | const address = proxy.address();
228 | if (!address) {
229 | throw new Error("Failed to get proxy address");
230 | }
231 | if (typeof address === "string") {
232 | return address;
233 | }
234 | return `http://localhost:${address.port}`;
235 | };
236 |
--------------------------------------------------------------------------------
/src/claude-config.ts:
--------------------------------------------------------------------------------
1 | import { readFileSync } from "fs";
2 | import { homedir } from "os";
3 | import path from "path";
4 |
5 | export const readClaudeCodeAPIKey = (): string => {
6 | const data = readFileSync(path.join(homedir(), ".claude.json"), "utf8");
7 | const config = JSON.parse(data);
8 | return config.primaryApiKey;
9 | };
10 |
--------------------------------------------------------------------------------
/src/convert-anthropic-messages.ts:
--------------------------------------------------------------------------------
1 | import {
2 | type LanguageModelV1CallWarning,
3 | type LanguageModelV1Message,
4 | type LanguageModelV1Prompt,
5 | type LanguageModelV1ProviderMetadata,
6 | UnsupportedFunctionalityError,
7 | } from "@ai-sdk/provider";
8 | import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
9 | import type {
10 | AnthropicAssistantMessage,
11 | AnthropicCacheControl,
12 | AnthropicMessage,
13 | AnthropicMessagesPrompt,
14 | AnthropicUserMessage,
15 | } from "./anthropic-api-types";
16 | import type { CoreMessage, FilePart, TextPart, ToolCallPart } from "ai";
17 | import type { ReasoningUIPart } from "@ai-sdk/ui-utils";
18 |
19 | export function convertToAnthropicMessagesPrompt({
20 | prompt,
21 | sendReasoning,
22 | warnings,
23 | }: {
24 | prompt: LanguageModelV1Prompt;
25 | sendReasoning: boolean;
26 | warnings: LanguageModelV1CallWarning[];
27 | }): {
28 | prompt: AnthropicMessagesPrompt;
29 | betas: Set;
30 | } {
31 | const betas = new Set();
32 | const blocks = groupIntoBlocks(prompt);
33 |
34 | let system: AnthropicMessagesPrompt["system"] = undefined;
35 | const messages: AnthropicMessagesPrompt["messages"] = [];
36 |
37 | function getCacheControl(
38 | providerMetadata: LanguageModelV1ProviderMetadata | undefined
39 | ): AnthropicCacheControl | undefined {
40 | const anthropic = providerMetadata?.anthropic;
41 |
42 | // allow both cacheControl and cache_control:
43 | const cacheControlValue =
44 | anthropic?.cacheControl ?? anthropic?.cache_control;
45 |
46 | // Pass through value assuming it is of the correct type.
47 | // The Anthropic API will validate the value.
48 | return cacheControlValue as AnthropicCacheControl | undefined;
49 | }
50 |
51 | for (let i = 0; i < blocks.length; i++) {
52 | const block = blocks[i]!;
53 | const isLastBlock = i === blocks.length - 1;
54 | const type = block.type;
55 |
56 | switch (type) {
57 | case "system": {
58 | if (system != null) {
59 | throw new UnsupportedFunctionalityError({
60 | functionality:
61 | "Multiple system messages that are separated by user/assistant messages",
62 | });
63 | }
64 |
65 | system = block.messages.map(({ content, providerMetadata }) => ({
66 | type: "text",
67 | text: content,
68 | cache_control: getCacheControl(providerMetadata),
69 | }));
70 |
71 | break;
72 | }
73 |
74 | case "user": {
75 | // combines all user and tool messages in this block into a single message:
76 | const anthropicContent: AnthropicUserMessage["content"] = [];
77 |
78 | for (const message of block.messages) {
79 | const { role, content } = message;
80 | switch (role) {
81 | case "user": {
82 | for (let j = 0; j < content.length; j++) {
83 | const part = content[j]!;
84 |
85 | // cache control: first add cache control from part.
86 | // for the last part of a message,
87 | // check also if the message has cache control.
88 | const isLastPart = j === content.length - 1;
89 |
90 | const cacheControl =
91 | getCacheControl(part.providerMetadata) ??
92 | (isLastPart
93 | ? getCacheControl(message.providerMetadata)
94 | : undefined);
95 |
96 | switch (part.type) {
97 | case "text": {
98 | anthropicContent.push({
99 | type: "text",
100 | text: part.text,
101 | cache_control: cacheControl,
102 | });
103 | break;
104 | }
105 |
106 | case "image": {
107 | anthropicContent.push({
108 | type: "image",
109 | source:
110 | part.image instanceof URL
111 | ? {
112 | type: "url",
113 | url: part.image.toString(),
114 | }
115 | : {
116 | type: "base64",
117 | media_type: part.mimeType ?? "image/jpeg",
118 | data: convertUint8ArrayToBase64(part.image),
119 | },
120 | cache_control: cacheControl,
121 | });
122 |
123 | break;
124 | }
125 |
126 | case "file": {
127 | if (part.mimeType !== "application/pdf") {
128 | throw new UnsupportedFunctionalityError({
129 | functionality: "Non-PDF files in user messages",
130 | });
131 | }
132 |
133 | betas.add("pdfs-2024-09-25");
134 |
135 | anthropicContent.push({
136 | type: "document",
137 | source:
138 | part.data instanceof URL
139 | ? {
140 | type: "url",
141 | url: part.data.toString(),
142 | }
143 | : {
144 | type: "base64",
145 | media_type: "application/pdf",
146 | data: part.data,
147 | },
148 | cache_control: cacheControl,
149 | });
150 |
151 | break;
152 | }
153 | }
154 | }
155 |
156 | break;
157 | }
158 | case "tool": {
159 | for (let i = 0; i < content.length; i++) {
160 | const part = content[i]!;
161 |
162 | // cache control: first add cache control from part.
163 | // for the last part of a message,
164 | // check also if the message has cache control.
165 | const isLastPart = i === content.length - 1;
166 |
167 | const cacheControl =
168 | getCacheControl(part.providerMetadata) ??
169 | (isLastPart
170 | ? getCacheControl(message.providerMetadata)
171 | : undefined);
172 |
173 | const toolResultContent =
174 | part.content != null
175 | ? part.content.map((part) => {
176 | switch (part.type) {
177 | case "text":
178 | return {
179 | type: "text" as const,
180 | text: part.text,
181 | cache_control: undefined,
182 | };
183 | case "image":
184 | return {
185 | type: "image" as const,
186 | source: {
187 | type: "base64" as const,
188 | media_type: part.mimeType ?? "image/jpeg",
189 | data: part.data,
190 | },
191 | cache_control: undefined,
192 | };
193 | }
194 | })
195 | : JSON.stringify(part.result);
196 |
197 | anthropicContent.push({
198 | type: "tool_result",
199 | tool_use_id: part.toolCallId,
200 | content: toolResultContent,
201 | is_error: part.isError,
202 | cache_control: cacheControl,
203 | });
204 | }
205 |
206 | break;
207 | }
208 | default: {
209 | const _exhaustiveCheck: never = role;
210 | throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
211 | }
212 | }
213 | }
214 |
215 | messages.push({ role: "user", content: anthropicContent });
216 |
217 | break;
218 | }
219 |
220 | case "assistant": {
221 | // combines multiple assistant messages in this block into a single message:
222 | const anthropicContent: AnthropicAssistantMessage["content"] = [];
223 |
224 | for (let j = 0; j < block.messages.length; j++) {
225 | const message = block.messages[j]!;
226 | const isLastMessage = j === block.messages.length - 1;
227 | const { content } = message;
228 |
229 | for (let k = 0; k < content.length; k++) {
230 | const part = content[k]!;
231 | const isLastContentPart = k === content.length - 1;
232 |
233 | // cache control: first add cache control from part.
234 | // for the last part of a message,
235 | // check also if the message has cache control.
236 | const cacheControl =
237 | getCacheControl(part.providerMetadata) ??
238 | (isLastContentPart
239 | ? getCacheControl(message.providerMetadata)
240 | : undefined);
241 |
242 | switch (part.type) {
243 | case "text": {
244 | anthropicContent.push({
245 | type: "text",
246 | text:
247 | // trim the last text part if it's the last message in the block
248 | // because Anthropic does not allow trailing whitespace
249 | // in pre-filled assistant responses
250 | isLastBlock && isLastMessage && isLastContentPart
251 | ? part.text.trim()
252 | : part.text,
253 |
254 | cache_control: cacheControl,
255 | });
256 | break;
257 | }
258 |
259 | case "reasoning": {
260 | if (sendReasoning) {
261 | anthropicContent.push({
262 | type: "thinking",
263 | thinking: part.text,
264 | signature: part.signature!,
265 | cache_control: cacheControl,
266 | });
267 | } else {
268 | warnings.push({
269 | type: "other",
270 | message:
271 | "sending reasoning content is disabled for this model",
272 | });
273 | }
274 | break;
275 | }
276 |
277 | case "redacted-reasoning": {
278 | anthropicContent.push({
279 | type: "redacted_thinking",
280 | data: part.data,
281 | cache_control: cacheControl,
282 | });
283 | break;
284 | }
285 |
286 | case "tool-call": {
287 | anthropicContent.push({
288 | type: "tool_use",
289 | id: part.toolCallId,
290 | name: part.toolName,
291 | input: part.args,
292 | cache_control: cacheControl,
293 | });
294 | break;
295 | }
296 | }
297 | }
298 | }
299 |
300 | messages.push({ role: "assistant", content: anthropicContent });
301 |
302 | break;
303 | }
304 |
305 | default: {
306 | const _exhaustiveCheck: never = type;
307 | throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
308 | }
309 | }
310 | }
311 |
312 | return {
313 | prompt: { system, messages },
314 | betas,
315 | };
316 | }
317 |
318 | type SystemBlock = {
319 | type: "system";
320 | messages: Array;
321 | };
322 | type AssistantBlock = {
323 | type: "assistant";
324 | messages: Array;
325 | };
326 | type UserBlock = {
327 | type: "user";
328 | messages: Array;
329 | };
330 |
331 | function groupIntoBlocks(
332 | prompt: LanguageModelV1Prompt
333 | ): Array {
334 | const blocks: Array = [];
335 | let currentBlock: SystemBlock | AssistantBlock | UserBlock | undefined =
336 | undefined;
337 |
338 | for (const message of prompt) {
339 | const { role } = message;
340 | switch (role) {
341 | case "system": {
342 | if (currentBlock?.type !== "system") {
343 | currentBlock = { type: "system", messages: [] };
344 | blocks.push(currentBlock);
345 | }
346 |
347 | currentBlock.messages.push(message);
348 | break;
349 | }
350 | case "assistant": {
351 | if (currentBlock?.type !== "assistant") {
352 | currentBlock = { type: "assistant", messages: [] };
353 | blocks.push(currentBlock);
354 | }
355 |
356 | currentBlock.messages.push(message);
357 | break;
358 | }
359 | case "user": {
360 | if (currentBlock?.type !== "user") {
361 | currentBlock = { type: "user", messages: [] };
362 | blocks.push(currentBlock);
363 | }
364 |
365 | currentBlock.messages.push(message);
366 | break;
367 | }
368 | case "tool": {
369 | if (currentBlock?.type !== "user") {
370 | currentBlock = { type: "user", messages: [] };
371 | blocks.push(currentBlock);
372 | }
373 |
374 | currentBlock.messages.push(message);
375 | break;
376 | }
377 | default: {
378 | const _exhaustiveCheck: never = role;
379 | throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
380 | }
381 | }
382 | }
383 |
384 | return blocks;
385 | }
386 |
387 | export function convertFromAnthropicMessages(
388 | messages: ReadonlyArray
389 | ) {
390 | const result: CoreMessage[] = [];
391 | let toolCalls: Record = {};
392 |
393 | for (const message of messages) {
394 | const messageContent: (
395 | | TextPart
396 | | FilePart
397 | | ReasoningUIPart
398 | | ToolCallPart
399 | )[] = [];
400 |
401 | if (typeof message.content !== "string") {
402 | message.content.forEach((content) => {
403 | switch (content.type) {
404 | case "text": {
405 | messageContent.push({
406 | type: "text",
407 | text: content.text,
408 | });
409 | break;
410 | }
411 | case "tool_use": {
412 | messageContent.push({
413 | type: "tool-call",
414 | args: content.input,
415 | toolCallId: content.id,
416 | toolName: content.name,
417 | });
418 | toolCalls[content.id] = {
419 | type: "tool-call",
420 | args: content.input,
421 | toolCallId: content.id,
422 | toolName: content.name,
423 | };
424 | break;
425 | }
426 | case "tool_result": {
427 | const toolCall = toolCalls[content.tool_use_id];
428 | if (!toolCall) {
429 | throw new Error("Tool call not found");
430 | }
431 | result.push({
432 | role: "tool",
433 | content: [
434 | {
435 | result: content.content,
436 | toolCallId: content.tool_use_id,
437 | toolName: toolCall.toolName,
438 | type: "tool-result",
439 | },
440 | ],
441 | });
442 | break;
443 | }
444 | }
445 | });
446 | } else {
447 | messageContent.push({
448 | type: "text",
449 | text: message.content as string,
450 | });
451 | }
452 |
453 | if (messageContent.length > 0) {
454 | result.push({
455 | role: message.role,
456 | content: messageContent,
457 | } as CoreMessage);
458 | }
459 | }
460 | return result;
461 | }
462 |
--------------------------------------------------------------------------------
/src/convert-to-anthropic-stream.ts:
--------------------------------------------------------------------------------
1 | import type { Tool } from "ai";
2 | import type { TextStreamPart } from "ai";
3 | import {
4 | mapAnthropicStopReason,
5 | type AnthropicStreamChunk,
6 | } from "./anthropic-api-types";
7 |
8 | export function convertToAnthropicStream(
9 | stream: ReadableStream>>
10 | ): ReadableStream {
11 | const transform = new TransformStream<
12 | TextStreamPart>,
13 | AnthropicStreamChunk
14 | >({
15 | transform(chunk, controller) {
16 | let index = 0;
17 |
18 | switch (chunk.type) {
19 | case "step-start":
20 | controller.enqueue({
21 | type: "message_start",
22 | message: {
23 | id: chunk.messageId,
24 | role: "assistant",
25 | content: [],
26 | model: "claude-4-sonnet-20250514",
27 | stop_reason: null,
28 | stop_sequence: null,
29 | usage: {
30 | input_tokens: 0,
31 | output_tokens: 0,
32 | },
33 | },
34 | });
35 | break;
36 | case "step-finish":
37 | controller.enqueue({
38 | type: "message_delta",
39 | delta: {
40 | stop_reason: mapAnthropicStopReason(chunk.finishReason),
41 | stop_sequence: null,
42 | },
43 | usage: {
44 | input_tokens: chunk.usage.promptTokens,
45 | output_tokens: chunk.usage.completionTokens,
46 | },
47 | });
48 | index++;
49 | break;
50 | case "finish":
51 | controller.enqueue({
52 | type: "message_stop",
53 | });
54 | break;
55 | case "text-delta":
56 | controller.enqueue({
57 | type: "content_block_delta",
58 | index: index,
59 | delta: {
60 | type: "text_delta",
61 | text: chunk.textDelta,
62 | },
63 | });
64 | break;
65 | case "tool-call-streaming-start":
66 | controller.enqueue({
67 | type: "content_block_start",
68 | index: index,
69 | content_block: {
70 | type: "tool_use",
71 | id: chunk.toolCallId,
72 | name: chunk.toolName,
73 | input: {},
74 | },
75 | });
76 | break;
77 | case "tool-call-delta":
78 | controller.enqueue({
79 | type: "content_block_delta",
80 | index: index,
81 | delta: {
82 | type: "input_json_delta",
83 | partial_json: chunk.argsTextDelta,
84 | },
85 | });
86 | break;
87 | case "tool-call":
88 | controller.enqueue({
89 | type: "content_block_start",
90 | index: index,
91 | content_block: {
92 | type: "tool_use",
93 | id: chunk.toolCallId,
94 | name: chunk.toolName,
95 | input: chunk.args,
96 | },
97 | });
98 | index++;
99 | break;
100 | case "error":
101 | controller.enqueue({
102 | type: "error",
103 | error: {
104 | type: "api_error",
105 | message:
106 | chunk.error instanceof Error
107 | ? chunk.error.message
108 | : chunk.error as string,
109 | },
110 | });
111 | break;
112 | default:
113 | controller.error(new Error(`Unhandled chunk type: ${chunk.type}`));
114 | }
115 | },
116 | });
117 | stream.pipeTo(transform.writable).catch((err) => {
118 | console.log("WE GOT AN ERROR");
119 | });
120 | return transform.readable;
121 | }
122 |
--------------------------------------------------------------------------------
/src/convert-to-language-model-prompt.ts:
--------------------------------------------------------------------------------
1 | import type {
2 | LanguageModelV1FilePart,
3 | LanguageModelV1ImagePart,
4 | LanguageModelV1Message,
5 | LanguageModelV1TextPart,
6 | } from "@ai-sdk/provider";
7 | import {
8 | InvalidMessageRoleError,
9 | type CoreMessage,
10 | type DataContent,
11 | type FilePart,
12 | type ImagePart,
13 | type TextPart,
14 | } from "ai";
15 | import {
16 | convertDataContentToBase64String,
17 | convertDataContentToUint8Array,
18 | } from "./data-content";
19 | import { detectMimeType, imageMimeTypeSignatures } from "./detect-mimetype";
20 | import { splitDataUrl } from "./split-data-url";
21 |
22 | /**
23 | * Convert a CoreMessage to a LanguageModelV1Message.
24 | *
25 | * @param message The CoreMessage to convert.
26 | * @param downloadedAssets A map of URLs to their downloaded data. Only
27 | * available if the model does not support URLs, null otherwise.
28 | */
export function convertToLanguageModelMessage(
  message: CoreMessage,
  downloadedAssets: Record<
    string,
    { mimeType: string | undefined; data: Uint8Array }
  >
): LanguageModelV1Message {
  const role = message.role;
  switch (role) {
    case "system": {
      // System content is always a plain string; forward it unchanged.
      return {
        role: "system",
        content: message.content,
        // Prefer the current `providerOptions` field, falling back to the
        // legacy `experimental_providerMetadata` alias (same pattern below).
        providerMetadata:
          message.providerOptions ?? message.experimental_providerMetadata,
      };
    }

    case "user": {
      // String shorthand: wrap a bare string in a single text part.
      if (typeof message.content === "string") {
        return {
          role: "user",
          content: [{ type: "text", text: message.content }],
          providerMetadata:
            message.providerOptions ?? message.experimental_providerMetadata,
        };
      }

      return {
        role: "user",
        content: message.content
          .map((part) => convertPartToLanguageModelPart(part, downloadedAssets))
          // remove empty text parts:
          .filter((part) => part.type !== "text" || part.text !== ""),
        providerMetadata:
          message.providerOptions ?? message.experimental_providerMetadata,
      };
    }

    case "assistant": {
      // String shorthand: wrap a bare string in a single text part.
      if (typeof message.content === "string") {
        return {
          role: "assistant",
          content: [{ type: "text", text: message.content }],
          providerMetadata:
            message.providerOptions ?? message.experimental_providerMetadata,
        };
      }

      return {
        role: "assistant",
        content: message.content
          .filter(
            // remove empty text parts:
            (part) => part.type !== "text" || part.text !== ""
          )
          .map((part) => {
            const providerOptions =
              part.providerOptions ?? part.experimental_providerMetadata;

            switch (part.type) {
              case "file": {
                // URLs pass through as-is; raw data is normalized to base64.
                return {
                  type: "file",
                  data:
                    part.data instanceof URL
                      ? part.data
                      : convertDataContentToBase64String(part.data),
                  filename: part.filename,
                  mimeType: part.mimeType,
                  providerMetadata: providerOptions,
                };
              }
              case "reasoning": {
                return {
                  type: "reasoning",
                  text: part.text,
                  signature: part.signature,
                  providerMetadata: providerOptions,
                };
              }
              case "redacted-reasoning": {
                return {
                  type: "redacted-reasoning",
                  data: part.data,
                  providerMetadata: providerOptions,
                };
              }
              case "text": {
                return {
                  type: "text" as const,
                  text: part.text,
                  providerMetadata: providerOptions,
                };
              }
              case "tool-call": {
                return {
                  type: "tool-call" as const,
                  toolCallId: part.toolCallId,
                  toolName: part.toolName,
                  args: part.args,
                  providerMetadata: providerOptions,
                };
              }
            }
          }),
        providerMetadata:
          message.providerOptions ?? message.experimental_providerMetadata,
      };
    }

    case "tool": {
      // Tool messages are lists of tool results; fields map one-to-one.
      return {
        role: "tool",
        content: message.content.map((part) => ({
          type: "tool-result",
          toolCallId: part.toolCallId,
          toolName: part.toolName,
          result: part.result,
          content: part.experimental_content,
          isError: part.isError,
          providerMetadata:
            part.providerOptions ?? part.experimental_providerMetadata,
        })),
        providerMetadata:
          message.providerOptions ?? message.experimental_providerMetadata,
      };
    }

    default: {
      // Exhaustiveness guard: compile-time error if a new role is added.
      const _exhaustiveCheck: never = role;
      throw new InvalidMessageRoleError({ role: _exhaustiveCheck });
    }
  }
}
164 |
165 | /**
166 | * Convert part of a message to a LanguageModelV1Part.
167 | * @param part The part to convert.
168 | * @param downloadedAssets A map of URLs to their downloaded data. Only
169 | * available if the model does not support URLs, null otherwise.
170 | *
171 | * @returns The converted part.
172 | */
function convertPartToLanguageModelPart(
  part: TextPart | ImagePart | FilePart,
  downloadedAssets: Record<
    string,
    { mimeType: string | undefined; data: Uint8Array }
  >
):
  | LanguageModelV1TextPart
  | LanguageModelV1ImagePart
  | LanguageModelV1FilePart {
  // Text parts need no data normalization; convert directly.
  if (part.type === "text") {
    return {
      type: "text",
      text: part.text,
      providerMetadata:
        part.providerOptions ?? part.experimental_providerMetadata,
    };
  }

  // May be refined below by data-URL parsing, download metadata, or
  // magic-byte detection.
  let mimeType: string | undefined = part.mimeType;
  let data: DataContent | URL;
  let content: URL | ArrayBuffer | string;
  let normalizedData: Uint8Array | URL;

  // Image and file parts keep their payload under different property names.
  const type = part.type;
  switch (type) {
    case "image":
      data = part.image;
      break;
    case "file":
      data = part.data;
      break;
    default:
      throw new Error(`Unsupported part type: ${type}`);
  }

  // Attempt to create a URL from the data. If it fails, we can assume the data
  // is not a URL and likely some other sort of data.
  try {
    content = typeof data === "string" ? new URL(data) : (data as ArrayBuffer);
  } catch (error) {
    // Not a parseable URL string: treat it as raw (e.g. base64) data below.
    content = data as ArrayBuffer;
  }

  // If we successfully created a URL, we can use that to normalize the data
  // either by passing it through or converting normalizing the base64 content
  // to a Uint8Array.
  if (content instanceof URL) {
    // If the content is a data URL, we want to convert that to a Uint8Array
    if (content.protocol === "data:") {
      const { mimeType: dataUrlMimeType, base64Content } = splitDataUrl(
        content.toString()
      );

      if (dataUrlMimeType == null || base64Content == null) {
        throw new Error(`Invalid data URL format in part ${type}`);
      }

      // The data URL's own media type wins over any caller-provided one.
      mimeType = dataUrlMimeType;
      normalizedData = convertDataContentToUint8Array(base64Content);
    } else {
      /**
       * If the content is a URL, we should first see if it was downloaded. And if not,
       * we can let the model decide if it wants to support the URL. This also allows
       * for non-HTTP URLs to be passed through (e.g. gs://).
       */
      const downloadedFile = downloadedAssets[content.toString()];
      if (downloadedFile) {
        normalizedData = downloadedFile.data;
        // Only fill the gap — an explicitly provided mimeType is kept.
        mimeType ??= downloadedFile.mimeType;
      } else {
        normalizedData = content;
      }
    }
  } else {
    // Since we know now the content is not a URL, we can attempt to normalize
    // the data assuming it is some sort of data.
    normalizedData = convertDataContentToUint8Array(content);
  }

  // Now that we have the normalized data either as a URL or a Uint8Array,
  // we can create the LanguageModelV1Part.
  switch (type) {
    case "image": {
      // When possible, try to detect the mimetype automatically
      // to deal with incorrect mimetype inputs.
      // When detection fails, use provided mimetype.

      if (normalizedData instanceof Uint8Array) {
        mimeType =
          detectMimeType({
            data: normalizedData,
            signatures: imageMimeTypeSignatures,
          }) ?? mimeType;
      }
      return {
        type: "image",
        image: normalizedData,
        mimeType,
        providerMetadata:
          part.providerOptions ?? part.experimental_providerMetadata,
      };
    }

    case "file": {
      // We should have a mimeType at this point, if not, throw an error.
      if (mimeType == null) {
        throw new Error(`Mime type is missing for file part`);
      }

      return {
        type: "file",
        data:
          normalizedData instanceof Uint8Array
            ? convertDataContentToBase64String(normalizedData)
            : normalizedData,
        filename: part.filename,
        mimeType,
        providerMetadata:
          part.providerOptions ?? part.experimental_providerMetadata,
      };
    }
  }
}
297 |
--------------------------------------------------------------------------------
/src/data-content.ts:
--------------------------------------------------------------------------------
1 | import {
2 | convertBase64ToUint8Array,
3 | convertUint8ArrayToBase64,
4 | } from "@ai-sdk/provider-utils";
5 | import { InvalidDataContentError } from "./invalid-data-content-error";
6 | import { z } from "zod";
7 |
8 | /**
9 | Data content. Can either be a base64-encoded string, a Uint8Array, an ArrayBuffer, or a Buffer.
10 | */
11 | export type DataContent = string | Uint8Array | ArrayBuffer | Buffer;
12 |
13 | /**
14 | @internal
15 | */
16 | export const dataContentSchema: z.ZodType = z.union([
17 | z.string(),
18 | z.instanceof(Uint8Array),
19 | z.instanceof(ArrayBuffer),
20 | z.custom(
21 | // Buffer might not be available in some environments such as CloudFlare:
22 | (value: unknown): value is Buffer =>
23 | globalThis.Buffer?.isBuffer(value) ?? false,
24 | { message: "Must be a Buffer" }
25 | ),
26 | ]);
27 |
28 | /**
29 | Converts data content to a base64-encoded string.
30 |
31 | @param content - Data content to convert.
32 | @returns Base64-encoded string.
33 | */
34 | export function convertDataContentToBase64String(content: DataContent): string {
35 | if (typeof content === "string") {
36 | return content;
37 | }
38 |
39 | if (content instanceof ArrayBuffer) {
40 | return convertUint8ArrayToBase64(new Uint8Array(content));
41 | }
42 |
43 | return convertUint8ArrayToBase64(content);
44 | }
45 |
46 | /**
47 | Converts data content to a Uint8Array.
48 |
49 | @param content - Data content to convert.
50 | @returns Uint8Array.
51 | */
52 | export function convertDataContentToUint8Array(
53 | content: DataContent
54 | ): Uint8Array {
55 | if (content instanceof Uint8Array) {
56 | return content;
57 | }
58 |
59 | if (typeof content === "string") {
60 | try {
61 | return convertBase64ToUint8Array(content);
62 | } catch (error) {
63 | throw new InvalidDataContentError({
64 | message:
65 | "Invalid data content. Content string is not a base64-encoded media.",
66 | content,
67 | cause: error,
68 | });
69 | }
70 | }
71 |
72 | if (content instanceof ArrayBuffer) {
73 | return new Uint8Array(content);
74 | }
75 |
76 | throw new InvalidDataContentError({ content });
77 | }
78 |
79 | /**
80 | * Converts a Uint8Array to a string of text.
81 | *
82 | * @param uint8Array - The Uint8Array to convert.
83 | * @returns The converted string.
84 | */
85 | export function convertUint8ArrayToText(uint8Array: Uint8Array): string {
86 | try {
87 | return new TextDecoder().decode(uint8Array);
88 | } catch (error) {
89 | throw new Error("Error decoding Uint8Array to text");
90 | }
91 | }
92 |
--------------------------------------------------------------------------------
/src/detect-mimetype.ts:
--------------------------------------------------------------------------------
1 | import { convertBase64ToUint8Array } from "@ai-sdk/provider-utils";
2 |
// Magic-number signatures for sniffing image formats. `bytesPrefix` is
// matched against raw bytes; `base64Prefix` is the corresponding prefix of
// the base64 encoding, so base64 strings can be matched without decoding.
export const imageMimeTypeSignatures = [
  {
    mimeType: "image/gif" as const,
    bytesPrefix: [0x47, 0x49, 0x46], // "GIF"
    base64Prefix: "R0lG",
  },
  {
    mimeType: "image/png" as const,
    bytesPrefix: [0x89, 0x50, 0x4e, 0x47], // "\x89PNG"
    base64Prefix: "iVBORw",
  },
  {
    mimeType: "image/jpeg" as const,
    bytesPrefix: [0xff, 0xd8],
    base64Prefix: "/9j/",
  },
  {
    mimeType: "image/webp" as const,
    bytesPrefix: [0x52, 0x49, 0x46, 0x46], // "RIFF" container
    base64Prefix: "UklGRg",
  },
  {
    mimeType: "image/bmp" as const,
    bytesPrefix: [0x42, 0x4d], // "BM"
    base64Prefix: "Qk",
  },
  {
    // TIFF, little-endian ("II*\0")
    mimeType: "image/tiff" as const,
    bytesPrefix: [0x49, 0x49, 0x2a, 0x00],
    base64Prefix: "SUkqAA",
  },
  {
    // TIFF, big-endian ("MM\0*")
    mimeType: "image/tiff" as const,
    bytesPrefix: [0x4d, 0x4d, 0x00, 0x2a],
    base64Prefix: "TU0AKg",
  },
  {
    // NOTE(review): matches "....ftypavif" only when the first box size is
    // exactly 0x20 — AVIF files with a different box size won't be detected.
    mimeType: "image/avif" as const,
    bytesPrefix: [
      0x00, 0x00, 0x00, 0x20, 0x66, 0x74, 0x79, 0x70, 0x61, 0x76, 0x69, 0x66,
    ],
    base64Prefix: "AAAAIGZ0eXBhdmlm",
  },
  {
    // NOTE(review): same fixed-box-size assumption as AVIF above.
    mimeType: "image/heic" as const,
    bytesPrefix: [
      0x00, 0x00, 0x00, 0x20, 0x66, 0x74, 0x79, 0x70, 0x68, 0x65, 0x69, 0x63,
    ],
    base64Prefix: "AAAAIGZ0eXBoZWlj",
  },
] as const;
54 |
// Magic-number signatures for sniffing audio formats; same matching scheme
// as imageMimeTypeSignatures (raw-byte prefix or base64-string prefix).
export const audioMimeTypeSignatures = [
  {
    // NOTE(review): "//s=" contains base64 padding, so it only matches a
    // payload that is exactly the two bytes 0xFF 0xFB — verify against real
    // base64 MP3 samples.
    mimeType: "audio/mpeg" as const,
    bytesPrefix: [0xff, 0xfb],
    base64Prefix: "//s=",
  },
  {
    mimeType: "audio/wav" as const,
    bytesPrefix: [0x52, 0x49, 0x46, 0x46], // "RIFF"
    base64Prefix: "UklGR",
  },
  {
    mimeType: "audio/ogg" as const,
    bytesPrefix: [0x4f, 0x67, 0x67, 0x53], // "OggS"
    base64Prefix: "T2dnUw",
  },
  {
    mimeType: "audio/flac" as const,
    bytesPrefix: [0x66, 0x4c, 0x61, 0x43], // "fLaC"
    base64Prefix: "ZkxhQw",
  },
  {
    // NOTE(review): this AAC prefix looks unusual — confirm against samples.
    mimeType: "audio/aac" as const,
    bytesPrefix: [0x40, 0x15, 0x00, 0x00],
    base64Prefix: "QBUA",
  },
  {
    // NOTE(review): "ftyp" normally appears at byte offset 4, not 0 —
    // this prefix is checked at offset 0 and may miss real MP4 files.
    mimeType: "audio/mp4" as const,
    bytesPrefix: [0x66, 0x74, 0x79, 0x70], // "ftyp"
    base64Prefix: "ZnR5cA",
  },
] as const;
87 |
88 | const stripID3 = (data: Uint8Array | string) => {
89 | const bytes =
90 | typeof data === "string" ? convertBase64ToUint8Array(data) : data;
91 | const id3Size =
92 | ((bytes[6]! & 0x7f) << 21) |
93 | ((bytes[7]! & 0x7f) << 14) |
94 | ((bytes[8]! & 0x7f) << 7) |
95 | (bytes[9]! & 0x7f);
96 |
97 | // The raw MP3 starts here
98 | return bytes.slice(id3Size + 10);
99 | };
100 |
101 | function stripID3TagsIfPresent(data: Uint8Array | string): Uint8Array | string {
102 | const hasId3 =
103 | (typeof data === "string" && data.startsWith("SUQz")) ||
104 | (typeof data !== "string" &&
105 | data.length > 10 &&
106 | data[0] === 0x49 && // 'I'
107 | data[1] === 0x44 && // 'D'
108 | data[2] === 0x33); // '3'
109 |
110 | return hasId3 ? stripID3(data) : data;
111 | }
112 |
113 | export function detectMimeType({
114 | data,
115 | signatures,
116 | }: {
117 | data: Uint8Array | string;
118 | signatures: typeof audioMimeTypeSignatures | typeof imageMimeTypeSignatures;
119 | }): (typeof signatures)[number]["mimeType"] | undefined {
120 | const processedData = stripID3TagsIfPresent(data);
121 |
122 | for (const signature of signatures) {
123 | if (
124 | typeof processedData === "string"
125 | ? processedData.startsWith(signature.base64Prefix)
126 | : processedData.length >= signature.bytesPrefix.length &&
127 | signature.bytesPrefix.every(
128 | (byte, index) => processedData[index] === byte
129 | )
130 | ) {
131 | return signature.mimeType;
132 | }
133 | }
134 |
135 | return undefined;
136 | }
137 |
--------------------------------------------------------------------------------
/src/invalid-data-content-error.ts:
--------------------------------------------------------------------------------
1 | import { AISDKError } from "@ai-sdk/provider";
2 |
// Marker symbol registered globally via Symbol.for, so identity checks work
// even when multiple copies of this module are loaded (where `instanceof`
// would fail across copies).
const name = "AI_InvalidDataContentError";
const marker = `vercel.ai.error.${name}`;
const symbol = Symbol.for(marker);

/**
 * Error thrown when data content (base64 string, Uint8Array, ArrayBuffer,
 * or Buffer) cannot be converted or has an unsupported type.
 */
export class InvalidDataContentError extends AISDKError {
  private readonly [symbol] = true; // used in isInstance

  // The offending content value, kept for diagnostics.
  readonly content: unknown;

  constructor({
    content,
    cause,
    message = `Invalid data content. Expected a base64 string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`,
  }: {
    content: unknown;
    cause?: unknown;
    message?: string;
  }) {
    super({ name, message, cause });

    this.content = content;
  }

  /** Marker-based alternative to `instanceof` (see marker note above). */
  static isInstance(error: unknown): error is InvalidDataContentError {
    return AISDKError.hasMarker(error, marker);
  }
}
30 |
--------------------------------------------------------------------------------
/src/json-schema.ts:
--------------------------------------------------------------------------------
1 | import type { JSONSchema7 } from "json-schema";
2 |
3 | export function providerizeSchema(
4 | provider: string,
5 | schema: JSONSchema7
6 | ): JSONSchema7 {
7 | // Handle primitive types or schemas without properties
8 | if (
9 | !schema ||
10 | typeof schema !== "object" ||
11 | schema.type !== "object" ||
12 | !schema.properties
13 | ) {
14 | return schema;
15 | }
16 |
17 | const processedProperties: Record = {};
18 |
19 | // Recursively process each property
20 | for (const [key, property] of Object.entries(schema.properties)) {
21 | if (typeof property === "object" && property !== null) {
22 | let processedProperty = property as JSONSchema7;
23 |
24 | // Remove uri format for OpenAI and Google
25 | if ((provider === "openai" || provider === "google") && processedProperty.format === "uri") {
26 | processedProperty = { ...processedProperty };
27 | delete processedProperty.format;
28 | }
29 |
30 | if (processedProperty.type === "object") {
31 | // Recursively process nested objects
32 | processedProperties[key] = providerizeSchema(
33 | provider,
34 | processedProperty
35 | );
36 | } else if (
37 | processedProperty.type === "array" &&
38 | processedProperty.items
39 | ) {
40 | // Handle arrays with object items
41 | const items = processedProperty.items;
42 | if (
43 | typeof items === "object" &&
44 | !Array.isArray(items) &&
45 | items.type === "object"
46 | ) {
47 | processedProperties[key] = {
48 | ...processedProperty,
49 | items: providerizeSchema(provider, items as JSONSchema7),
50 | };
51 | } else {
52 | processedProperties[key] = processedProperty;
53 | }
54 | } else {
55 | processedProperties[key] = processedProperty;
56 | }
57 | } else {
58 | // Handle boolean properties (true/false schemas)
59 | processedProperties[key] = property as unknown as JSONSchema7;
60 | }
61 | }
62 |
63 | const result: JSONSchema7 = {
64 | ...schema,
65 | properties: processedProperties,
66 | };
67 |
68 | // Only add required properties for OpenAI
69 | if (provider === "openai") {
70 | result.required = Object.keys(schema.properties);
71 | result.additionalProperties = false;
72 | }
73 |
74 | return result;
75 | }
76 |
--------------------------------------------------------------------------------
/src/main.ts:
--------------------------------------------------------------------------------
1 | // This is just intended to execute Claude Code while setting up a proxy for tokens.
2 |
3 | import { createAnthropic } from "@ai-sdk/anthropic";
4 | import { createAzure } from "@ai-sdk/azure";
5 | import { createGoogleGenerativeAI } from "@ai-sdk/google";
6 | import { createOpenAI } from "@ai-sdk/openai";
7 | import { createXai } from "@ai-sdk/xai";
8 | import { spawn } from "child_process";
9 | import {
10 | createAnthropicProxy,
11 | type CreateAnthropicProxyOptions,
12 | } from "./anthropic-proxy";
13 |
14 | // providers are supported providers to proxy requests by name.
15 | // Model names are split when requested by `/`. The provider
16 | // name is the first part, and the rest is the model name.
17 | const providers: CreateAnthropicProxyOptions["providers"] = {
18 | openai: createOpenAI({
19 | apiKey: process.env.OPENAI_API_KEY,
20 | baseURL: process.env.OPENAI_API_URL,
21 | }),
22 | azure: createAzure({
23 | apiKey: process.env.AZURE_API_KEY,
24 | baseURL: process.env.AZURE_API_URL,
25 | }),
26 | google: createGoogleGenerativeAI({
27 | apiKey: process.env.GOOGLE_API_KEY,
28 | baseURL: process.env.GOOGLE_API_URL,
29 | }),
30 | xai: createXai({
31 | apiKey: process.env.XAI_API_KEY,
32 | baseURL: process.env.XAI_API_URL,
33 | }),
34 | };
35 |
36 | // We exclude this by default, because the Claude Code
37 | // API key is not supported by Anthropic endpoints.
38 | if (process.env.ANTHROPIC_API_KEY) {
39 | providers.anthropic = createAnthropic({
40 | apiKey: process.env.ANTHROPIC_API_KEY,
41 | baseURL: process.env.ANTHROPIC_API_URL,
42 | });
43 | }
44 |
45 | const proxyURL = createAnthropicProxy({
46 | providers,
47 | });
48 |
49 | if (process.env.PROXY_ONLY === "true") {
50 | console.log("Proxy only mode: "+proxyURL);
51 | } else {
52 | const claudeArgs = process.argv.slice(2);
53 | const proc = spawn("claude", claudeArgs, {
54 | env: {
55 | ...process.env,
56 | ANTHROPIC_BASE_URL: proxyURL,
57 | },
58 | stdio: "inherit",
59 | });
60 | proc.on("exit", (code) => {
61 | if (claudeArgs[0] === "-h" || claudeArgs[0] === "--help") {
62 | console.log("\nCustom Models:")
63 | console.log(" --model / e.g. openai/o3");
64 | }
65 |
66 | process.exit(code);
67 | });
68 | }
69 |
70 |
--------------------------------------------------------------------------------
/src/split-data-url.ts:
--------------------------------------------------------------------------------
1 | export function splitDataUrl(dataUrl: string): {
2 | mimeType: string | undefined;
3 | base64Content: string | undefined;
4 | } {
5 | try {
6 | const [header, base64Content] = dataUrl.split(",");
7 | return {
8 | mimeType: header?.split(";")[0]?.split(":")[1],
9 | base64Content,
10 | };
11 | } catch (error) {
12 | return {
13 | mimeType: undefined,
14 | base64Content: undefined,
15 | };
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | // Environment setup & latest features
4 | "lib": ["esnext"],
5 | "target": "ESNext",
6 | "module": "ESNext",
7 | "moduleDetection": "force",
8 | "jsx": "react-jsx",
9 | "allowJs": true,
10 |
11 | // Bundler mode
12 | "moduleResolution": "bundler",
13 | "allowImportingTsExtensions": true,
14 | "verbatimModuleSyntax": true,
15 | "noEmit": true,
16 |
17 | // Best practices
18 | "strict": true,
19 | "skipLibCheck": true,
20 | "noFallthroughCasesInSwitch": true,
21 | "noUncheckedIndexedAccess": true,
22 |
23 | // Some stricter flags (disabled by default)
24 | "noUnusedLocals": false,
25 | "noUnusedParameters": false,
26 | "noPropertyAccessFromIndexSignature": false
27 | }
28 | }
29 |
--------------------------------------------------------------------------------