├── .gitignore
├── LICENSE
├── README.md
├── anthropic.js
├── anthropic_test.js
├── gemini.js
├── gemini_test.js
├── index.d.ts
├── index.js
├── package-lock.json
├── package.json
├── samples
├── anthropic-tools.txt
├── anthropic-tools2.txt
├── anthropic.txt
├── errors.txt
├── gemini-tools.txt
├── gemini-tools2.txt
├── gemini.txt
├── openai-tools.txt
├── openai-tools2.txt
├── openai.txt
├── openrouter.txt
└── output.txt
└── test.js
/.gitignore:
--------------------------------------------------------------------------------
1 | dist/
2 | node_modules/
3 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2024 Anand S
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining
4 | a copy of this software and associated documentation files (the
5 | "Software"), to deal in the Software without restriction, including
6 | without limitation the rights to use, copy, modify, merge, publish,
7 | distribute, sublicense, and/or sell copies of the Software, and to
8 | permit persons to whom the Software is furnished to do so, subject to
9 | the following conditions:
10 |
11 | The above copyright notice and this permission notice shall be
12 | included in all copies or substantial portions of the Software.
13 |
14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # asyncLLM
2 |
3 | [![npm version](https://img.shields.io/npm/v/asyncllm.svg)](https://www.npmjs.com/package/asyncllm)
4 | [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
5 |
6 | Fetch LLM responses across multiple providers as an async iterable.
7 |
8 | ## Features
9 |
10 | - 🚀 Lightweight (~2KB) and dependency-free
11 | - 🔄 Works with multiple LLM providers (OpenAI, Anthropic, Gemini, and more)
12 | - 🌐 Browser and Node.js compatible
13 | - 📦 Easy to use with ES modules
14 |
15 | ## Installation
16 |
17 | ```bash
18 | npm install asyncllm
19 | ```
20 |
21 | ## Anthropic and Gemini Adapters
22 |
23 | Adapters convert OpenAI-style request bodies to the [Anthropic](https://docs.anthropic.com/en/api/messages) or [Gemini](https://ai.google.dev/gemini-api/docs/text-generation?lang=rest) formats. For example:
24 |
25 | ```javascript
26 | import { anthropic } from "https://cdn.jsdelivr.net/npm/asyncllm@2/dist/anthropic.js";
27 | import { gemini } from "https://cdn.jsdelivr.net/npm/asyncllm@2/dist/gemini.js";
28 |
29 | // Create an OpenAI-style request
30 | const body = {
31 | messages: [{ role: "user", content: "Hello, world!" }],
32 | temperature: 0.5,
33 | };
34 |
35 | // Fetch request with the Anthropic API
36 | const anthropicResponse = await fetch("https://api.anthropic.com/v1/messages", {
37 | method: "POST",
38 | headers: { "Content-Type": "application/json", "x-api-key": "YOUR_API_KEY" },
39 | // anthropic() converts the OpenAI-style request to Anthropic's format
40 | body: JSON.stringify(anthropic({ ...body, model: "claude-3-haiku-20240307" })),
41 | }).then((r) => r.json());
42 |
43 | // Fetch request with the Gemini API
44 | const geminiResponse = await fetch(
45 | "https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash-8b:generateContent",
46 | {
47 | method: "POST",
48 | headers: { "Content-Type": "application/json", Authorization: `Bearer YOUR_API_KEY` },
49 | // gemini() converts the OpenAI-style request to Gemini's format
50 | body: JSON.stringify(gemini(body)),
51 | },
52 | ).then((r) => r.json());
53 | ```
54 |
55 | Here are the parameters supported by each provider.
56 |
57 | | OpenAI Parameter | Anthropic | Gemini |
58 | | ----------------------------------- | --------- | ------ |
59 | | messages | Y | Y |
60 | | system message | Y | Y |
61 | | temperature | Y | Y |
62 | | max_tokens | Y | Y |
63 | | top_p | Y | Y |
64 | | stop sequences | Y | Y |
65 | | stream | Y | Y |
66 | | presence_penalty | | Y |
67 | | frequency_penalty | | Y |
68 | | logprobs | | Y |
69 | | top_logprobs | | Y |
70 | | n (multiple candidates) | | Y |
71 | | metadata.user_id | Y | |
72 | | tools/functions | Y | Y |
73 | | tool_choice | Y | Y |
74 | | parallel_tool_calls | Y | |
75 | | response_format.type: "json_object" | | Y |
76 | | response_format.type: "json_schema" | | Y |
77 |
78 | Content types:
79 |
80 | | OpenAI | Anthropic | Gemini |
81 | | ------ | --------- | ------ |
82 | | Text | Y | Y |
83 | | Images | Y | Y |
84 | | Audio | | Y |
85 |
86 | Image Sources
87 |
88 | | OpenAI Parameter | Anthropic | Gemini |
89 | | ---------------- | --------- | ------ |
90 | | Data URI | Y | Y |
91 | | External URLs | | Y |
92 |
93 | ## Streaming
94 |
95 | Call `asyncLLM()` just like you would use `fetch` with any LLM provider with streaming responses.
96 |
97 | - [OpenAI Streaming](https://platform.openai.com/docs/api-reference/chat/streaming). Many providers like Azure, Groq, OpenRouter, etc. follow the OpenAI API.
98 | - [Anthropic Streaming](https://docs.anthropic.com/en/api/messages-streaming)
99 | - [Gemini Streaming](https://ai.google.dev/gemini-api/docs/text-generation?lang=rest#generate-a-text-stream)
100 |
101 | The result is an async generator that yields objects with `content` and `tools` properties (see [LLMEvent](#llmevent) below).
102 |
103 | For example, to update the DOM with the LLM's response:
104 |
105 | ```html
106 |
107 |
108 |
109 |
110 |
111 |
112 |
135 |
136 | ```
137 |
138 | ### Node.js or bundled projects
139 |
140 | ```javascript
141 | import { asyncLLM } from "asyncllm";
142 |
143 | // Usage is the same as in the browser example
144 | ```
145 |
146 | ## Examples
147 |
148 | ### OpenAI streaming
149 |
150 | ```javascript
151 | import { asyncLLM } from "https://cdn.jsdelivr.net/npm/asyncllm@2";
152 |
153 | const body = {
154 | model: "gpt-4o-mini",
155 | // You MUST enable streaming, else the API will return an {error}
156 | stream: true,
157 | messages: [{ role: "user", content: "Hello, world!" }],
158 | };
159 |
160 | for await (const data of asyncLLM("https://api.openai.com/v1/chat/completions", {
161 | method: "POST",
162 | headers: { "Content-Type": "application/json", Authorization: `Bearer ${apiKey}` },
163 | body: JSON.stringify(body),
164 | })) {
165 | console.log(data);
166 | }
167 | ```
168 |
169 | This will log something like this on the console:
170 |
171 | ```js
172 | { content: "", tool: undefined, args: undefined, message: { "id": "chatcmpl-...", ...} }
173 | { content: "Hello", tool: undefined, args: undefined, message: { "id": "chatcmpl-...", ...} }
174 | { content: "Hello!", tool: undefined, args: undefined, message: { "id": "chatcmpl-...", ...} }
175 | { content: "Hello! How", tool: undefined, args: undefined, message: { "id": "chatcmpl-...", ...} }
176 | ...
177 | { content: "Hello! How can I assist you today?", tool: undefined, args: undefined, message: { "id": "chatcmpl-...", ...} }
178 | ```
179 |
180 | ### Anthropic streaming
181 |
182 | The package includes an Anthropic adapter that converts OpenAI-style requests to Anthropic's format,
183 | allowing you to use the same code structure across providers.
184 |
185 | ```javascript
186 | import { asyncLLM } from "https://cdn.jsdelivr.net/npm/asyncllm@2";
187 | import { anthropic } from "https://cdn.jsdelivr.net/npm/asyncllm@2/dist/anthropic.js";
188 |
189 | // You can use the anthropic() adapter to convert OpenAI-style requests to Anthropic's format.
190 | const body = anthropic({
191 | // Same as OpenAI example above
192 | });
193 |
194 | // Or you can use the asyncLLM() function directly with the Anthropic API endpoint.
195 | const body = {
196 | model: "claude-3-haiku-20240307",
197 | // You MUST enable streaming, else the API will return an {error}
198 | stream: true,
199 | max_tokens: 10,
200 | messages: [{ role: "user", content: "What is 2 + 2" }],
201 | };
202 |
203 | for await (const data of asyncLLM("https://api.anthropic.com/v1/messages", {
204 | headers: { "Content-Type": "application/json", "x-api-key": apiKey },
205 | body: JSON.stringify(body),
206 | })) {
207 | console.log(data);
208 | }
209 | ```
210 |
211 | ### Gemini streaming
212 |
213 | The package includes a Gemini adapter that converts OpenAI-style requests to Gemini's format,
214 | allowing you to use the same code structure across providers.
215 |
216 | ```javascript
217 | import { asyncLLM } from "https://cdn.jsdelivr.net/npm/asyncllm@2";
218 | import { gemini } from "https://cdn.jsdelivr.net/npm/asyncllm@2/dist/gemini.js";
219 |
220 | // You can use the gemini() adapter to convert OpenAI-style requests to Gemini's format.
221 | const body = gemini({
222 | // Same as OpenAI example above
223 | });
224 |
225 | // Or you can use the asyncLLM() function directly with the Gemini API endpoint.
226 | const body = {
227 | contents: [{ role: "user", parts: [{ text: "What is 2+2?" }] }],
228 | };
229 |
230 | for await (const data of asyncLLM(
231 | // You MUST use a streaming endpoint, else the API will return an {error}
232 | "https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash-8b:streamGenerateContent?alt=sse",
233 | {
234 | method: "POST",
235 | headers: {
236 | "Content-Type": "application/json",
237 | Authorization: `Bearer ${apiKey}`,
238 | },
239 | body: JSON.stringify(body),
240 | },
241 | )) {
242 | console.log(data);
243 | }
244 | ```
245 |
246 | ### Function Calling
247 |
248 | asyncLLM supports function calling (aka tools). Here's an example with OpenAI:
249 |
250 | ```javascript
251 | for await (const { tools } of asyncLLM("https://api.openai.com/v1/chat/completions", {
252 | method: "POST",
253 | headers: {
254 | "Content-Type": "application/json",
255 | Authorization: `Bearer ${apiKey}`,
256 | },
257 | body: JSON.stringify({
258 | model: "gpt-4o-mini",
259 | stream: true,
260 | messages: [
261 | { role: "system", content: "Get delivery date for order" },
262 | { role: "user", content: "Order ID: 123456" },
263 | ],
264 | tool_choice: "required",
265 | tools: [
266 | {
267 | type: "function",
268 | function: {
269 | name: "get_delivery_date",
270 | parameters: { type: "object", properties: { order_id: { type: "string" } }, required: ["order_id"] },
271 | },
272 | },
273 | ],
274 | }),
275 | })) {
276 | console.log(JSON.stringify(tools));
277 | }
278 | ```
279 |
280 | `tools` is an array of objects with `name`, `id` (for Anthropic and OpenAI, not Gemini), and `args` properties. It streams like this:
281 |
282 | ```json
283 | [{"name":"get_delivery_date","id":"call_F8YHCjnzrrTjfE4YSSpVW2Bc","args":""}]
284 | [{"name":"get_delivery_date","id":"call_F8YHCjnzrrTjfE4YSSpVW2Bc","args":"{\""}]
285 | [{"name":"get_delivery_date","id":"call_F8YHCjnzrrTjfE4YSSpVW2Bc","args":"{\"order"}]
286 | [{"name":"get_delivery_date","id":"call_F8YHCjnzrrTjfE4YSSpVW2Bc","args":"{\"order_id"}]
287 | [{"name":"get_delivery_date","id":"call_F8YHCjnzrrTjfE4YSSpVW2Bc","args":"{\"order_id\":\""}]
288 | [{"name":"get_delivery_date","id":"call_F8YHCjnzrrTjfE4YSSpVW2Bc","args":"{\"order_id\":\"123"}]
289 | [{"name":"get_delivery_date","id":"call_F8YHCjnzrrTjfE4YSSpVW2Bc","args":"{\"order_id\":\"123456"}]
290 | [{"name":"get_delivery_date","id":"call_F8YHCjnzrrTjfE4YSSpVW2Bc","args":"{\"order_id\":\"123456\"}"}]
291 | ```
292 |
293 | Use a library like [partial-json](https://www.npmjs.com/package/partial-json) to parse the `args` incrementally.
294 |
295 | ### Streaming Config
296 |
297 | asyncLLM accepts a `config` object with the following properties:
298 |
299 | - `fetch`: Custom fetch implementation (defaults to global `fetch`).
300 | - `onResponse`: Async callback function that receives the Response object before streaming begins. If the callback returns a promise, it will be awaited before continuing the stream.
301 |
302 | Here's how you can use a custom fetch implementation:
303 |
304 | ```javascript
305 | import { asyncLLM } from "https://cdn.jsdelivr.net/npm/asyncllm@2";
306 |
307 | const body = {
308 | // Same as OpenAI example above
309 | };
310 |
311 | // Optional configuration. You can ignore it for most use cases.
312 | const config = {
313 | onResponse: async (response) => {
314 | console.log(response.status, response.headers);
315 | },
316 | // You can use a custom fetch implementation if needed
317 | fetch: fetch,
318 | };
319 |
320 | for await (const { content } of asyncLLM(
321 | "https://api.openai.com/v1/chat/completions",
322 | {
323 | method: "POST",
324 | headers: { "Content-Type": "application/json", Authorization: `Bearer ${apiKey}` },
325 | body: JSON.stringify(body),
326 | },
327 | config,
328 | )) {
329 | console.log(content);
330 | }
331 | ```
332 |
333 | ## Streaming from text
334 |
335 | You can parse streamed SSE events from a text string (e.g. from a cached response) using the provided `fetchText` helper:
336 |
337 | ```javascript
338 | import { asyncLLM } from "https://cdn.jsdelivr.net/npm/asyncllm@2";
339 | import { fetchText } from "https://cdn.jsdelivr.net/npm/asyncsse@1/dist/fetchtext.js";
340 |
341 | const text = `
342 | data: {"candidates": [{"content": {"parts": [{"text": "2"}],"role": "model"}}]}
343 |
344 | data: {"candidates": [{"content": {"parts": [{"text": " + 2 = 4\\n"}],"role": "model"}}]}
345 |
346 | data: {"candidates": [{"content": {"parts": [{"text": ""}],"role": "model"}}]}
347 | `;
348 |
349 | // Stream events from text
350 | for await (const event of asyncLLM(text, {}, { fetch: fetchText })) {
351 | console.log(event);
352 | }
353 | ```
354 |
355 | This outputs:
356 |
357 | ```
358 | { data: "Hello" }
359 | { data: "World" }
360 | ```
361 |
362 | This is particularly useful for testing SSE parsing without making actual HTTP requests.
363 |
364 | ### Error handling
365 |
366 | If an error occurs, it will be yielded in the `error` property. For example:
367 |
368 | ```javascript
369 | for await (const { content, error } of asyncLLM("https://api.openai.com/v1/chat/completions", {
370 | method: "POST",
371 | // ...
372 | })) {
373 | if (error) console.error(error);
374 | else console.log(content);
375 | }
376 | ```
377 |
378 | The `error` property is set if:
379 |
380 | - The underlying API (e.g. OpenAI, Anthropic, Gemini) returns an error in the response (e.g. `error.message` or `message.error` or `error`)
381 | - The fetch request fails (e.g. network error)
382 | - The response body cannot be parsed as JSON
383 |
384 | ### `asyncLLM(request: string | Request, options?: RequestInit, config?: SSEConfig): AsyncGenerator`
385 |
386 | Fetches streaming responses from LLM providers and yields events.
387 |
388 | - `request`: The URL or Request object for the LLM API endpoint
389 | - `options`: Optional [fetch options](https://developer.mozilla.org/en-US/docs/Web/API/fetch#parameters)
390 | - `config`: Optional configuration object for SSE handling
391 | - `fetch`: Custom fetch implementation (defaults to global fetch)
392 | - `onResponse`: Async callback function that receives the Response object before streaming begins. If the callback returns a promise, it will be awaited before continuing the stream.
393 |
394 | Returns an async generator that yields [`LLMEvent` objects](#llmevent).
395 |
396 | #### LLMEvent
397 |
398 | - `content`: The text content of the response
399 | - `tools`: Array of tool call objects with:
400 | - `name`: The name of the tool being called
401 | - `args`: The arguments for the tool call as a JSON-encoded string, e.g. `{"order_id":"123456"}`
402 | - `id`: Optional unique identifier for the tool call (e.g. OpenAI's `call_F8YHCjnzrrTjfE4YSSpVW2Bc` or Anthropic's `toolu_01T1x1fJ34qAmk2tNTrN7Up6`. Gemini does not return an id.)
403 | - `message`: The raw message object from the LLM provider (may include id, model, usage stats, etc.)
404 | - `error`: Error message if the request fails
405 |
406 | ## Changelog
407 |
408 | - 2.1.2: Update repo links
409 | - 2.1.1: Document standalone adapter usage
410 | - 2.1.0: Added `id` to tools to support unique tool call identifiers from providers
411 | - 2.0.1: Multiple tools support.
412 |   - Breaking change: `tool` and `args` are no longer part of the response. Instead, it has `tools`, an array of `{ name, args }`
413 | - Fixed Gemini adapter to return `toolConfig` instead of `toolsConfig`
414 | - 1.2.2: Added streaming from text documentation via `config.fetch`. Upgrade to asyncSSE 1.3.1 (bug fix).
415 | - 1.2.1: Added `config.fetch` for custom fetch implementation
416 | - 1.2.0: Added `config.onResponse(response)` that receives the Response object before streaming begins
417 | - 1.1.3: Ensure `max_tokens` for Anthropic. Improve error handling
418 | - 1.1.1: Added [Anthropic adapter](#anthropic)
419 | - 1.1.0: Added [Gemini adapter](#gemini)
420 | - 1.0.0: Initial release with [asyncLLM](#asyncllm) and [LLMEvent](#llmevent)
421 |
422 | ## Contributing
423 |
424 | Contributions are welcome! Please feel free to submit a Pull Request.
425 |
426 | ## License
427 |
428 | This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
429 |
--------------------------------------------------------------------------------
/anthropic.js:
--------------------------------------------------------------------------------
/**
 * Convert an OpenAI-style chat completion body to an Anthropic Messages API body.
 * @param {Object} body - OpenAI-style request body
 * @returns {Object} Anthropic-style request body
 */
export function anthropic(body) {
  // Anthropic takes the system prompt as a top-level `system` field.
  // Note: only the first system message is used; any additional ones are dropped.
  const systemMessage = body.messages.find(({ role }) => role === "system");

  // Translate one OpenAI content part into an Anthropic content block.
  const toBlock = ({ type, text, image_url }) => {
    if (type === "text") return { type: "text", text };
    if (type === "image_url") return { type: "image", source: anthropicSourceFromURL(image_url.url) };
    // Audio and unknown part types are unsupported by Anthropic.
  };

  // Convert every non-system message, mapping multimodal arrays block-by-block.
  const messages = body.messages
    .filter(({ role }) => role !== "system")
    .map((msg) => ({
      role: msg.role,
      content: Array.isArray(msg.content) ? msg.content.map(toBlock) : msg.content,
    }));

  // Anthropic disables parallel tool use via a flag nested inside tool_choice.
  const parallelToolCalls =
    typeof body.parallel_tool_calls === "boolean"
      ? { disable_parallel_tool_use: !body.parallel_tool_calls }
      : {};

  // Map OpenAI parameters onto Anthropic equivalents.
  // max_tokens is required by Anthropic, so default it to 4096.
  const params = { model: body.model, max_tokens: body.max_tokens ?? 4096 };
  if (body.metadata?.user_id) params.metadata = { user_id: body.metadata.user_id };
  if (typeof body.stream === "boolean") params.stream = body.stream;
  if (typeof body.temperature === "number") params.temperature = body.temperature;
  if (typeof body.top_p === "number") params.top_p = body.top_p;
  // A single stop string becomes a one-element stop_sequences array.
  if (typeof body.stop === "string") params.stop_sequences = [body.stop];
  else if (Array.isArray(body.stop)) params.stop_sequences = body.stop;

  // OpenAI tool_choice -> Anthropic tool_choice. "none" is simply omitted.
  // Anthropic has no JSON mode, so response_format is intentionally ignored.
  const choice = body.tool_choice;
  if (choice === "auto") params.tool_choice = { type: "auto", ...parallelToolCalls };
  else if (choice === "required") params.tool_choice = { type: "any", ...parallelToolCalls };
  else if (typeof choice === "object")
    params.tool_choice = { type: "tool", name: choice.function?.name, ...parallelToolCalls };

  // OpenAI function definitions -> Anthropic tool format.
  const tools = body.tools?.map(({ function: fn }) => ({
    name: fn.name,
    description: fn.description,
    input_schema: fn.parameters,
  }));

  // Assemble the request, leaving out empty optional sections.
  return {
    ...(systemMessage ? { system: systemMessage.content } : {}),
    messages,
    ...params,
    ...(body.tools ? { tools } : {}),
  };
}
79 |
// Convert a data: URI into Anthropic's base64 image source descriptor.
// External (non-data:) URLs are not supported by Anthropic, so they yield undefined.
const anthropicSourceFromURL = (url) => {
  if (!url.startsWith("data:")) return;
  const [header, base64Data] = url.split(",");
  return {
    type: "base64",
    media_type: header.replace("data:", "").replace(";base64", ""),
    data: base64Data,
  };
};
91 |
--------------------------------------------------------------------------------
/anthropic_test.js:
--------------------------------------------------------------------------------
1 | import { anthropic } from "./anthropic.js";
2 |
// Minimal deep-equality check: compares the two values by their JSON
// serialization (so key order matters) and throws on mismatch. The detailed
// diff message is built lazily, only when no custom message is supplied.
function assertEquals(actual, expected, message) {
  if (JSON.stringify(actual) !== JSON.stringify(expected)) {
    throw new Error(
      message || `Expected:\n${JSON.stringify(expected, null, 2)}. Actual:\n${JSON.stringify(actual, null, 2)}`,
    );
  }
}
9 |
// 1. System message handling: the system message is hoisted to the top-level
// `system` field, removed from `messages`, and max_tokens defaults to 4096.
Deno.test("anthropic - system message handling", () => {
  const input = {
    messages: [
      { role: "system", content: "You are helpful" },
      { role: "user", content: "Hi" },
    ],
  };

  const expected = {
    system: "You are helpful",
    messages: [{ role: "user", content: "Hi" }],
    max_tokens: 4096,
  };

  assertEquals(anthropic(input), expected);
});

// 2. Basic message conversion: plain string content passes through unchanged.
Deno.test("anthropic - basic message conversion", () => {
  const input = {
    messages: [{ role: "user", content: "Hello" }],
  };

  const expected = {
    messages: [{ role: "user", content: "Hello" }],
    max_tokens: 4096,
  };

  assertEquals(anthropic(input), expected);
});

// 2b. Multimodal content handling: an OpenAI image_url data URI becomes an
// Anthropic base64 image source block; text parts are kept as-is.
Deno.test("anthropic - multimodal content", () => {
  const input = {
    messages: [
      {
        role: "user",
        content: [
          { type: "text", text: "What's in this image?" },
          {
            type: "image_url",
            image_url: { url: "data:image/jpeg;base64,abc123" },
          },
        ],
      },
    ],
  };

  const expected = {
    messages: [
      {
        role: "user",
        content: [
          { type: "text", text: "What's in this image?" },
          {
            type: "image",
            source: {
              type: "base64",
              media_type: "image/jpeg",
              data: "abc123",
            },
          },
        ],
      },
    ],
    max_tokens: 4096,
  };

  assertEquals(anthropic(input), expected);
});

// 3. Parameter conversion: OpenAI params map to Anthropic equivalents and an
// array `stop` becomes `stop_sequences`.
// NOTE(review): "claude-3-5-sonnet-20241002" looks like a typo of
// "claude-3-5-sonnet-20241022" — harmless here since the model id is passed
// through verbatim, but worth confirming.
Deno.test("anthropic - parameter conversion", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    model: "claude-3-5-sonnet-20241002",
    max_tokens: 100,
    metadata: { user_id: "123" },
    stream: true,
    temperature: 0.7,
    top_p: 0.9,
    stop: ["END"],
  };

  const expected = {
    messages: [{ role: "user", content: "Hi" }],
    model: "claude-3-5-sonnet-20241002",
    max_tokens: 100,
    metadata: { user_id: "123" },
    stream: true,
    temperature: 0.7,
    top_p: 0.9,
    stop_sequences: ["END"],
  };

  assertEquals(anthropic(input), expected);
});

// 3b. Array stop sequences are passed through as-is.
Deno.test("anthropic - array stop sequences", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    stop: ["STOP", "END"],
  };

  const expected = {
    messages: [{ role: "user", content: "Hi" }],
    max_tokens: 4096,
    stop_sequences: ["STOP", "END"],
  };

  assertEquals(anthropic(input), expected);
});

// 4. Tool handling: tool_choice "auto" maps to { type: "auto" } and
// parallel_tool_calls: true maps to disable_parallel_tool_use: false;
// function definitions become Anthropic tools with input_schema.
Deno.test("anthropic - tool calling configurations", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    tool_choice: "auto",
    parallel_tool_calls: true,
    tools: [
      {
        function: {
          name: "get_weather",
          description: "Get weather info",
          parameters: {
            type: "object",
            properties: { location: { type: "string" } },
          },
        },
      },
    ],
  };

  const expected = {
    messages: [{ role: "user", content: "Hi" }],
    max_tokens: 4096,
    tool_choice: { type: "auto", disable_parallel_tool_use: false },
    tools: [
      {
        name: "get_weather",
        description: "Get weather info",
        input_schema: {
          type: "object",
          properties: { location: { type: "string" } },
        },
      },
    ],
  };

  assertEquals(anthropic(input), expected);
});

// 4b. Specific tool choice: an object tool_choice maps to
// { type: "tool", name } targeting that one function.
Deno.test("anthropic - specific tool choice", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    tool_choice: { function: { name: "get_weather" } },
  };

  const expected = {
    messages: [{ role: "user", content: "Hi" }],
    max_tokens: 4096,
    tool_choice: { type: "tool", name: "get_weather" },
  };

  assertEquals(anthropic(input), expected);
});
179 |
--------------------------------------------------------------------------------
/gemini.js:
--------------------------------------------------------------------------------
/**
 * Convert an OpenAI-style chat completion body to a Gemini generateContent body.
 * @param {Object} body - OpenAI-style request body
 * @returns {Object} Gemini-style request body
 */
export function gemini(body) {
  // Gemini carries the system prompt in a dedicated systemInstruction field.
  // Note: only the first system message is used; any additional ones are dropped.
  const systemMessage = body.messages.find(({ role }) => role === "system");

  // Translate one OpenAI content part into a Gemini part.
  const toPart = ({ type, text, image_url, input_audio }) => {
    if (type === "text") return { text };
    if (type === "image_url") return geminiPartFromURL(image_url.url);
    if (type === "input_audio") return geminiPartFromURL(input_audio.data);
    // Unknown part types fall through as undefined entries, as before.
  };

  // Gemini names the assistant role "model" and wraps content in `parts`.
  const contents = body.messages
    .filter(({ role }) => role !== "system")
    .map((msg) => ({
      role: msg.role === "assistant" ? "model" : msg.role,
      parts: Array.isArray(msg.content) ? msg.content.map(toPart) : [{ text: msg.content }],
    }));

  // Map OpenAI sampling/limit parameters onto generationConfig, adding each
  // key only when the caller supplied a usable value.
  const generationConfig = {};
  if (typeof body.temperature === "number") generationConfig.temperature = body.temperature;
  if (typeof body.max_tokens === "number") generationConfig.maxOutputTokens = body.max_tokens;
  // max_completion_tokens is the newer OpenAI name and wins over max_tokens.
  if (typeof body.max_completion_tokens === "number") generationConfig.maxOutputTokens = body.max_completion_tokens;
  if (typeof body.top_p === "number") generationConfig.topP = body.top_p;
  if (typeof body.presence_penalty === "number") generationConfig.presencePenalty = body.presence_penalty;
  if (typeof body.frequency_penalty === "number") generationConfig.frequencyPenalty = body.frequency_penalty;
  if (typeof body.logprobs === "boolean") generationConfig.responseLogprobs = body.logprobs;
  if (typeof body.top_logprobs === "number") generationConfig.logprobs = body.top_logprobs;
  if (typeof body.n === "number") generationConfig.candidateCount = body.n;
  // A single stop string becomes a one-element stopSequences array.
  if (typeof body.stop === "string") generationConfig.stopSequences = [body.stop];
  else if (Array.isArray(body.stop)) generationConfig.stopSequences = body.stop;
  // JSON mode and JSON-schema-constrained responses.
  const responseType = body.response_format?.type;
  if (responseType === "json_object") generationConfig.responseMimeType = "application/json";
  else if (responseType === "json_schema") {
    generationConfig.responseMimeType = "application/json";
    generationConfig.responseSchema = geminiSchema(structuredClone(body.response_format?.json_schema?.schema));
  }

  // OpenAI tool_choice -> Gemini function calling mode.
  const modes = { auto: "AUTO", required: "ANY", none: "NONE" };
  let toolConfig = {};
  if (typeof body.tool_choice === "string" && modes[body.tool_choice]) {
    toolConfig = { function_calling_config: { mode: modes[body.tool_choice] } };
  } else if (typeof body.tool_choice === "object") {
    // A specific function request forces ANY mode restricted to that name.
    toolConfig = {
      function_calling_config: {
        mode: "ANY",
        allowed_function_names: [body.tool_choice.function?.name],
      },
    };
  }

  // OpenAI function definitions -> Gemini functionDeclarations, with schemas
  // cleaned of keywords Gemini rejects.
  const tools = body.tools
    ? {
        functionDeclarations: body.tools.map(({ function: fn }) => ({
          name: fn.name,
          description: fn.description,
          parameters: geminiSchema(structuredClone(fn.parameters)),
        })),
      }
    : {};

  // Assemble the request, leaving out empty optional sections.
  return {
    ...(systemMessage ? { systemInstruction: { parts: [{ text: systemMessage.content }] } } : {}),
    contents,
    ...(Object.keys(generationConfig).length > 0 ? { generationConfig } : {}),
    ...(body.tool_choice ? { toolConfig } : {}),
    ...(body.tools ? { tools } : {}),
  };
}
91 |
// Build a Gemini content part from a URL. data: URIs are decomposed into
// inlineData (mimeType + base64 payload); any other URL is referenced via fileData.
const geminiPartFromURL = (url) => {
  if (!url.startsWith("data:")) return { fileData: { fileUri: url } };
  const [header, data] = url.split(",");
  const mimeType = header.replace("data:", "").replace(";base64", "");
  return { inlineData: { mimeType, data } };
};
105 |
// Gemini rejects `additionalProperties` in JSON schemas. Recursively strip it,
// mutating `obj` in place, and return the same object for call-chaining.
function geminiSchema(obj) {
  if (Array.isArray(obj)) {
    for (const item of obj) geminiSchema(item);
  } else if (obj && typeof obj === "object") {
    for (const key of Object.keys(obj)) {
      if (key === "additionalProperties") delete obj[key];
      else geminiSchema(obj[key]);
    }
  }
  return obj;
}
117 |
--------------------------------------------------------------------------------
/gemini_test.js:
--------------------------------------------------------------------------------
1 | import { gemini } from "./gemini.js";
2 |
// Minimal deep-equality check: compares the two values by their JSON
// serialization (so key order matters) and throws on mismatch. The detailed
// diff message is built lazily, only when no custom message is supplied.
function assertEquals(actual, expected, message) {
  if (JSON.stringify(actual) !== JSON.stringify(expected)) {
    throw new Error(
      message || `Expected:\n${JSON.stringify(expected, null, 2)}. Actual:\n${JSON.stringify(actual, null, 2)}`,
    );
  }
}
9 |
// Simplest case: one user message becomes one Gemini `contents` entry.
Deno.test("gemini - basic message conversion", () => {
  const input = {
    messages: [{ role: "user", content: "Hello" }],
  };

  const expected = {
    contents: [{ role: "user", parts: [{ text: "Hello" }] }],
  };

  assertEquals(gemini(input), expected);
});

// System messages are lifted out of `contents` into `systemInstruction`.
Deno.test("gemini - system message handling", () => {
  const input = {
    messages: [
      { role: "system", content: "You are helpful" },
      { role: "user", content: "Hi" },
    ],
  };

  const expected = {
    systemInstruction: { parts: [{ text: "You are helpful" }] },
    contents: [{ role: "user", parts: [{ text: "Hi" }] }],
  };

  assertEquals(gemini(input), expected);
});

// OpenAI's "assistant" role maps to Gemini's "model" role.
Deno.test("gemini - assistant message conversion", () => {
  const input = {
    messages: [
      { role: "user", content: "Hi" },
      { role: "assistant", content: "Hello" },
    ],
  };

  const expected = {
    contents: [
      { role: "user", parts: [{ text: "Hi" }] },
      { role: "model", parts: [{ text: "Hello" }] },
    ],
  };

  assertEquals(gemini(input), expected);
});

// Mixed content parts: data: URLs become inlineData (mimeType + base64 payload),
// while plain URLs become fileData references.
Deno.test("gemini - multimodal content", () => {
  const input = {
    messages: [
      {
        role: "user",
        content: [
          { type: "text", text: "What's in this image?" },
          {
            type: "image_url",
            image_url: { url: "data:image/jpeg;base64,abc123" },
          },
          {
            type: "input_audio",
            input_audio: { data: "https://example.com/audio.mp3" },
          },
        ],
      },
    ],
  };

  const expected = {
    contents: [
      {
        role: "user",
        parts: [
          { text: "What's in this image?" },
          { inlineData: { mimeType: "image/jpeg", data: "abc123" } },
          { fileData: { fileUri: "https://example.com/audio.mp3" } },
        ],
      },
    ],
  };

  assertEquals(gemini(input), expected);
});
91 |
// Every OpenAI sampling/penalty/count knob maps to its camelCased
// generationConfig counterpart; `json_object` selects the JSON MIME type.
Deno.test("gemini - generation config parameters", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    temperature: 0.7,
    max_tokens: 100,
    top_p: 0.9,
    presence_penalty: 0.5,
    frequency_penalty: 0.5,
    logprobs: true,
    top_logprobs: 3,
    n: 2,
    stop: ["END"],
    response_format: { type: "json_object" },
  };

  const expected = {
    contents: [{ role: "user", parts: [{ text: "Hi" }] }],
    generationConfig: {
      temperature: 0.7,
      maxOutputTokens: 100,
      topP: 0.9,
      presencePenalty: 0.5,
      frequencyPenalty: 0.5,
      responseLogprobs: true,
      logprobs: 3,
      candidateCount: 2,
      stopSequences: ["END"],
      responseMimeType: "application/json",
    },
  };

  assertEquals(gemini(input), expected);
});
125 |
// Tool declarations move under tools.functionDeclarations, their schemas are
// cleaned of additionalProperties, and "auto" becomes mode AUTO.
Deno.test("gemini - tool calling configurations", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    tool_choice: "auto",
    tools: [
      {
        function: {
          name: "get_weather",
          description: "Get weather info",
          parameters: {
            type: "object",
            properties: { location: { type: "string" } },
            required: ["location"],
            additionalProperties: false,
          },
        },
      },
    ],
  };

  const expected = {
    contents: [{ role: "user", parts: [{ text: "Hi" }] }],
    toolConfig: { function_calling_config: { mode: "AUTO" } },
    tools: {
      functionDeclarations: [
        {
          name: "get_weather",
          description: "Get weather info",
          parameters: {
            type: "object",
            properties: { location: { type: "string" } },
            required: ["location"],
          },
        },
      ],
    },
  };

  assertEquals(gemini(input), expected);
});

// Naming a specific function forces mode ANY restricted to that function.
Deno.test("gemini - specific tool choice", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    tool_choice: { function: { name: "get_weather" } },
  };

  const expected = {
    contents: [{ role: "user", parts: [{ text: "Hi" }] }],
    toolConfig: {
      function_calling_config: {
        mode: "ANY",
        allowed_function_names: ["get_weather"],
      },
    },
  };

  assertEquals(gemini(input), expected);
});

// json_schema response format sets responseSchema (stripped of
// additionalProperties) alongside the JSON MIME type.
Deno.test("gemini - json schema response format", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    response_format: {
      type: "json_schema",
      json_schema: {
        schema: {
          type: "object",
          properties: {
            name: { type: "string" },
            age: { type: "number" },
            additionalProperties: false,
          },
        },
      },
    },
  };

  const expected = {
    contents: [{ role: "user", parts: [{ text: "Hi" }] }],
    generationConfig: {
      responseMimeType: "application/json",
      responseSchema: {
        type: "object",
        properties: { name: { type: "string" }, age: { type: "number" } },
      },
    },
  };

  assertEquals(gemini(input), expected);
});

// "required" maps to mode ANY with no function-name restriction.
Deno.test("gemini - required tool choice", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    tool_choice: "required",
  };

  const expected = {
    contents: [{ role: "user", parts: [{ text: "Hi" }] }],
    toolConfig: { function_calling_config: { mode: "ANY" } },
  };

  assertEquals(gemini(input), expected);
});

// "none" maps straight to mode NONE.
Deno.test("gemini - none tool choice", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    tool_choice: "none",
  };

  const expected = {
    contents: [{ role: "user", parts: [{ text: "Hi" }] }],
    toolConfig: { function_calling_config: { mode: "NONE" } },
  };

  assertEquals(gemini(input), expected);
});
245 |
// A bare string `stop` is wrapped into a one-element stopSequences array.
Deno.test("gemini - string stop sequence", () => {
  const input = { messages: [{ role: "user", content: "Hi" }], stop: "STOP" };

  const expected = {
    contents: [{ role: "user", parts: [{ text: "Hi" }] }],
    generationConfig: { stopSequences: ["STOP"] },
  };

  assertEquals(gemini(input), expected);
});

// The newer max_completion_tokens field also maps to maxOutputTokens.
Deno.test("gemini - max_completion_tokens parameter", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    max_completion_tokens: 150,
  };

  const expected = {
    contents: [{ role: "user", parts: [{ text: "Hi" }] }],
    generationConfig: { maxOutputTokens: 150 },
  };

  assertEquals(gemini(input), expected);
});
270 |
--------------------------------------------------------------------------------
/index.d.ts:
--------------------------------------------------------------------------------
1 | import { SSEConfig } from "asyncsse";
2 |
// One (possibly partial) tool call accumulated across stream events.
export interface LLMTool {
  id?: string; // provider-assigned call id, when the provider supplies one
  name?: string; // function name
  args?: string; // JSON argument string, concatenated from streamed deltas
}
8 |
9 | export interface LLMEvent {
10 | content?: string;
11 | tools?: LLMTool[];
12 | error?: string;
13 | message?: Record;
14 | }
15 |
16 | export function asyncLLM(
17 | request: string | Request,
18 | options?: RequestInit,
19 | config?: SSEConfig
20 | ): AsyncGenerator;
21 |
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
1 | import { asyncSSE } from "asyncsse";
2 |
/**
 * asyncLLM yields events when streaming from a streaming LLM endpoint.
 *
 * @param {string | Request} request - URL or Request passed to fetch
 * @param {RequestInit} [options] - fetch options (method, headers, body, ...)
 * @param {SSEConfig} [config] - asyncsse config, e.g. { onResponse }
 * @returns {AsyncGenerator<LLMEvent, void, unknown>}
 *
 * @example
 * for await (const event of asyncLLM("https://api.openai.com/v1/chat/completions", {
 *   method: "POST",
 *   headers: {
 *     "Content-Type": "application/json",
 *     "Authorization": "Bearer YOUR_API_KEY",
 *   },
 *   body: JSON.stringify({
 *     model: "gpt-3.5-turbo",
 *     messages: [{ role: "user", content: "Hello, world!" }],
 *   }),
 * }, {
 *   onResponse: async (response) => {
 *     console.log(response.status, response.headers);
 *   },
 * })) {
 *   console.log(event);
 * }
 */
export async function* asyncLLM(request, options = {}, config = {}) {
  let content,
    tools = [];

  // Most recent tool call; creates a placeholder if args arrive before a name.
  function latestTool() {
    if (!tools.length) tools.push({});
    return tools.at(-1);
  }

  for await (const event of asyncSSE(request, options, config)) {
    // OpenAI and Cloudflare AI Workers use "[DONE]" to indicate the end of the stream
    if (event.data === "[DONE]") break;

    // asyncSSE already reported a transport-level error; pass it through.
    if (event.error) {
      yield event;
      continue;
    }

    let message;
    try {
      message = JSON.parse(event.data);
    } catch (error) {
      yield { error: error.message, data: event.data };
      continue;
    }

    // Handle errors. asyncSSE yields { error: ... } if the fetch fails.
    // OpenAI, Anthropic, and Gemini return {"error": ...}.
    // OpenRouter returns {"message":{"error": ...}}.
    const error = message.message?.error ?? message.error?.message ?? message.error ?? event.error;
    if (error) {
      yield { error };
      continue;
    }

    // Attempt to parse with each provider's format; stop at the first that
    // yields new content or tool-call deltas.
    let hasNewData = false;
    for (const parser of Object.values(providers)) {
      const extract = parser(message);
      hasNewData = !isEmpty(extract.content) || extract.tools.length > 0;
      if (!isEmpty(extract.content)) content = (content ?? "") + extract.content;
      for (const { name, args, id } of extract.tools) {
        // A name starts a new tool call; args append to the latest one.
        if (!isEmpty(name)) {
          const tool = { name };
          if (!isEmpty(id)) tool.id = id;
          tools.push(tool);
        }
        if (!isEmpty(args)) {
          const tool = latestTool();
          tool.args = (tool.args ?? "") + args;
        }
      }
      if (hasNewData) break;
    }

    if (hasNewData) {
      // Only add `content` once some text has actually arrived; the original
      // `{ content, message }` emitted a `content: undefined` property and
      // made the guard below dead code.
      const data = { message };
      if (!isEmpty(content)) data.content = content;
      if (tools.length) data.tools = tools;
      yield data;
    }
  }
}
93 |
// Return the delta from each message as { content, tools }
// content delta is string | undefined
// tools delta is [{ name?: string, args?: string }] | []
const providers = {
  // Azure, OpenRouter, Groq, and a few others follow OpenAI's format
  openai: (m) => {
    const delta = m.choices?.[0]?.delta;
    const calls = delta?.tool_calls ?? [];
    return {
      content: delta?.content,
      tools: calls.map(({ id, function: fn }) => ({ id, name: fn.name, args: fn.arguments })),
    };
  },
  anthropic: (m) => {
    // A content_block with a name opens a tool call; partial_json extends it.
    let tools = [];
    if (!isEmpty(m.content_block?.name)) tools = [{ name: m.content_block.name, id: m.content_block.id }];
    else if (!isEmpty(m.delta?.partial_json)) tools = [{ args: m.delta.partial_json }];
    return { content: m.delta?.text, tools };
  },
  gemini: (m) => {
    const parts = m.candidates?.[0]?.content?.parts ?? [];
    const tools = [];
    for (const part of parts) {
      const call = part.functionCall;
      if (call) tools.push({ name: call.name, args: JSON.stringify(call.args) });
    }
    return { content: parts[0]?.text, tools };
  },
  cloudflare: (m) => {
    return { content: m.response, tools: [] };
  },
};

// true only for null/undefined — empty strings and 0 are real deltas
const isEmpty = (value) => value == null;
129 |
--------------------------------------------------------------------------------
/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "asyncllm",
3 | "version": "2.1.1",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "asyncllm",
9 | "version": "2.1.1",
10 | "license": "MIT",
11 | "dependencies": {
12 | "asyncsse": "^1.3.1"
13 | },
14 | "engines": {
15 | "node": ">=14.0.0"
16 | }
17 | },
18 | "node_modules/asyncsse": {
19 | "version": "1.3.1",
20 | "resolved": "https://registry.npmjs.org/asyncsse/-/asyncsse-1.3.1.tgz",
21 | "integrity": "sha512-sPd7NAmOKAl+optdLYe0zyaXqMf4PCYNtkHvSddRs4cZs40i/zdG+1h8qrkg2QKoUsRmwoUaTuv+5iKkT5iv7w==",
22 | "license": "MIT",
23 | "engines": {
24 | "node": ">=14.0.0"
25 | }
26 | }
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "asyncllm",
3 | "version": "2.1.2",
4 | "description": "Fetch streaming LLM responses as an async iterable",
5 | "main": "dist/asyncllm.js",
6 | "type": "module",
7 | "module": "index.js",
8 | "exports": {
9 | ".": "./dist/asyncllm.js",
10 | "./anthropic": "./dist/anthropic.js",
11 | "./gemini": "./dist/gemini.js"
12 | },
13 | "scripts": {
14 | "test": "deno test --allow-net --allow-read",
15 | "build-asyncllm": "npx -y esbuild index.js --bundle --minify --format=esm --outfile=dist/asyncllm.js",
16 | "build-gemini": "npx -y esbuild gemini.js --bundle --minify --format=esm --outfile=dist/gemini.js",
17 | "build-anthropic": "npx -y esbuild anthropic.js --bundle --minify --format=esm --outfile=dist/anthropic.js",
18 | "build": "npm run build-asyncllm && npm run build-gemini && npm run build-anthropic",
19 | "lint": "npx prettier@3.3 --write *.js *.md",
20 | "prepublishOnly": "npm run lint && npm run build"
21 | },
22 | "keywords": [
23 | "sse",
24 | "fetch",
25 | "async",
26 | "iterable",
27 | "server-sent-events",
28 | "streaming",
29 | "llm",
30 | "openai",
31 | "anthropic",
32 | "gemini",
33 | "cloudflare"
34 | ],
35 | "author": "S Anand ",
36 | "license": "MIT",
37 | "repository": {
38 | "type": "git",
39 | "url": "https://github.com/sanand0/asyncllm.git"
40 | },
41 | "bugs": {
42 | "url": "https://github.com/sanand0/asyncllm/issues"
43 | },
44 | "homepage": "https://github.com/sanand0/asyncllm#readme",
45 | "engines": {
46 | "node": ">=14.0.0"
47 | },
48 | "prettier": {
49 | "printWidth": 120
50 | },
51 | "files": [
52 | "README.md",
53 | "dist",
54 | "index.js",
55 | "index.d.ts"
56 | ],
57 | "dependencies": {
58 | "asyncsse": "^1.3.1"
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/samples/anthropic-tools.txt:
--------------------------------------------------------------------------------
1 | event: message_start
2 | data: {"type":"message_start","message":{"id":"msg_014p7gG3wDgGV9EUtLvnow3U","type":"message","role":"assistant","model":"claude-3-haiku-20240307","stop_sequence":null,"usage":{"input_tokens":472,"output_tokens":2},"content":[],"stop_reason":null}}
3 |
4 | event: content_block_start
5 | data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}
6 |
7 | event: ping
8 | data: {"type": "ping"}
9 |
10 | event: content_block_delta
11 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Okay"}}
12 |
13 | event: content_block_delta
14 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":","}}
15 |
16 | event: content_block_delta
17 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" let"}}
18 |
19 | event: content_block_delta
20 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"'s"}}
21 |
22 | event: content_block_delta
23 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" check"}}
24 |
25 | event: content_block_delta
26 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" the"}}
27 |
28 | event: content_block_delta
29 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" weather"}}
30 |
31 | event: content_block_delta
32 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" for"}}
33 |
34 | event: content_block_delta
35 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" San"}}
36 |
37 | event: content_block_delta
38 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" Francisco"}}
39 |
40 | event: content_block_delta
41 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":","}}
42 |
43 | event: content_block_delta
44 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" CA"}}
45 |
46 | event: content_block_delta
47 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":":"}}
48 |
49 | event: content_block_stop
50 | data: {"type":"content_block_stop","index":0}
51 |
52 | event: content_block_start
53 | data: {"type":"content_block_start","index":1,"content_block":{"type":"tool_use","id":"toolu_01T1x1fJ34qAmk2tNTrN7Up6","name":"get_weather","input":{}}}
54 |
55 | event: content_block_delta
56 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":""}}
57 |
58 | event: content_block_delta
59 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":"{\"location\":"}}
60 |
61 | event: content_block_delta
62 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":" \"San"}}
63 |
64 | event: content_block_delta
65 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":" Francisc"}}
66 |
67 | event: content_block_delta
68 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":"o,"}}
69 |
70 | event: content_block_delta
71 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":" CA\""}}
72 |
73 | event: content_block_delta
74 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":", "}}
75 |
76 | event: content_block_delta
77 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":"\"unit\": \"fah"}}
78 |
79 | event: content_block_delta
80 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":"renheit\"}"}}
81 |
82 | event: content_block_stop
83 | data: {"type":"content_block_stop","index":1}
84 |
85 | event: message_delta
86 | data: {"type":"message_delta","delta":{"stop_reason":"tool_use","stop_sequence":null},"usage":{"output_tokens":89}}
87 |
88 | event: message_stop
89 | data: {"type":"message_stop"}
90 |
--------------------------------------------------------------------------------
/samples/anthropic-tools2.txt:
--------------------------------------------------------------------------------
1 | event: message_start
2 | data: {"type":"message_start","message":{"id":"msg_01NpRfBZDJHQvTKGtrwFJheH","type":"message","role":"assistant","model":"claude-3-haiku-20240307","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":482,"output_tokens":8}} }
3 |
4 | event: content_block_start
5 | data: {"type":"content_block_start","index":0,"content_block":{"type":"tool_use","id":"toolu_015yB3TjTS1RBaM7VScM2MQY","name":"get_order","input":{}} }
6 |
7 | event: ping
8 | data: {"type": "ping"}
9 |
10 | event: content_block_delta
11 | data: {"type":"content_block_delta","index":0,"delta":{"type":"input_json_delta","partial_json":""} }
12 |
13 | event: content_block_delta
14 | data: {"type":"content_block_delta","index":0,"delta":{"type":"input_json_delta","partial_json":"{\"id\": \"1"} }
15 |
16 | event: content_block_delta
17 | data: {"type":"content_block_delta","index":0,"delta":{"type":"input_json_delta","partial_json":"23456\"}"} }
18 |
19 | event: content_block_stop
20 | data: {"type":"content_block_stop","index":0 }
21 |
22 | event: content_block_start
23 | data: {"type":"content_block_start","index":1,"content_block":{"type":"tool_use","id":"toolu_013VAZTYqMJm2JuRCqEA4kam","name":"get_customer","input":{}} }
24 |
25 | event: content_block_delta
26 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":""} }
27 |
28 | event: content_block_delta
29 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":"{\"id\": \""} }
30 |
31 | event: content_block_delta
32 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":"789"} }
33 |
34 | event: content_block_delta
35 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":"0\"}"} }
36 |
37 | event: content_block_stop
38 | data: {"type":"content_block_stop","index":1 }
39 |
40 | event: message_delta
41 | data: {"type":"message_delta","delta":{"stop_reason":"tool_use","stop_sequence":null},"usage":{"output_tokens":76} }
42 |
43 | event: message_stop
44 | data: {"type":"message_stop" }
45 |
--------------------------------------------------------------------------------
/samples/anthropic.txt:
--------------------------------------------------------------------------------
1 | event: message_start
2 | data: {"type":"message_start","message":{"id":"msg_013uu3QExnpT3UYsC9mo2Em8","type":"message","role":"assistant","model":"claude-3-haiku-20240307","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":19,"output_tokens":3}}}
3 |
4 | event: content_block_start
5 | data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}
6 |
7 | event: ping
8 | data: {"type": "ping"}
9 |
10 | event: content_block_delta
11 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"2 "}}
12 |
13 | event: content_block_delta
14 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"+ 2 "}}
15 |
16 | event: content_block_delta
17 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"= 4."}}
18 |
19 | event: content_block_stop
20 | data: {"type":"content_block_stop","index":0}
21 |
22 | event: message_delta
23 | data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":14}}
24 |
25 | event: message_stop
26 | data: {"type":"message_stop"}
27 |
--------------------------------------------------------------------------------
/samples/errors.txt:
--------------------------------------------------------------------------------
1 | data: invalid json
2 |
3 | data: {"error": {"message": "OpenAI API error", "type": "api_error"}}
4 |
5 | data: {"error": {"type": "invalid_request_error", "message": "Anthropic API error"}}
6 |
7 | data: {"error": {"code": 400, "message": "Gemini API error"}}
8 |
9 | data: {"message": {"error": "OpenRouter API error"}}
10 |
11 | data:
12 |
--------------------------------------------------------------------------------
/samples/gemini-tools.txt:
--------------------------------------------------------------------------------
1 | data: {"candidates": [{"content": {"parts": [{"functionCall": {"name": "take_notes","args": {"note": "Capitalism and socialism are two of the most prevalent economic systems in the world. Capitalism is characterized by private ownership of the means of production, free markets, and the pursuit of profit. Socialism, on the other hand, emphasizes social ownership of the means of production, with the goal of achieving social equality and economic justice. The two systems have been the subject of much debate, with proponents of each arguing for its superiority. Capitalism is often praised for its efficiency and innovation, while socialism is lauded for its potential to reduce inequality and provide for the needs of the most vulnerable. However, both systems have their drawbacks. Capitalism can lead to economic instability and social inequality, while socialism can stifle innovation and reduce individual freedom. Ultimately, the best economic system for a given society depends on its specific circumstances and values."}}}],"role": "model"},"finishReason": "STOP","index": 0,"safetyRatings": [{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"}]}],"usageMetadata": {"promptTokenCount": 50,"candidatesTokenCount": 174,"totalTokenCount": 224}}
2 |
--------------------------------------------------------------------------------
/samples/gemini-tools2.txt:
--------------------------------------------------------------------------------
1 | data: {"candidates": [{"content": {"parts": [{"functionCall": {"name": "get_order","args": {"id": "123456"}}},{"functionCall": {"name": "get_customer","args": {"id": "7890"}}}],"role": "model"},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"}]}],"usageMetadata": {"promptTokenCount": 104,"totalTokenCount": 104},"modelVersion": "gemini-1.5-flash-8b-001"}
2 |
3 | data: {"candidates": [{"content": {"parts": [{"text": ""}],"role": "model"},"avgLogprobs": "NaN"}],"usageMetadata": {"promptTokenCount": 104,"totalTokenCount": 104},"modelVersion": "gemini-1.5-flash-8b-001"}
4 |
5 | data: {"candidates": [{"content": {"parts": [{"text": ""}],"role": "model"},"finishReason": "STOP"}],"usageMetadata": {"promptTokenCount": 104,"candidatesTokenCount": 18,"totalTokenCount": 122},"modelVersion": "gemini-1.5-flash-8b-001"}
6 |
--------------------------------------------------------------------------------
/samples/gemini.txt:
--------------------------------------------------------------------------------
1 | data: {"candidates": [{"content": {"parts": [{"text": "2"}],"role": "model"}}],"usageMetadata": {"promptTokenCount": 13,"totalTokenCount": 13}}
2 |
3 | data: {"candidates": [{"content": {"parts": [{"text": " + 2 = 4\n"}],"role": "model"},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"}]}],"usageMetadata": {"promptTokenCount": 13,"totalTokenCount": 13}}
4 |
5 | data: {"candidates": [{"content": {"parts": [{"text": ""}],"role": "model"},"finishReason": "STOP"}],"usageMetadata": {"promptTokenCount": 13,"candidatesTokenCount": 8,"totalTokenCount": 21}}
6 |
--------------------------------------------------------------------------------
/samples/openai-tools.txt:
--------------------------------------------------------------------------------
1 | data: {"id":"chatcmpl-AIYHs3Xp2vOtDdtgJUaTpUVMKk3a8","object":"chat.completion.chunk","created":1728985068,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_e2bde53e6e","choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"call_F8YHCjnzrrTjfE4YSSpVW2Bc","type":"function","function":{"name":"get_delivery_date","arguments":""}}],"refusal":null},"logprobs":null,"finish_reason":null}]}
2 |
3 | data: {"id":"chatcmpl-AIYHs3Xp2vOtDdtgJUaTpUVMKk3a8","object":"chat.completion.chunk","created":1728985068,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_e2bde53e6e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\""}}]},"logprobs":null,"finish_reason":null}]}
4 |
5 | data: {"id":"chatcmpl-AIYHs3Xp2vOtDdtgJUaTpUVMKk3a8","object":"chat.completion.chunk","created":1728985068,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_e2bde53e6e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"order"}}]},"logprobs":null,"finish_reason":null}]}
6 |
7 | data: {"id":"chatcmpl-AIYHs3Xp2vOtDdtgJUaTpUVMKk3a8","object":"chat.completion.chunk","created":1728985068,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_e2bde53e6e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"_id"}}]},"logprobs":null,"finish_reason":null}]}
8 |
9 | data: {"id":"chatcmpl-AIYHs3Xp2vOtDdtgJUaTpUVMKk3a8","object":"chat.completion.chunk","created":1728985068,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_e2bde53e6e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}]}
10 |
11 | data: {"id":"chatcmpl-AIYHs3Xp2vOtDdtgJUaTpUVMKk3a8","object":"chat.completion.chunk","created":1728985068,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_e2bde53e6e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"123"}}]},"logprobs":null,"finish_reason":null}]}
12 |
13 | data: {"id":"chatcmpl-AIYHs3Xp2vOtDdtgJUaTpUVMKk3a8","object":"chat.completion.chunk","created":1728985068,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_e2bde53e6e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"456"}}]},"logprobs":null,"finish_reason":null}]}
14 |
15 | data: {"id":"chatcmpl-AIYHs3Xp2vOtDdtgJUaTpUVMKk3a8","object":"chat.completion.chunk","created":1728985068,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_e2bde53e6e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"}"}}]},"logprobs":null,"finish_reason":null}]}
16 |
17 | data: {"id":"chatcmpl-AIYHs3Xp2vOtDdtgJUaTpUVMKk3a8","object":"chat.completion.chunk","created":1728985068,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_e2bde53e6e","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}]}
18 |
19 | data: [DONE]
20 |
21 |
--------------------------------------------------------------------------------
/samples/openai-tools2.txt:
--------------------------------------------------------------------------------
1 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"role":"assistant","content":null},"logprobs":null,"finish_reason":null}]}
2 |
3 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"call_wnH2cswb4JAnm69pUAP4MNEN","type":"function","function":{"name":"get_order","arguments":""}}]},"logprobs":null,"finish_reason":null}]}
4 |
5 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"id"}}]},"logprobs":null,"finish_reason":null}]}
6 |
7 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\": \"1"}}]},"logprobs":null,"finish_reason":null}]}
8 |
9 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"23456\""}}]},"logprobs":null,"finish_reason":null}]}
10 |
11 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"}"}}]},"logprobs":null,"finish_reason":null}]}
12 |
13 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"id":"call_f4GVABhbwSOLoaisOBOajnsm","type":"function","function":{"name":"get_customer","arguments":""}}]},"logprobs":null,"finish_reason":null}]}
14 |
15 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"{\"id"}}]},"logprobs":null,"finish_reason":null}]}
16 |
17 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"\": \"7"}}]},"logprobs":null,"finish_reason":null}]}
18 |
19 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"890\"}"}}]},"logprobs":null,"finish_reason":null}]}
20 |
21 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}]}
22 |
23 | data: [DONE]
24 |
--------------------------------------------------------------------------------
/samples/openai.txt:
--------------------------------------------------------------------------------
1 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}]}
2 |
3 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"content":"Hello"},"logprobs":null,"finish_reason":null}]}
4 |
5 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}
6 |
7 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"content":" How"},"logprobs":null,"finish_reason":null}]}
8 |
9 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"content":" can"},"logprobs":null,"finish_reason":null}]}
10 |
11 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"content":" I"},"logprobs":null,"finish_reason":null}]}
12 |
13 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"content":" assist"},"logprobs":null,"finish_reason":null}]}
14 |
15 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"content":" you"},"logprobs":null,"finish_reason":null}]}
16 |
17 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"content":" today"},"logprobs":null,"finish_reason":null}]}
18 |
19 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"content":"?"},"logprobs":null,"finish_reason":null}]}
20 |
21 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
22 |
23 | data: [DONE]
24 |
--------------------------------------------------------------------------------
/samples/openrouter.txt:
--------------------------------------------------------------------------------
1 | : OPENROUTER PROCESSING
2 |
3 | : OPENROUTER PROCESSING
4 |
5 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null,"logprobs":null}]}
6 |
7 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" The"},"finish_reason":null,"logprobs":null}]}
8 |
9 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" sum"},"finish_reason":null,"logprobs":null}]}
10 |
11 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" of"},"finish_reason":null,"logprobs":null}]}
12 |
13 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" "},"finish_reason":null,"logprobs":null}]}
14 |
15 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"2"},"finish_reason":null,"logprobs":null}]}
16 |
17 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" and"},"finish_reason":null,"logprobs":null}]}
18 |
19 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" "},"finish_reason":null,"logprobs":null}]}
20 |
21 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"2"},"finish_reason":null,"logprobs":null}]}
22 |
23 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" is"},"finish_reason":null,"logprobs":null}]}
24 |
25 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" "},"finish_reason":null,"logprobs":null}]}
26 |
27 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"4"},"finish_reason":null,"logprobs":null}]}
28 |
29 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"."},"finish_reason":null,"logprobs":null}]}
30 |
31 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" This"},"finish_reason":null,"logprobs":null}]}
32 |
33 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" is"},"finish_reason":null,"logprobs":null}]}
34 |
35 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" a"},"finish_reason":null,"logprobs":null}]}
36 |
37 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" basic"},"finish_reason":null,"logprobs":null}]}
38 |
39 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" arithmetic"},"finish_reason":null,"logprobs":null}]}
40 |
41 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" operation"},"finish_reason":null,"logprobs":null}]}
42 |
43 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" where"},"finish_reason":null,"logprobs":null}]}
44 |
45 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" you"},"finish_reason":null,"logprobs":null}]}
46 |
47 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" add"},"finish_reason":null,"logprobs":null}]}
48 |
49 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" the"},"finish_reason":null,"logprobs":null}]}
50 |
51 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" two"},"finish_reason":null,"logprobs":null}]}
52 |
53 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" numbers"},"finish_reason":null,"logprobs":null}]}
54 |
55 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" together"},"finish_reason":null,"logprobs":null}]}
56 |
57 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" to"},"finish_reason":null,"logprobs":null}]}
58 |
59 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" get"},"finish_reason":null,"logprobs":null}]}
60 |
61 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" the"},"finish_reason":null,"logprobs":null}]}
62 |
63 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" total"},"finish_reason":null,"logprobs":null}]}
64 |
65 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"."},"finish_reason":null,"logprobs":null}]}
66 |
67 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" "},"finish_reason":null,"logprobs":null}]}
68 |
69 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"\n"},"finish_reason":null,"logprobs":null}]}
70 |
71 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"\n"},"finish_reason":null,"logprobs":null}]}
72 |
73 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"Here"},"finish_reason":null,"logprobs":null}]}
74 |
75 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"'"},"finish_reason":null,"logprobs":null}]}
76 |
77 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"s"},"finish_reason":null,"logprobs":null}]}
78 |
79 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" the"},"finish_reason":null,"logprobs":null}]}
80 |
81 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" calculation"},"finish_reason":null,"logprobs":null}]}
82 |
83 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":":"},"finish_reason":null,"logprobs":null}]}
84 |
85 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"\n"},"finish_reason":null,"logprobs":null}]}
86 |
87 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"\n"},"finish_reason":null,"logprobs":null}]}
88 |
89 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"2"},"finish_reason":null,"logprobs":null}]}
90 |
91 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" +"},"finish_reason":null,"logprobs":null}]}
92 |
93 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" "},"finish_reason":null,"logprobs":null}]}
94 |
95 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"2"},"finish_reason":null,"logprobs":null}]}
96 |
97 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" ="},"finish_reason":null,"logprobs":null}]}
98 |
99 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" "},"finish_reason":null,"logprobs":null}]}
100 |
101 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"4"},"finish_reason":null,"logprobs":null}]}
102 |
103 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"\n"},"finish_reason":null,"logprobs":null}]}
104 |
105 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"\n"},"finish_reason":null,"logprobs":null}]}
106 |
107 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"So"},"finish_reason":null,"logprobs":null}]}
108 |
109 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":","},"finish_reason":null,"logprobs":null}]}
110 |
111 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" the"},"finish_reason":null,"logprobs":null}]}
112 |
113 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" answer"},"finish_reason":null,"logprobs":null}]}
114 |
115 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" to"},"finish_reason":null,"logprobs":null}]}
116 |
117 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" your"},"finish_reason":null,"logprobs":null}]}
118 |
119 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" question"},"finish_reason":null,"logprobs":null}]}
120 |
121 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" is"},"finish_reason":null,"logprobs":null}]}
122 |
123 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" "},"finish_reason":null,"logprobs":null}]}
124 |
125 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"4"},"finish_reason":null,"logprobs":null}]}
126 |
127 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"."},"finish_reason":null,"logprobs":null}]}
128 |
129 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":"stop","logprobs":null}]}
130 |
131 | : OPENROUTER PROCESSING
132 |
133 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null,"logprobs":null}],"usage":{"prompt_tokens":17,"completion_tokens":62,"total_tokens":79}}
134 |
135 | data: [DONE]
136 |
--------------------------------------------------------------------------------
/samples/output.txt:
--------------------------------------------------------------------------------
1 | data: {"candidates": [{"content": {"parts": [{"functionCall": {"name": "take_notes","args": {"note": "Capitalism and socialism are two of the most prevalent economic systems in the world. Capitalism is characterized by private ownership of the means of production, free markets, and the pursuit of profit. Socialism, on the other hand, emphasizes social ownership of the means of production, with the goal of achieving social equality and economic justice. The two systems have been the subject of much debate, with proponents of each arguing for its superiority. Capitalism is often praised for its efficiency and innovation, while socialism is lauded for its potential to reduce inequality and provide for the needs of the most vulnerable. However, both systems have their drawbacks. Capitalism can lead to economic instability and social inequality, while socialism can stifle innovation and reduce individual freedom. Ultimately, the best economic system for a given society depends on its specific circumstances and values."}}}],"role": "model"},"finishReason": "STOP","index": 0,"safetyRatings": [{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"}]}],"usageMetadata": {"promptTokenCount": 50,"candidatesTokenCount": 174,"totalTokenCount": 224}}
2 |
3 |
--------------------------------------------------------------------------------
/test.js:
--------------------------------------------------------------------------------
1 | import { asyncLLM } from "./index.js";
2 |
3 | const PORT = 8080;
4 | const BASE_URL = `http://localhost:${PORT}`;
5 |
6 | function assertEquals(actual, expected, message) {
7 | if (JSON.stringify(actual) === JSON.stringify(expected)) return;
8 | throw new Error(
9 | message || `Expected:\n${JSON.stringify(expected, null, 2)}. Actual:\n${JSON.stringify(actual, null, 2)}`,
10 | );
11 | }
12 |
13 | Deno.serve({ port: PORT }, async (req) => {
14 | const url = new URL(req.url);
15 | const file = await Deno.readFile(`samples${url.pathname}`);
16 | return new Response(file, {
17 | headers: { "Content-Type": "text/event-stream" },
18 | });
19 | });
20 |
21 | /*
22 | curl -X POST https://llmfoundry.straive.com/openai/v1/chat/completions \
23 | -H "Authorization: Bearer $LLMFOUNDRY_TOKEN:asyncllm" \
24 | -H "Content-Type: application/json" \
25 | -d '{"model": "gpt-4o-mini", "stream": true, "messages": [{"role": "user", "content": "Hello world"}]}'
26 | */
27 | Deno.test("asyncLLM - OpenAI", async () => {
28 | const results = await Array.fromAsync(asyncLLM(`${BASE_URL}/openai.txt`));
29 |
30 | assertEquals(results.length, 10);
31 | assertEquals(results[0].content, "");
32 | assertEquals(results[1].content, "Hello");
33 | assertEquals(results[9].content, "Hello! How can I assist you today?");
34 | assertEquals(results.at(-1).tools, undefined);
35 | });
36 |
37 | /*
38 | curl -X POST https://llmfoundry.straive.com/openai/v1/chat/completions \
39 | -H "Authorization: Bearer $LLMFOUNDRY_TOKEN:asyncllm" \
40 | -H "Content-Type: application/json" \
41 | -d '{
42 | "model": "gpt-4o-mini",
43 | "stream": true,
44 | "messages": [
45 | {"role": "system", "content": "Call get_delivery_date with the order ID."},
46 | {"role": "user", "content": "123456"}
47 | ],
48 | "tools": [
49 | {
50 | "type": "function",
51 | "function": {
52 | "name": "get_delivery_date",
53 | "description": "Get the delivery date for a customer order.",
54 | "parameters": {
55 | "type": "object",
56 | "properties": { "order_id": { "type": "string", "description": "The customer order ID." } },
57 | "required": ["order_id"],
58 | "additionalProperties": false
59 | }
60 | }
61 | }
62 | ]
63 | }'
64 | */
65 | Deno.test("asyncLLM - OpenAI with tool calls", async () => {
66 | let index = 0;
67 | let data = {};
68 | for await (data of asyncLLM(`${BASE_URL}/openai-tools.txt`)) {
69 | if (index == 0) {
70 | assertEquals(data.tools[0].name, "get_delivery_date");
71 | assertEquals(data.tools[0].id, "call_F8YHCjnzrrTjfE4YSSpVW2Bc");
72 | assertEquals(data.tools[0].args, "");
73 | }
74 | if (index == 1) assertEquals(data.tools[0].args, '{"');
75 | if (index == 7) assertEquals(data.tools[0].args, '{"order_id":"123456"}');
76 | if (index == 7) assertEquals(data.content, undefined);
77 | index++;
78 | }
79 | assertEquals(JSON.parse(data.tools[0].args), { order_id: "123456" });
80 | assertEquals(index, 8);
81 | });
82 |
83 | /*
84 | curl -X POST https://llmfoundry.straive.com/openai/v1/chat/completions \
85 | -H "Authorization: Bearer $LLMFOUNDRY_TOKEN:asyncllm" \
86 | -H "Content-Type: application/json" \
87 | -d '{
88 | "model": "gpt-4o-mini",
89 | "stream": true,
90 | "messages": [
91 | { "role": "system", "content": "Call get_order({order_id}) AND get_customer({customer_id}) in parallel" },
92 | { "role": "user", "content": "Order ID: 123456, Customer ID: 7890" }
93 | ],
94 | "tool_choice": "required",
95 | "tools": [
96 | {
97 | "type": "function",
98 | "function": { "name": "get_order", "parameters": { "type": "object", "properties": { "id": { "type": "string" } }, "required": ["id"] } }
99 | },
100 | {
101 | "type": "function",
102 | "function": { "name": "get_customer", "parameters": { "type": "object", "properties": { "id": { "type": "string" } }, "required": ["id"] } }
103 | }
104 | ]
105 | }'
106 |
107 | */
108 | Deno.test("asyncLLM - OpenAI with multiple tool calls", async () => {
109 | let index = 0;
110 | let data = {};
111 | for await (data of asyncLLM(`${BASE_URL}/openai-tools2.txt`)) {
112 | if (index === 0) {
113 | assertEquals(data.tools[0], {
114 | name: "get_order",
115 | id: "call_wnH2cswb4JAnm69pUAP4MNEN",
116 | args: "",
117 | });
118 | }
119 | if (index === 5) assertEquals(data.tools[0].args, '{"id": "123456"}');
120 | if (index === 6) {
121 | assertEquals(data.tools[1], {
122 | name: "get_customer",
123 | id: "call_f4GVABhbwSOLoaisOBOajnsm",
124 | args: '{"id',
125 | });
126 | }
127 | if (index === 9) assertEquals(data.tools[1].args, '{"id": "7890"}');
128 | index++;
129 | }
130 | assertEquals(index, 9);
131 | assertEquals(data.tools[0], { name: "get_order", id: "call_wnH2cswb4JAnm69pUAP4MNEN", args: '{"id": "123456"}' });
132 | assertEquals(data.tools[1], { name: "get_customer", id: "call_f4GVABhbwSOLoaisOBOajnsm", args: '{"id": "7890"}' });
133 | });
134 |
135 | /*
136 | curl -X POST https://llmfoundry.straive.com/anthropic/v1/messages \
137 | -H "Authorization: Bearer $LLMFOUNDRY_TOKEN:asyncllm" \
138 | -H "Content-Type: application/json" \
139 | -d '{"model": "claude-3-haiku-20240307", "stream": true, "max_tokens": 10, "messages": [{"role": "user", "content": "What is 2 + 2"}]}'
140 | */
141 | Deno.test("asyncLLM - Anthropic", async () => {
142 | const results = await Array.fromAsync(asyncLLM(`${BASE_URL}/anthropic.txt`));
143 |
144 | assertEquals(results.length, 3);
145 | assertEquals(results[0].content, "2 ");
146 | assertEquals(results[1].content, "2 + 2 ");
147 | assertEquals(results[2].content, "2 + 2 = 4.");
148 | assertEquals(results.at(-1).tools, undefined);
149 | });
150 |
151 | Deno.test("asyncLLM - Anthropic with tool calls", async () => {
152 | let index = 0;
153 | let data = {};
154 | for await (data of asyncLLM(`${BASE_URL}/anthropic-tools.txt`)) {
155 | if (index === 0) assertEquals(data.content, "Okay");
156 | if (index === 12) assertEquals(data.content, "Okay, let's check the weather for San Francisco, CA:");
157 | if (index === 13) assertEquals(data.tools[0], { name: "get_weather", id: "toolu_01T1x1fJ34qAmk2tNTrN7Up6" });
158 | if (index === 14) assertEquals(data.tools[0].args, "");
159 | index++;
160 | }
161 | assertEquals(data.tools[0].name, "get_weather");
162 | assertEquals(data.tools[0].id, "toolu_01T1x1fJ34qAmk2tNTrN7Up6");
163 | assertEquals(JSON.parse(data.tools[0].args), {
164 | location: "San Francisco, CA",
165 | unit: "fahrenheit",
166 | });
167 | assertEquals(index, 23);
168 | });
169 |
170 | /*
171 | curl -X POST https://llmfoundry.straive.com/anthropic/v1/messages \
172 | -H "Authorization: Bearer $LLMFOUNDRY_TOKEN:asyncllm" \
173 | -H "Content-Type: application/json" \
174 | -d '{
175 | "system": "Call get_order({order_id}) AND get_customer({customer_id}) in parallel",
176 | "messages": [{ "role": "user", "content": "Order ID: 123456, Customer ID: 7890" }],
177 | "model": "claude-3-haiku-20240307",
178 | "max_tokens": 4096,
179 | "stream": true,
180 | "tool_choice": { "type": "any", "disable_parallel_tool_use": false },
181 | "tools": [
182 | { "name": "get_order", "input_schema": { "type": "object", "properties": { "id": { "type": "string" } }, "required": ["id"] } },
183 | { "name": "get_customer", "input_schema": { "type": "object", "properties": { "id": { "type": "string" } }, "required": ["id"] } }
184 | ]
185 | }'
186 |
187 | */
188 | Deno.test("asyncLLM - Anthropic with multiple tool calls", async () => {
189 | let index = 0;
190 | let data = {}; // retains the last yielded event after the loop ends
191 | for await (data of asyncLLM(`${BASE_URL}/anthropic-tools2.txt`)) { // recorded SSE fixture with two parallel tool calls
192 | if (index === 0) assertEquals(data.tools[0], { name: "get_order", id: "toolu_015yB3TjTS1RBaM7VScM2MQY" }); // first event: tool name/id only, args not started yet
193 | if (index === 2) assertEquals(data.tools[0].args, '{"id": "1'); // args accumulate incrementally as partial JSON text
194 | if (index === 7)
195 | assertEquals(data.tools[1], { name: "get_customer", id: "toolu_013VAZTYqMJm2JuRCqEA4kam", args: '{"id": "789' }); // second tool call appears part-way through the stream
196 | index++;
197 | }
198 | assertEquals(index, 9); // fixture yields 9 events in total
199 | assertEquals(data.tools[0], { name: "get_order", id: "toolu_015yB3TjTS1RBaM7VScM2MQY", args: '{"id": "123456"}' }); // final event: both tool calls fully assembled
200 | assertEquals(data.tools[1], { name: "get_customer", id: "toolu_013VAZTYqMJm2JuRCqEA4kam", args: '{"id": "7890"}' });
201 | });
202 |
203 | /*
204 | curl -X POST https://llmfoundry.straive.com/gemini/v1beta/models/gemini-1.5-flash-8b:streamGenerateContent?alt=sse \
205 | -H "Authorization: Bearer $LLMFOUNDRY_TOKEN:asyncllm" \
206 | -H "Content-Type: application/json" \
207 | -d '{
208 | "system_instruction": { "parts": [{ "text": "You are a helpful assistant" }] },
209 | "contents": [{ "role": "user", "parts": [{ "text": "What is 2+2?" }] }]
210 | }'
211 | */
212 | Deno.test("asyncLLM - Gemini", async () => {
213 | const results = await Array.fromAsync(asyncLLM(`${BASE_URL}/gemini.txt`)); // recorded Gemini SSE fixture for a plain-text answer
214 |
215 | assertEquals(results.length, 3);
216 | assertEquals(results[0].content, "2"); // content is cumulative: each event carries the full text so far
217 | assertEquals(results[1].content, "2 + 2 = 4\n");
218 | assertEquals(results[2].content, "2 + 2 = 4\n"); // last two events carry identical, complete text
219 | assertEquals(results.at(-1).tools, undefined); // text-only response: no tools field is surfaced
220 | });
221 |
222 | /*
223 | curl -X POST https://llmfoundry.straive.com/gemini/v1beta/models/gemini-1.5-flash-latest:streamGenerateContent?alt=sse \
224 | -H "Authorization: Bearer $LLMFOUNDRY_TOKEN:asyncllm" \
225 | -H "Content-Type: application/json" \
226 | -d '{
227 | "contents": { "role": "user", "parts": { "text": "Call take_notes passing it an essay about capitalism vs socialism" } },
228 | "tools": [
229 | {
230 | "function_declarations": [
231 | {
232 | "name": "take_notes",
233 | "description": "Take notes about a topic",
234 | "parameters": {
235 | "type": "object",
236 | "properties": { "note": { "type": "string" } },
237 | "required": ["note"]
238 | }
239 | }
240 | ]
241 | }
242 | ]
243 | }'
244 | */
245 | Deno.test("asyncLLM - Gemini with tool calls", async () => {
246 | let index = 0;
247 | let data = {}; // retains the last yielded event after the loop ends
248 | for await (data of asyncLLM(`${BASE_URL}/gemini-tools.txt`)) {
249 | if (index === 0) assertEquals(data.tools[0].name, "take_notes");
250 | if (index === 0) assertEquals(data.tools[0].args.startsWith('{"note":"Capitalism'), true); // args is a serialized JSON string, not a parsed object
251 | index++;
252 | }
253 | assertEquals(data.content, undefined); // tool-only response carries no text content
254 | assertEquals(JSON.parse(data.tools[0].args).note.startsWith("Capitalism and socialism"), true);
255 | assertEquals(JSON.parse(data.tools[0].args).note.endsWith("specific circumstances and values."), true); // spot-check both ends rather than the whole essay
256 | assertEquals(index, 1); // this fixture delivers the whole tool call in a single event
257 | });
258 |
259 | /*
260 | curl -X POST https://llmfoundry.straive.com/gemini/v1beta/models/gemini-1.5-flash-latest:streamGenerateContent?alt=sse \
261 | -H "Authorization: Bearer $LLMFOUNDRY_TOKEN:asyncllm" \
262 | -H "Content-Type: application/json" \
263 | -d '{
264 | "systemInstruction": {"parts": [{"text": "Call get_order({order_id}) AND get_customer({customer_id}) in parallel"}]},
265 | "contents": [{"role": "user", "parts": [{ "text": "Order ID: 123456, Customer ID: 7890" }] }],
266 | "toolConfig": { "function_calling_config": { "mode": "ANY" } },
267 | "tools": {
268 | "functionDeclarations": [
269 | {
270 | "name": "get_order",
271 | "parameters": { "type": "object", "properties": { "id": { "type": "string" } }, "required": ["id"] }
272 | },
273 | {
274 | "name": "get_customer",
275 | "parameters": { "type": "object", "properties": { "id": { "type": "string" } }, "required": ["id"] }
276 | }
277 | ]
278 | }
279 | }
280 | '
281 | */
282 | Deno.test("asyncLLM - Gemini with multiple tool calls", async () => {
283 | let index = 0;
284 | let data = {}; // retains the last yielded event after the loop ends
285 | for await (data of asyncLLM(`${BASE_URL}/gemini-tools2.txt`)) {
286 | if (index === 0) {
287 | assertEquals(data.tools[0], { name: "get_order", args: '{"id":"123456"}' }); // both parallel calls arrive complete in the very first event
288 | assertEquals(data.tools[1], { name: "get_customer", args: '{"id":"7890"}' });
289 | }
290 | index++;
291 | }
292 | assertEquals(index, 3); // fixture yields 3 events in total
293 | assertEquals(data.tools[0].name, "get_order");
294 | assertEquals(JSON.parse(data.tools[0].args), { id: "123456" }); // args remain valid JSON through the final event
295 | assertEquals(data.tools[1].name, "get_customer");
296 | assertEquals(JSON.parse(data.tools[1].args), { id: "7890" });
297 | });
298 |
299 | /*
300 | curl -X POST https://llmfoundry.straive.com/openrouter/v1/chat/completions \
301 | -H "Authorization: Bearer $LLMFOUNDRY_TOKEN:asyncllm" \
302 | -H "Content-Type: application/json" \
303 | -d '{"model": "meta-llama/llama-3.2-11b-vision-instruct", "stream": true, "messages": [{"role": "user", "content": "What is 2 + 2"}]}'
304 | */
305 | Deno.test("asyncLLM - OpenRouter", async () => {
306 | const results = await Array.fromAsync(asyncLLM(`${BASE_URL}/openrouter.txt`)); // recorded OpenRouter (OpenAI-style) SSE fixture
307 |
308 | assertEquals(results.length, 64);
309 | assertEquals(results[0].content, ""); // first delta is empty
310 | assertEquals(results[1].content, " The"); // content is cumulative across events
311 | assertEquals(results[2].content, " The sum");
312 | assertEquals(
313 | results.at(-1).content,
314 | " The sum of 2 and 2 is 4. This is a basic arithmetic operation where you add the two numbers together to get the total. \n\nHere's the calculation:\n\n2 + 2 = 4\n\nSo, the answer to your question is 4.",
315 | );
316 | assertEquals(results.at(-1).tools, undefined); // text-only response: no tools field is surfaced
317 | });
318 |
319 | Deno.test("asyncLLM - Error handling", async () => {
320 | const results = await Array.fromAsync(asyncLLM(`${BASE_URL}/errors.txt`)); // single fixture exercising every provider's error shape
321 |
322 | assertEquals(results.length, 6); // errors are yielded as events with an `error` field, not thrown
323 |
324 | // Malformed JSON
325 | assertEquals(results[0].error, "Unexpected token 'i', \"invalid json\" is not valid JSON");
326 |
327 | // OpenAI-style error
328 | assertEquals(results[1].error, "OpenAI API error");
329 |
330 | // Anthropic-style error
331 | assertEquals(results[2].error, "Anthropic API error");
332 |
333 | // Gemini-style error
334 | assertEquals(results[3].error, "Gemini API error");
335 |
336 | // OpenRouter-style error
337 | assertEquals(results[4].error, "OpenRouter API error");
338 |
339 | // No data
340 | assertEquals(results[5].error, "Unexpected end of JSON input");
341 | });
342 |
343 | Deno.test("asyncLLM - Config callback", async () => {
344 | let responseStatus = 0;
345 | let contentType = "";
346 |
347 | const results = await Array.fromAsync(
348 | asyncLLM(
349 | `${BASE_URL}/openai.txt`,
350 | {}, // second argument: fetch options (empty here)
351 | { // third argument: asyncLLM config object
352 | onResponse: async (response) => { // receives the raw fetch Response — presumably before streaming begins; confirm against index.js
353 | responseStatus = response.status;
354 | contentType = response.headers.get("Content-Type");
355 | },
356 | },
357 | ),
358 | );
359 |
360 | assertEquals(responseStatus, 200);
361 | assertEquals(contentType, "text/event-stream");
362 | assertEquals(results.length, 10); // Verify normal operation still works
363 | });
364 |
365 | Deno.test("asyncLLM - Config callback error handling", async () => {
366 | let responseStatus = 0;
367 |
368 | const results = await Array.fromAsync(
369 | asyncLLM(
370 | `${BASE_URL}/errors.txt`, // fixture whose body parses with errors (see the Error handling test)
371 | {},
372 | {
373 | onResponse: async (response) => {
374 | responseStatus = response.status;
375 | },
376 | },
377 | ),
378 | );
379 |
380 | assertEquals(responseStatus, 200); // callback still fires: the HTTP response itself succeeded even though the body is malformed
381 | assertEquals(results[0].error, "Unexpected token 'i', \"invalid json\" is not valid JSON");
382 | });
383 |
384 | Deno.test("asyncLLM - Request object input", async () => {
385 | const request = new Request(`${BASE_URL}/openai.txt`); // asyncLLM accepts a fetch Request in place of a URL string
386 | const results = await Array.fromAsync(asyncLLM(request));
387 |
388 | assertEquals(results.length, 10); // same event count as the URL-string variant of this fixture
389 | });
390 |