├── .gitignore
├── LICENSE
├── README.md
├── anthropic.js
├── anthropic_test.js
├── gemini.js
├── gemini_test.js
├── index.d.ts
├── index.js
├── package-lock.json
├── package.json
├── samples
├── anthropic-tools.txt
├── anthropic-tools2.txt
├── anthropic.txt
├── errors.txt
├── gemini-tools.txt
├── gemini-tools2.txt
├── gemini.txt
├── openai-responses-tools.txt
├── openai-responses-tools2.txt
├── openai-responses.txt
├── openai-tools.txt
├── openai-tools2.txt
├── openai.txt
├── openrouter.txt
└── output.txt
└── test.js
/.gitignore:
--------------------------------------------------------------------------------
1 | dist/
2 | node_modules/
3 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2024 Anand S
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining
4 | a copy of this software and associated documentation files (the
5 | "Software"), to deal in the Software without restriction, including
6 | without limitation the rights to use, copy, modify, merge, publish,
7 | distribute, sublicense, and/or sell copies of the Software, and to
8 | permit persons to whom the Software is furnished to do so, subject to
9 | the following conditions:
10 |
11 | The above copyright notice and this permission notice shall be
12 | included in all copies or substantial portions of the Software.
13 |
14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # asyncLLM
2 |
3 | [![npm version](https://img.shields.io/npm/v/asyncllm.svg)](https://www.npmjs.com/package/asyncllm)
4 | [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
5 |
6 | Fetch LLM responses across multiple providers as an async iterable.
7 |
8 | ## Features
9 |
10 | - 🚀 Lightweight (~2KB) and dependency-free
11 | - 🔄 Works with multiple LLM providers (OpenAI, Anthropic, Gemini, and more)
12 | - 🌐 Browser and Node.js compatible
13 | - 📦 Easy to use with ES modules
14 |
15 | ## Installation
16 |
17 | ```bash
18 | npm install asyncllm
19 | ```
20 |
21 | ## Anthropic and Gemini Adapters
22 |
23 | Adapters convert OpenAI chat completions request bodies to the [Anthropic](https://docs.anthropic.com/en/api/messages) or [Gemini](https://ai.google.dev/gemini-api/docs/text-generation?lang=rest) formats. For example:
24 |
25 | ```javascript
26 | import { anthropic } from "https://cdn.jsdelivr.net/npm/asyncllm@2/dist/anthropic.js";
27 | import { gemini } from "https://cdn.jsdelivr.net/npm/asyncllm@2/dist/gemini.js";
28 |
29 | // Create an OpenAI chat completions request
30 | const body = {
31 | messages: [{ role: "user", content: "Hello, world!" }],
32 | temperature: 0.5,
33 | };
34 |
35 | // Fetch request with the Anthropic API
36 | const anthropicResponse = await fetch("https://api.anthropic.com/v1/messages", {
37 | method: "POST",
38 | headers: { "Content-Type": "application/json", "x-api-key": "YOUR_API_KEY" },
39 | // anthropic() converts the OpenAI chat completions request to Anthropic's format
40 | body: JSON.stringify(anthropic({ ...body, model: "claude-3-haiku-20240307" })),
41 | }).then((r) => r.json());
42 |
43 | // Fetch request with the Gemini API
44 | const geminiResponse = await fetch(
45 | "https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash-8b:generateContent",
46 | {
47 | method: "POST",
48 | headers: { "Content-Type": "application/json", Authorization: `Bearer YOUR_API_KEY` },
49 | // gemini() converts the OpenAI chat completions request to Gemini's format
50 | body: JSON.stringify(gemini(body)),
51 | },
52 | ).then((r) => r.json());
53 | ```
54 |
55 | Here are the parameters supported by each provider.
56 |
57 | | OpenAI Parameter | Anthropic | Gemini |
58 | | ----------------------------------- | --------- | ------ |
59 | | messages | Y | Y |
60 | | system message | Y | Y |
61 | | temperature | Y | Y |
62 | | max_tokens | Y | Y |
63 | | top_p | Y | Y |
64 | | stop sequences | Y | Y |
65 | | stream | Y | Y |
66 | | presence_penalty | | Y |
67 | | frequency_penalty | | Y |
68 | | logprobs | | Y |
69 | | top_logprobs | | Y |
70 | | n (multiple candidates) | | Y |
71 | | metadata.user_id | Y | |
72 | | tools/functions | Y | Y |
73 | | tool_choice | Y | Y |
74 | | parallel_tool_calls | Y | |
75 | | response_format.type: "json_object" | | Y |
76 | | response_format.type: "json_schema" | | Y |
77 |
78 | Content types:
79 |
80 | | OpenAI | Anthropic | Gemini |
81 | | ------ | --------- | ------ |
82 | | Text | Y | Y |
83 | | Images | Y | Y |
84 | | Audio | | Y |
85 |
86 | Image Sources
87 |
88 | | OpenAI Parameter | Anthropic | Gemini |
89 | | ---------------- | --------- | ------ |
90 | | Data URI | Y | Y |
91 | | External URLs | | Y |
92 |
93 | ## Streaming
94 |
95 | Call `asyncLLM()` just like you would use `fetch` with any LLM provider with streaming responses.
96 |
97 | - [OpenAI Chat Completion Streaming](https://platform.openai.com/docs/api-reference/chat-streaming). Many providers like Azure, Groq, OpenRouter, etc. follow the OpenAI Chat Completion API.
98 | - [OpenAI Responses API Streaming](https://platform.openai.com/docs/api-reference/responses-streaming).
99 | - [Anthropic Streaming](https://docs.anthropic.com/en/api/messages-streaming)
100 | - [Gemini Streaming](https://ai.google.dev/gemini-api/docs/text-generation?lang=rest#generate-a-text-stream)
101 |
102 | The result is an async generator that yields objects with `content`, `tool`, and `args` properties.
103 |
104 | For example, to update the DOM with the LLM's response:
105 |
106 | ```html
107 |
108 |
109 |
110 |
111 |
112 |
113 |
136 |
137 | ```
138 |
139 | ### Node.js or bundled projects
140 |
141 | ```javascript
142 | import { asyncLLM } from "asyncllm";
143 |
144 | // Usage is the same as in the browser example
145 | ```
146 |
147 | ## Examples
148 |
149 | ### OpenAI streaming
150 |
151 | ```javascript
152 | import { asyncLLM } from "https://cdn.jsdelivr.net/npm/asyncllm@2";
153 |
154 | const body = {
155 | model: "gpt-4.1-nano",
156 | // You MUST enable streaming, else the API will return an {error}
157 | stream: true,
158 | messages: [{ role: "user", content: "Hello, world!" }],
159 | };
160 |
161 | for await (const data of asyncLLM("https://api.openai.com/v1/chat/completions", {
162 | method: "POST",
163 | headers: { "Content-Type": "application/json", Authorization: `Bearer ${apiKey}` },
164 | body: JSON.stringify(body),
165 | })) {
166 | console.log(data);
167 | }
168 | ```
169 |
170 | This will log something like this on the console:
171 |
172 | ```js
173 | { content: "", message: { "id": "chatcmpl-...", ...} }
174 | { content: "Hello", message: { "id": "chatcmpl-...", ...} }
175 | { content: "Hello!", message: { "id": "chatcmpl-...", ...} }
176 | { content: "Hello! How", message: { "id": "chatcmpl-...", ...} }
177 | ...
178 | { content: "Hello! How can I assist you today?", message: { "id": "chatcmpl-...", ...} }
179 | ```
180 |
181 | ## OpenAI Responses API streaming
182 |
183 | ```javascript
184 | import { asyncLLM } from "https://cdn.jsdelivr.net/npm/asyncllm@2";
185 |
186 | const body = {
187 | model: "gpt-4.1-mini",
188 | // You MUST enable streaming, else the API will return an {error}
189 | stream: true,
190 | input: "Hello, world!",
191 | };
192 |
193 | for await (const data of asyncLLM("https://api.openai.com/v1/responses", {
194 | method: "POST",
195 | headers: { "Content-Type": "application/json", Authorization: `Bearer ${apiKey}` },
196 | body: JSON.stringify(body),
197 | })) {
198 | console.log(data);
199 | }
200 | ```
201 |
202 | This will log something like this on the console:
203 |
204 | ```js
205 | { content: "Hello", message: { "item_id": "msg_...", ...} }
206 | { content: "Hello!", message: { "item_id": "msg_...", ...} }
207 | { content: "Hello! How", message: { "item_id": "msg_...", ...} }
208 | ...
209 | { content: "Hello! How can I assist you today?", message: { "item_id": "msg_...", ...} }
210 | ```
211 |
212 | ### Anthropic streaming
213 |
214 | The package includes an Anthropic adapter that converts OpenAI chat completions requests to Anthropic's format,
215 | allowing you to use the same code structure across providers.
216 |
217 | ```javascript
218 | import { asyncLLM } from "https://cdn.jsdelivr.net/npm/asyncllm@2";
219 | import { anthropic } from "https://cdn.jsdelivr.net/npm/asyncllm@2/dist/anthropic.js";
220 |
221 | // You can use the anthropic() adapter to convert OpenAI chat completions requests to Anthropic's format.
222 | const body = anthropic({
223 | // Same as OpenAI example above
224 | });
225 |
226 | // Or you can use the asyncLLM() function directly with the Anthropic API endpoint.
227 | const body = {
228 | model: "claude-3-haiku-20240307",
229 | // You MUST enable streaming, else the API will return an {error}
230 | stream: true,
231 | max_tokens: 10,
232 | messages: [{ role: "user", content: "What is 2 + 2" }],
233 | };
234 |
235 | for await (const data of asyncLLM("https://api.anthropic.com/v1/messages", {
236 |   method: "POST",
236 |   headers: { "Content-Type": "application/json", "x-api-key": apiKey },
237 | body: JSON.stringify(body),
238 | })) {
239 | console.log(data);
240 | }
241 | ```
242 |
243 | ### Gemini streaming
244 |
245 | The package includes a Gemini adapter that converts OpenAI chat completions requests to Gemini's format,
246 | allowing you to use the same code structure across providers.
247 |
248 | ```javascript
249 | import { asyncLLM } from "https://cdn.jsdelivr.net/npm/asyncllm@2";
250 | import { gemini } from "https://cdn.jsdelivr.net/npm/asyncllm@2/dist/gemini.js";
251 |
252 | // You can use the gemini() adapter to convert OpenAI chat completions requests to Gemini's format.
253 | const body = gemini({
254 | // Same as OpenAI example above
255 | });
256 |
257 | // Or you can use the asyncLLM() function directly with the Gemini API endpoint.
258 | const body = {
259 | contents: [{ role: "user", parts: [{ text: "What is 2+2?" }] }],
260 | };
261 |
262 | for await (const data of asyncLLM(
263 | // You MUST use a streaming endpoint, else the API will return an {error}
264 | "https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash-8b:streamGenerateContent?alt=sse",
265 | {
266 | method: "POST",
267 | headers: {
268 | "Content-Type": "application/json",
269 | Authorization: `Bearer ${apiKey}`,
270 | },
271 | body: JSON.stringify(body),
272 | },
273 | )) {
274 | console.log(data);
275 | }
276 | ```
277 |
278 | ### Function Calling
279 |
280 | asyncLLM supports function calling (aka tools). Here's an example with OpenAI chat completions:
281 |
282 | ```javascript
283 | for await (const { tools } of asyncLLM("https://api.openai.com/v1/chat/completions", {
284 | method: "POST",
285 | headers: {
286 | "Content-Type": "application/json",
287 | Authorization: `Bearer ${apiKey}`,
288 | },
289 | body: JSON.stringify({
290 | model: "gpt-4.1-nano",
291 | stream: true,
292 | messages: [
293 | { role: "system", content: "Get delivery date for order" },
294 | { role: "user", content: "Order ID: 123456" },
295 | ],
296 | tool_choice: "required",
297 | tools: [
298 | {
299 | type: "function",
300 | function: {
301 | name: "get_delivery_date",
302 | parameters: { type: "object", properties: { order_id: { type: "string" } }, required: ["order_id"] },
303 | },
304 | },
305 | ],
306 | }),
307 | })) {
308 | console.log(JSON.stringify(tools));
309 | }
310 | ```
311 |
312 | `tools` is an array of objects with `name`, `id` (for Anthropic and OpenAI, not Gemini), and `args` properties. It streams like this:
313 |
314 | ```json
315 | [{"name":"get_delivery_date","id":"call_F8YHCjnzrrTjfE4YSSpVW2Bc","args":""}]
316 | [{"name":"get_delivery_date","id":"call_F8YHCjnzrrTjfE4YSSpVW2Bc","args":"{\""}]
317 | [{"name":"get_delivery_date","id":"call_F8YHCjnzrrTjfE4YSSpVW2Bc","args":"{\"order"}]
318 | [{"name":"get_delivery_date","id":"call_F8YHCjnzrrTjfE4YSSpVW2Bc","args":"{\"order_id"}]
319 | [{"name":"get_delivery_date","id":"call_F8YHCjnzrrTjfE4YSSpVW2Bc","args":"{\"order_id\":\""}]
320 | [{"name":"get_delivery_date","id":"call_F8YHCjnzrrTjfE4YSSpVW2Bc","args":"{\"order_id\":\"123"}]
321 | [{"name":"get_delivery_date","id":"call_F8YHCjnzrrTjfE4YSSpVW2Bc","args":"{\"order_id\":\"123456"}]
322 | [{"name":"get_delivery_date","id":"call_F8YHCjnzrrTjfE4YSSpVW2Bc","args":"{\"order_id\":\"123456\"}"}]
323 | ```
324 |
325 | Use a library like [partial-json](https://www.npmjs.com/package/partial-json) to parse the `args` incrementally.
326 |
327 | ### Streaming Config
328 |
329 | asyncLLM accepts a `config` object with the following properties:
330 |
331 | - `fetch`: Custom fetch implementation (defaults to global `fetch`).
332 | - `onResponse`: Async callback function that receives the Response object before streaming begins. If the callback returns a promise, it will be awaited before continuing the stream.
333 |
334 | Here's how you can use a custom fetch implementation:
335 |
336 | ```javascript
337 | import { asyncLLM } from "https://cdn.jsdelivr.net/npm/asyncllm@2";
338 |
339 | const body = {
340 | // Same as OpenAI example above
341 | };
342 |
343 | // Optional configuration. You can ignore it for most use cases.
344 | const config = {
345 | onResponse: async (response) => {
346 | console.log(response.status, response.headers);
347 | },
348 | // You can use a custom fetch implementation if needed
349 | fetch: fetch,
350 | };
351 |
352 | for await (const { content } of asyncLLM(
353 | "https://api.openai.com/v1/chat/completions",
354 | {
355 | method: "POST",
356 | headers: { "Content-Type": "application/json", Authorization: `Bearer ${apiKey}` },
357 | body: JSON.stringify(body),
358 | },
359 | config,
360 | )) {
361 | console.log(content);
362 | }
363 | ```
364 |
365 | ## Streaming from text
366 |
367 | You can parse streamed SSE events from a text string (e.g. from a cached response) using the provided `fetchText` helper:
368 |
369 | ```javascript
370 | import { asyncLLM } from "https://cdn.jsdelivr.net/npm/asyncllm@2";
371 | import { fetchText } from "https://cdn.jsdelivr.net/npm/asyncsse@1/dist/fetchtext.js";
372 |
373 | const text = `
374 | data: {"candidates": [{"content": {"parts": [{"text": "2"}],"role": "model"}}]}
375 |
376 | data: {"candidates": [{"content": {"parts": [{"text": " + 2 = 4\\n"}],"role": "model"}}]}
377 |
378 | data: {"candidates": [{"content": {"parts": [{"text": ""}],"role": "model"}}]}
379 | `;
380 |
381 | // Stream events from text
382 | for await (const event of asyncLLM(text, {}, { fetch: fetchText })) {
383 | console.log(event);
384 | }
385 | ```
386 |
387 | This outputs:
388 |
389 | ```
390 | { content: "2", message: { candidates: [...] } }
391 | { content: "2 + 2 = 4\n", message: { candidates: [...] } }
392 | ```
393 |
394 | This is particularly useful for testing SSE parsing without making actual HTTP requests.
395 |
396 | ### Error handling
397 |
398 | If an error occurs, it will be yielded in the `error` property. For example:
399 |
400 | ```javascript
401 | for await (const { content, error } of asyncLLM("https://api.openai.com/v1/chat/completions", {
402 | method: "POST",
403 | // ...
404 | })) {
405 | if (error) console.error(error);
406 | else console.log(content);
407 | }
408 | ```
409 |
410 | The `error` property is set if:
411 |
412 | - The underlying API (e.g. OpenAI, Anthropic, Gemini) returns an error in the response (e.g. `error.message` or `message.error` or `error`)
413 | - The fetch request fails (e.g. network error)
414 | - The response body cannot be parsed as JSON
415 |
416 | ### `asyncLLM(request: string | Request, options?: RequestInit, config?: SSEConfig): AsyncGenerator`
417 |
418 | Fetches streaming responses from LLM providers and yields events.
419 |
420 | - `request`: The URL or Request object for the LLM API endpoint
421 | - `options`: Optional [fetch options](https://developer.mozilla.org/en-US/docs/Web/API/fetch#parameters)
422 | - `config`: Optional configuration object for SSE handling
423 | - `fetch`: Custom fetch implementation (defaults to global fetch)
424 | - `onResponse`: Async callback function that receives the Response object before streaming begins. If the callback returns a promise, it will be awaited before continuing the stream.
425 |
426 | Returns an async generator that yields [`LLMEvent` objects](#llmevent).
427 |
428 | #### LLMEvent
429 |
430 | - `content`: The text content of the response
431 | - `tools`: Array of tool call objects with:
432 | - `name`: The name of the tool being called
433 | - `args`: The arguments for the tool call as a JSON-encoded string, e.g. `{"order_id":"123456"}`
434 | - `id`: Optional unique identifier for the tool call (e.g. OpenAI's `call_F8YHCjnzrrTjfE4YSSpVW2Bc` or Anthropic's `toolu_01T1x1fJ34qAmk2tNTrN7Up6`. Gemini does not return an id.)
435 | - `message`: The raw message object from the LLM provider (may include id, model, usage stats, etc.)
436 | - `error`: Error message if the request fails
437 |
438 | ## Setup
439 |
440 | ```bash
441 | git clone https://github.com/sanand0/asyncllm
442 | cd asyncllm
443 | npm install
444 |
445 | # Run all tests
446 | npm test
447 | # ... or specific tests
448 | npm test -- --filter 'OpenAI'
449 |
450 | # Deploy: update package.json version, then:
451 | npm publish
452 | ```
453 |
454 | ## Changelog
455 |
456 | - 2.2.0: Added [OpenAI Responses API](https://platform.openai.com/docs/api-reference/responses-streaming)
457 | - 2.1.2: Update repo links
458 | - 2.1.1: Document standalone adapter usage
459 | - 2.1.0: Added `id` to tools to support unique tool call identifiers from providers
460 | - 2.0.1: Multiple tools support.
461 | - Breaking change: `tool` and `args` are not part of the response. Instead, it has `tools`, an array of `{ name, args }`
462 | - Fixed Gemini adapter to return `toolConfig` instead of `toolsConfig`
463 | - 1.2.2: Added streaming from text documentation via `config.fetch`. Upgrade to asyncSSE 1.3.1 (bug fix).
464 | - 1.2.1: Added `config.fetch` for custom fetch implementation
465 | - 1.2.0: Added `config.onResponse(response)` that receives the Response object before streaming begins
466 | - 1.1.3: Ensure `max_tokens` for Anthropic. Improve error handling
467 | - 1.1.1: Added [Anthropic adapter](#anthropic)
468 | - 1.1.0: Added [Gemini adapter](#gemini)
469 | - 1.0.0: Initial release with [asyncLLM](#asyncllm) and [LLMEvent](#llmevent)
470 |
471 | ## Contributing
472 |
473 | Contributions are welcome! Please feel free to submit a Pull Request.
474 |
475 | ## License
476 |
477 | This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
478 |
--------------------------------------------------------------------------------
/anthropic.js:
--------------------------------------------------------------------------------
/**
 * Convert an OpenAI chat completions request body into an Anthropic
 * Messages API request body.
 *
 * System messages are hoisted to the top-level `system` field, scalar
 * OpenAI parameters are mapped to their Anthropic equivalents (only when
 * defined), and tool definitions are converted to Anthropic's
 * `input_schema` format.
 *
 * Fix: unsupported content parts (e.g. audio) and images with external
 * (non-data:) URLs previously produced `undefined` entries in the content
 * array, which the Anthropic API rejects. Such parts are now dropped.
 *
 * @param {Object} body - OpenAI chat completions request body
 * @returns {Object} Anthropic Messages API request body
 */
export function anthropic(body) {
  // System messages are specified at the top level in Anthropic
  const system = body.messages.find((msg) => msg.role === "system");

  // Convert messages
  const messages = body.messages
    .filter((msg) => msg.role !== "system")
    .map((msg) => ({
      role: msg.role,
      // Handle both text and binary content (images)
      content: Array.isArray(msg.content)
        ? msg.content
            .map(({ type, text, image_url }) => {
              if (type === "text") return { type: "text", text };
              if (type === "image_url") {
                const source = anthropicSourceFromURL(image_url.url);
                // External URLs are not supported by this adapter; drop the
                // part rather than emit an invalid { source: undefined } block.
                return source ? { type: "image", source } : undefined;
              }
              // Anthropic doesn't support audio or other part types:
              // fall through to undefined and drop below.
            })
            .filter(Boolean)
        : msg.content,
    }));

  const parallel_tool_calls =
    typeof body.parallel_tool_calls == "boolean" ? { disable_parallel_tool_use: !body.parallel_tool_calls } : {};
  // Map OpenAI parameters to Anthropic equivalents, only including if defined
  const params = {
    model: body.model,
    // Anthropic requires max_tokens; default to 4096 when unspecified
    max_tokens: body.max_tokens ?? 4096,
    ...(body.metadata?.user_id ? { metadata: { user_id: body.metadata?.user_id } } : {}),
    ...(typeof body.stream == "boolean" ? { stream: body.stream } : {}),
    ...(typeof body.temperature == "number" ? { temperature: body.temperature } : {}),
    ...(typeof body.top_p == "number" ? { top_p: body.top_p } : {}),
    // Convert single string or array of stop sequences
    ...(typeof body.stop == "string"
      ? { stop_sequences: [body.stop] }
      : Array.isArray(body.stop)
        ? { stop_sequences: body.stop }
        : {}),
    // Anthropic does not support JSON mode
    // Convert OpenAI tool_choice to Anthropic's tool_choice.
    // "none" is expressed by omitting tool_choice entirely.
    ...(body.tool_choice == "auto"
      ? { tool_choice: { type: "auto", ...parallel_tool_calls } }
      : body.tool_choice == "required"
        ? { tool_choice: { type: "any", ...parallel_tool_calls } }
        : body.tool_choice == "none"
          ? {}
          : typeof body.tool_choice == "object"
            ? {
                tool_choice: {
                  type: "tool",
                  name: body.tool_choice.function?.name,
                  ...parallel_tool_calls,
                },
              }
            : {}),
  };

  // Convert function definitions to Anthropic's tool format
  const tools = body.tools?.map((tool) => ({
    name: tool.function.name,
    description: tool.function.description,
    input_schema: tool.function.parameters,
  }));

  // Only include optional configs if they exist
  return {
    ...(system ? { system: system.content } : {}),
    messages,
    ...params,
    ...(body.tools ? { tools } : {}),
  };
}

// Handle data URIs in Anthropic's format. External URLs are not supported,
// so anything that is not a data: URI yields undefined.
const anthropicSourceFromURL = (url) => {
  if (url.startsWith("data:")) {
    const [base, base64Data] = url.split(",");
    return {
      type: "base64",
      // e.g. "data:image/jpeg;base64" -> "image/jpeg"
      media_type: base.replace("data:", "").replace(";base64", ""),
      data: base64Data,
    };
  }
};
91 |
--------------------------------------------------------------------------------
/anthropic_test.js:
--------------------------------------------------------------------------------
// Unit tests for the anthropic() OpenAI -> Anthropic request adapter.
// Written for the Deno test runner: run with `deno test anthropic_test.js`.
// Each test builds an OpenAI chat-completions body, converts it with
// anthropic(), and compares the full result against the expected
// Anthropic Messages API body.
import { anthropic } from "./anthropic.js";

// Deep equality via JSON serialization; throws with a readable diff.
// NOTE(review): comparison is key-order sensitive, so expected objects
// must list keys in the order the adapter emits them.
function assertEquals(actual, expected, message) {
  if (JSON.stringify(actual) === JSON.stringify(expected)) return;
  throw new Error(
    message || `Expected:\n${JSON.stringify(expected, null, 2)}. Actual:\n${JSON.stringify(actual, null, 2)}`,
  );
}

// 1. System message handling
// The system message is hoisted to the top-level `system` field, and
// max_tokens defaults to 4096 (Anthropic requires it).
Deno.test("anthropic - system message handling", () => {
  const input = {
    messages: [
      { role: "system", content: "You are helpful" },
      { role: "user", content: "Hi" },
    ],
  };

  const expected = {
    system: "You are helpful",
    messages: [{ role: "user", content: "Hi" }],
    max_tokens: 4096,
  };

  assertEquals(anthropic(input), expected);
});

// 2. Basic message conversion
Deno.test("anthropic - basic message conversion", () => {
  const input = {
    messages: [{ role: "user", content: "Hello" }],
  };

  const expected = {
    messages: [{ role: "user", content: "Hello" }],
    max_tokens: 4096,
  };

  assertEquals(anthropic(input), expected);
});

// 2b. Multimodal content handling
// Data-URI images become { type: "image", source: { type: "base64", ... } }.
Deno.test("anthropic - multimodal content", () => {
  const input = {
    messages: [
      {
        role: "user",
        content: [
          { type: "text", text: "What's in this image?" },
          {
            type: "image_url",
            image_url: { url: "data:image/jpeg;base64,abc123" },
          },
        ],
      },
    ],
  };

  const expected = {
    messages: [
      {
        role: "user",
        content: [
          { type: "text", text: "What's in this image?" },
          {
            type: "image",
            source: {
              type: "base64",
              media_type: "image/jpeg",
              data: "abc123",
            },
          },
        ],
      },
    ],
    max_tokens: 4096,
  };

  assertEquals(anthropic(input), expected);
});

// 3. Parameter conversion
// Scalar OpenAI params map 1:1; `stop` becomes `stop_sequences`.
Deno.test("anthropic - parameter conversion", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    model: "claude-3-5-sonnet-20241002",
    max_tokens: 100,
    metadata: { user_id: "123" },
    stream: true,
    temperature: 0.7,
    top_p: 0.9,
    stop: ["END"],
  };

  const expected = {
    messages: [{ role: "user", content: "Hi" }],
    model: "claude-3-5-sonnet-20241002",
    max_tokens: 100,
    metadata: { user_id: "123" },
    stream: true,
    temperature: 0.7,
    top_p: 0.9,
    stop_sequences: ["END"],
  };

  assertEquals(anthropic(input), expected);
});

// 3b. Array stop sequences
Deno.test("anthropic - array stop sequences", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    stop: ["STOP", "END"],
  };

  const expected = {
    messages: [{ role: "user", content: "Hi" }],
    max_tokens: 4096,
    stop_sequences: ["STOP", "END"],
  };

  assertEquals(anthropic(input), expected);
});

// 4. Tool handling
// tool_choice "auto" + parallel_tool_calls: true becomes
// { type: "auto", disable_parallel_tool_use: false }; tool definitions move
// `parameters` to `input_schema`.
Deno.test("anthropic - tool calling configurations", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    tool_choice: "auto",
    parallel_tool_calls: true,
    tools: [
      {
        function: {
          name: "get_weather",
          description: "Get weather info",
          parameters: {
            type: "object",
            properties: { location: { type: "string" } },
          },
        },
      },
    ],
  };

  const expected = {
    messages: [{ role: "user", content: "Hi" }],
    max_tokens: 4096,
    tool_choice: { type: "auto", disable_parallel_tool_use: false },
    tools: [
      {
        name: "get_weather",
        description: "Get weather info",
        input_schema: {
          type: "object",
          properties: { location: { type: "string" } },
        },
      },
    ],
  };

  assertEquals(anthropic(input), expected);
});

// 4b. Specific tool choice
// An object tool_choice becomes { type: "tool", name: ... }.
Deno.test("anthropic - specific tool choice", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    tool_choice: { function: { name: "get_weather" } },
  };

  const expected = {
    messages: [{ role: "user", content: "Hi" }],
    max_tokens: 4096,
    tool_choice: { type: "tool", name: "get_weather" },
  };

  assertEquals(anthropic(input), expected);
});
--------------------------------------------------------------------------------
/gemini.js:
--------------------------------------------------------------------------------
/**
 * Convert an OpenAI chat completions request body into a Gemini
 * `generateContent` request body.
 *
 * System messages move to `systemInstruction`, the "assistant" role maps
 * to Gemini's "model" role, scalar parameters map into `generationConfig`,
 * and tools / tool_choice map to Gemini's function-calling config.
 *
 * Fix: content parts with an unsupported `type` previously produced
 * `undefined` holes in the `parts` array, which the Gemini API rejects.
 * They are now dropped.
 *
 * @param {Object} body - OpenAI chat completions request body
 * @returns {Object} Gemini generateContent request body
 */
export function gemini(body) {
  // System messages live in a separate object in Gemini
  const systemMessage = body.messages.find((msg) => msg.role === "system");
  const systemInstruction = systemMessage ? { systemInstruction: { parts: [{ text: systemMessage.content }] } } : {};

  // Convert messages: Gemini uses "model" instead of "assistant" and has different content structure
  const contents = body.messages
    .filter((msg) => msg.role !== "system")
    .map((msg) => ({
      role: msg.role == "assistant" ? "model" : msg.role,
      // Handle both text and binary content (images/audio)
      parts: Array.isArray(msg.content)
        ? msg.content
            .map(({ type, text, image_url, input_audio }) => {
              if (type === "text") return { text };
              if (type === "image_url") return geminiPartFromURL(image_url.url);
              if (type == "input_audio") return geminiPartFromURL(input_audio.data);
              // Unknown part types fall through to undefined and are dropped below
            })
            .filter(Boolean)
        : [{ text: msg.content }],
    }));

  // Map OpenAI parameters to Gemini equivalents, only including if defined
  const generationConfig = {
    ...(typeof body.temperature == "number" ? { temperature: body.temperature } : {}),
    ...(typeof body.max_tokens == "number" ? { maxOutputTokens: body.max_tokens } : {}),
    ...(typeof body.max_completion_tokens == "number" ? { maxOutputTokens: body.max_completion_tokens } : {}),
    ...(typeof body.top_p == "number" ? { topP: body.top_p } : {}),
    ...(typeof body.presence_penalty == "number" ? { presencePenalty: body.presence_penalty } : {}),
    ...(typeof body.frequency_penalty == "number" ? { frequencyPenalty: body.frequency_penalty } : {}),
    ...(typeof body.logprobs == "boolean" ? { responseLogprobs: body.logprobs } : {}),
    ...(typeof body.top_logprobs == "number" ? { logprobs: body.top_logprobs } : {}),
    ...(typeof body.n == "number" ? { candidateCount: body.n } : {}),
    // Convert single string or array of stop sequences
    ...(typeof body.stop == "string"
      ? { stopSequences: [body.stop] }
      : Array.isArray(body.stop)
        ? { stopSequences: body.stop }
        : {}),
    // Handle JSON response formatting and schemas
    ...(body.response_format?.type == "json_object"
      ? { responseMimeType: "application/json" }
      : body.response_format?.type == "json_schema"
        ? {
            responseMimeType: "application/json",
            // Clone before cleaning: geminiSchema mutates in place
            responseSchema: geminiSchema(structuredClone(body.response_format?.json_schema?.schema)),
          }
        : {}),
  };

  // Convert OpenAI tool_choice to Gemini's function calling modes
  const toolConfig =
    body.tool_choice == "auto"
      ? { function_calling_config: { mode: "AUTO" } }
      : body.tool_choice == "required"
        ? { function_calling_config: { mode: "ANY" } }
        : body.tool_choice == "none"
          ? { function_calling_config: { mode: "NONE" } }
          : typeof body.tool_choice == "object"
            ? {
                function_calling_config: {
                  mode: "ANY",
                  allowed_function_names: [body.tool_choice.function?.name],
                },
              }
            : {};

  // Convert function definitions to Gemini's tool format
  const tools = body.tools
    ? {
        functionDeclarations: body.tools.map((tool) => ({
          name: tool.function.name,
          description: tool.function.description,
          parameters: geminiSchema(structuredClone(tool.function.parameters)),
        })),
      }
    : {};

  // Only include optional configs if they exist
  return {
    ...systemInstruction,
    contents,
    ...(Object.keys(generationConfig).length > 0 ? { generationConfig } : {}),
    ...(body.tool_choice ? { toolConfig } : {}),
    ...(body.tools ? { tools } : {}),
  };
}

// Handle both data URIs and external URLs in Gemini's required format
const geminiPartFromURL = (url) => {
  if (url.startsWith("data:")) {
    const [base, base64Data] = url.split(",");
    return {
      inlineData: {
        // e.g. "data:image/png;base64" -> "image/png"
        mimeType: base.replace("data:", "").replace(";base64", ""),
        data: base64Data,
      },
    };
  }
  // External URLs are referenced via fileData
  return { fileData: { fileUri: url } };
};
105 |
// Gemini doesn't support `additionalProperties` in schemas. Recursively
// remove it wherever it appears as a JSON Schema KEYWORD.
//
// Fix: keys nested directly under `properties` are user-defined property
// NAMES, not schema keywords, so a property that happens to be called
// "additionalProperties" must be kept (only its schema value is cleaned).
// Mutates `obj` in place and returns it (callers pass a structuredClone).
function geminiSchema(obj) {
  if (Array.isArray(obj)) obj.forEach((item) => geminiSchema(item));
  else if (obj && typeof obj === "object") {
    for (const key of Object.keys(obj)) {
      if (key === "additionalProperties") delete obj[key];
      else if (key === "properties" && obj[key] && typeof obj[key] === "object" && !Array.isArray(obj[key]))
        // Recurse into each property's schema without treating the
        // property names themselves as keywords.
        for (const propSchema of Object.values(obj[key])) geminiSchema(propSchema);
      else geminiSchema(obj[key]);
    }
  }
  return obj;
}
117 |
--------------------------------------------------------------------------------
/gemini_test.js:
--------------------------------------------------------------------------------
1 | import { gemini } from "./gemini.js";
2 |
/**
 * Deep-equality assertion via JSON serialization.
 * Throws an Error on mismatch, using `message` when given,
 * otherwise a pretty-printed expected/actual dump.
 */
function assertEquals(actual, expected, message) {
  const same = JSON.stringify(actual) === JSON.stringify(expected);
  if (same) return;
  const details = `Expected:\n${JSON.stringify(expected, null, 2)}. Actual:\n${JSON.stringify(actual, null, 2)}`;
  throw new Error(message || details);
}
9 |
// A plain user message becomes one `contents` entry with a single text part.
Deno.test("gemini - basic message conversion", () => {
  const input = {
    messages: [{ role: "user", content: "Hello" }],
  };

  const expected = {
    contents: [{ role: "user", parts: [{ text: "Hello" }] }],
  };

  assertEquals(gemini(input), expected);
});

// OpenAI "system" messages map to Gemini's top-level systemInstruction.
Deno.test("gemini - system message handling", () => {
  const input = {
    messages: [
      { role: "system", content: "You are helpful" },
      { role: "user", content: "Hi" },
    ],
  };

  const expected = {
    systemInstruction: { parts: [{ text: "You are helpful" }] },
    contents: [{ role: "user", parts: [{ text: "Hi" }] }],
  };

  assertEquals(gemini(input), expected);
});

// OpenAI's "assistant" role is renamed to Gemini's "model" role.
Deno.test("gemini - assistant message conversion", () => {
  const input = {
    messages: [
      { role: "user", content: "Hi" },
      { role: "assistant", content: "Hello" },
    ],
  };

  const expected = {
    contents: [
      { role: "user", parts: [{ text: "Hi" }] },
      { role: "model", parts: [{ text: "Hello" }] },
    ],
  };

  assertEquals(gemini(input), expected);
});

// Mixed text/image/audio parts: data URIs become inlineData,
// external URLs become fileData.
Deno.test("gemini - multimodal content", () => {
  const input = {
    messages: [
      {
        role: "user",
        content: [
          { type: "text", text: "What's in this image?" },
          {
            type: "image_url",
            image_url: { url: "data:image/jpeg;base64,abc123" },
          },
          {
            type: "input_audio",
            input_audio: { data: "https://example.com/audio.mp3" },
          },
        ],
      },
    ],
  };

  const expected = {
    contents: [
      {
        role: "user",
        parts: [
          { text: "What's in this image?" },
          { inlineData: { mimeType: "image/jpeg", data: "abc123" } },
          { fileData: { fileUri: "https://example.com/audio.mp3" } },
        ],
      },
    ],
  };

  assertEquals(gemini(input), expected);
});

// Every OpenAI sampling/limit parameter is renamed into generationConfig.
// Note: OpenAI's `logprobs`/`top_logprobs` pair maps to Gemini's
// `responseLogprobs`/`logprobs`.
Deno.test("gemini - generation config parameters", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    temperature: 0.7,
    max_tokens: 100,
    top_p: 0.9,
    presence_penalty: 0.5,
    frequency_penalty: 0.5,
    logprobs: true,
    top_logprobs: 3,
    n: 2,
    stop: ["END"],
    response_format: { type: "json_object" },
  };

  const expected = {
    contents: [{ role: "user", parts: [{ text: "Hi" }] }],
    generationConfig: {
      temperature: 0.7,
      maxOutputTokens: 100,
      topP: 0.9,
      presencePenalty: 0.5,
      frequencyPenalty: 0.5,
      responseLogprobs: true,
      logprobs: 3,
      candidateCount: 2,
      stopSequences: ["END"],
      responseMimeType: "application/json",
    },
  };

  assertEquals(gemini(input), expected);
});

// Tool definitions become functionDeclarations; unsupported
// `additionalProperties` is stripped from the JSON schema.
Deno.test("gemini - tool calling configurations", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    tool_choice: "auto",
    tools: [
      {
        function: {
          name: "get_weather",
          description: "Get weather info",
          parameters: {
            type: "object",
            properties: { location: { type: "string" } },
            required: ["location"],
            additionalProperties: false,
          },
        },
      },
    ],
  };

  const expected = {
    contents: [{ role: "user", parts: [{ text: "Hi" }] }],
    toolConfig: { function_calling_config: { mode: "AUTO" } },
    tools: {
      functionDeclarations: [
        {
          name: "get_weather",
          description: "Get weather info",
          parameters: {
            type: "object",
            properties: { location: { type: "string" } },
            required: ["location"],
          },
        },
      ],
    },
  };

  assertEquals(gemini(input), expected);
});

// Naming a specific function forces mode ANY restricted to that function.
Deno.test("gemini - specific tool choice", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    tool_choice: { function: { name: "get_weather" } },
  };

  const expected = {
    contents: [{ role: "user", parts: [{ text: "Hi" }] }],
    toolConfig: {
      function_calling_config: {
        mode: "ANY",
        allowed_function_names: ["get_weather"],
      },
    },
  };

  assertEquals(gemini(input), expected);
});

// json_schema response format maps to responseMimeType + responseSchema,
// again with additionalProperties removed.
Deno.test("gemini - json schema response format", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    response_format: {
      type: "json_schema",
      json_schema: {
        schema: {
          type: "object",
          properties: {
            name: { type: "string" },
            age: { type: "number" },
            additionalProperties: false,
          },
        },
      },
    },
  };

  const expected = {
    contents: [{ role: "user", parts: [{ text: "Hi" }] }],
    generationConfig: {
      responseMimeType: "application/json",
      responseSchema: {
        type: "object",
        properties: { name: { type: "string" }, age: { type: "number" } },
      },
    },
  };

  assertEquals(gemini(input), expected);
});

// tool_choice "required" -> mode ANY (model must call some tool).
Deno.test("gemini - required tool choice", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    tool_choice: "required",
  };

  const expected = {
    contents: [{ role: "user", parts: [{ text: "Hi" }] }],
    toolConfig: { function_calling_config: { mode: "ANY" } },
  };

  assertEquals(gemini(input), expected);
});

// tool_choice "none" -> mode NONE (tool calling disabled).
Deno.test("gemini - none tool choice", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    tool_choice: "none",
  };

  const expected = {
    contents: [{ role: "user", parts: [{ text: "Hi" }] }],
    toolConfig: { function_calling_config: { mode: "NONE" } },
  };

  assertEquals(gemini(input), expected);
});

// A bare string `stop` is wrapped into a one-element stopSequences array.
Deno.test("gemini - string stop sequence", () => {
  const input = { messages: [{ role: "user", content: "Hi" }], stop: "STOP" };

  const expected = {
    contents: [{ role: "user", parts: [{ text: "Hi" }] }],
    generationConfig: { stopSequences: ["STOP"] },
  };

  assertEquals(gemini(input), expected);
});

// max_completion_tokens (the newer OpenAI name) also maps to maxOutputTokens.
Deno.test("gemini - max_completion_tokens parameter", () => {
  const input = {
    messages: [{ role: "user", content: "Hi" }],
    max_completion_tokens: 150,
  };

  const expected = {
    contents: [{ role: "user", parts: [{ text: "Hi" }] }],
    generationConfig: { maxOutputTokens: 150 },
  };

  assertEquals(gemini(input), expected);
});
270 |
--------------------------------------------------------------------------------
/index.d.ts:
--------------------------------------------------------------------------------
1 | import { SSEConfig } from "asyncsse";
2 |
// A single (possibly partial) tool call accumulated from the stream.
export interface LLMTool {
  // Provider-assigned call id, when the provider supplies one.
  id?: string;
  // Function name; present once the provider announces the call.
  name?: string;
  // JSON-encoded arguments, accumulated incrementally across deltas.
  args?: string;
}
8 |
9 | export interface LLMEvent {
10 | content?: string;
11 | tools?: LLMTool[];
12 | error?: string;
13 | message?: Record;
14 | }
15 |
16 | export function asyncLLM(
17 | request: string | Request,
18 | options?: RequestInit,
19 | config?: SSEConfig
20 | ): AsyncGenerator;
21 |
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
1 | import { asyncSSE } from "asyncsse";
2 |
/**
 * asyncLLM yields events when streaming from a streaming LLM endpoint.
 *
 * @param {string | Request} request
 * @param {RequestInit} [options]
 * @param {SSEConfig} [config]
 * @returns {AsyncGenerator<{content?: string, tools?: Array<{id?: string, name?: string, args?: string}>, error?: string, message?: object}, void, unknown>}
 *
 * @example
 * for await (const event of asyncLLM("https://api.openai.com/v1/chat/completions", {
 *   method: "POST",
 *   headers: {
 *     "Content-Type": "application/json",
 *     "Authorization": "Bearer YOUR_API_KEY",
 *   },
 *   body: JSON.stringify({
 *     model: "gpt-3.5-turbo",
 *     messages: [{ role: "user", content: "Hello, world!" }],
 *   }),
 * }, {
 *   onResponse: async (response) => {
 *     console.log(response.status, response.headers);
 *   },
 * })) {
 *   console.log(event);
 * }
 */
export async function* asyncLLM(request, options = {}, config = {}) {
  let content;
  const tools = [];

  // The most recent tool call; argument deltas are appended to it.
  function latestTool() {
    if (!tools.length) tools.push({});
    return tools.at(-1);
  }

  for await (const event of asyncSSE(request, options, config)) {
    // OpenAI and Cloudflare AI Workers use "[DONE]" to indicate the end of the stream
    if (event.data === "[DONE]") break;

    if (event.error) {
      yield event;
      continue;
    }

    let message;
    try {
      message = JSON.parse(event.data);
    } catch (error) {
      yield { error: error.message, data: event.data };
      continue;
    }

    // Handle errors. asyncSSE yields { error: ... } if the fetch fails.
    // OpenAI, Anthropic, and Gemini return {"error": ...}.
    // OpenRouter returns {"message":{"error": ...}}.
    const error = message.message?.error ?? message.error?.message ?? message.error ?? event.error;
    if (error) {
      yield { error };
      continue;
    }

    // Attempt to parse with each provider's format; stop at the first that
    // yields new content or tool data.
    let hasNewData = false;
    for (const parser of Object.values(providers)) {
      const extract = parser(message);
      hasNewData = !isEmpty(extract.content) || extract.tools.length > 0;
      if (!isEmpty(extract.content)) content = (content ?? "") + extract.content;
      for (const { name, args, id } of extract.tools) {
        // A name starts a new tool call; args append to the latest call.
        if (!isEmpty(name)) {
          const tool = { name };
          if (!isEmpty(id)) tool.id = id;
          tools.push(tool);
        }
        if (!isEmpty(args)) {
          const tool = latestTool();
          tool.args = (tool.args ?? "") + args;
        }
      }
      if (hasNewData) break;
    }

    if (hasNewData) {
      // Build the event conditionally: previously `{ content, message }`
      // always included a `content: undefined` key before any text arrived,
      // defeating the isEmpty guard below. Now the key is present only once
      // content exists, so `"content" in data` is reliable for consumers.
      const data = {};
      if (!isEmpty(content)) data.content = content;
      data.message = message;
      if (tools.length) data.tools = tools;
      yield data;
    }
  }
}
94 |
// Return the delta from each message as { content, tools }
// content delta is string | undefined
// tools delta is [{ name?: string, args?: string }] | []
const providers = {
  // Azure, OpenRouter, Groq, and a few others follow OpenAI's format
  openai: (msg) => {
    const delta = msg.choices?.[0]?.delta;
    const calls = delta?.tool_calls ?? [];
    return {
      content: delta?.content,
      tools: calls.map(({ id, function: fn }) => ({ id, name: fn.name, args: fn.arguments })),
    };
  },
  // OpenAI Responses API (streaming w/ tool support)
  openaiResponses: (msg) => {
    let tools = [];
    if (msg.type == "response.output_item.added" && msg.item?.type == "function_call")
      tools = [{ id: msg.item.id, name: msg.item.name, args: msg.item.arguments }];
    else if (msg.type == "response.function_call_arguments.delta") tools = [{ args: msg.delta }];
    return { content: msg.type == "response.output_text.delta" ? msg.delta : undefined, tools };
  },
  anthropic: (msg) => {
    // A content_block carrying a name starts a tool call; partial_json extends it.
    const block = msg.content_block;
    let tools = [];
    if (!isEmpty(block?.name)) tools = [{ name: block.name, id: block.id }];
    else if (!isEmpty(msg.delta?.partial_json)) tools = [{ args: msg.delta?.partial_json }];
    return { content: msg.delta?.text, tools };
  },
  gemini: (msg) => {
    const parts = msg.candidates?.[0]?.content?.parts ?? [];
    return {
      content: parts[0]?.text,
      tools: parts
        .filter((part) => part.functionCall)
        .map(({ functionCall }) => ({ name: functionCall.name, args: JSON.stringify(functionCall.args) })),
    };
  },
  // OpenAI's Responses API also has a .response, so we need to disambiguate.
  /*
  cloudflare: (m) => ({
    content: m.response,
    tools: [],
  }),
  */
};

// Loose == null matches exactly undefined and null, nothing else.
const isEmpty = (value) => value == null;
143 |
--------------------------------------------------------------------------------
/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "asyncllm",
3 | "version": "2.1.2",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "asyncllm",
9 | "version": "2.1.2",
10 | "license": "MIT",
11 | "dependencies": {
12 | "asyncsse": "^1.3.1"
13 | },
14 | "engines": {
15 | "node": ">=14.0.0"
16 | }
17 | },
18 | "node_modules/asyncsse": {
19 | "version": "1.3.1",
20 | "resolved": "https://registry.npmjs.org/asyncsse/-/asyncsse-1.3.1.tgz",
21 | "integrity": "sha512-sPd7NAmOKAl+optdLYe0zyaXqMf4PCYNtkHvSddRs4cZs40i/zdG+1h8qrkg2QKoUsRmwoUaTuv+5iKkT5iv7w==",
22 | "license": "MIT",
23 | "engines": {
24 | "node": ">=14.0.0"
25 | }
26 | }
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "asyncllm",
3 | "version": "2.2.0",
4 | "description": "Fetch streaming LLM responses as an async iterable",
5 | "main": "dist/asyncllm.js",
6 | "type": "module",
7 | "module": "index.js",
8 | "exports": {
9 | ".": "./dist/asyncllm.js",
10 | "./anthropic": "./dist/anthropic.js",
11 | "./gemini": "./dist/gemini.js"
12 | },
13 | "scripts": {
14 | "test": "deno test --allow-net --allow-read",
15 | "build-asyncllm": "npx -y esbuild index.js --bundle --minify --format=esm --outfile=dist/asyncllm.js",
16 | "build-gemini": "npx -y esbuild gemini.js --bundle --minify --format=esm --outfile=dist/gemini.js",
17 | "build-anthropic": "npx -y esbuild anthropic.js --bundle --minify --format=esm --outfile=dist/anthropic.js",
18 | "build": "npm run build-asyncllm && npm run build-gemini && npm run build-anthropic",
19 | "lint": "npx prettier@3.5 --write *.js *.md",
20 | "prepublishOnly": "npm run lint && npm run build"
21 | },
22 | "keywords": [
23 | "sse",
24 | "fetch",
25 | "async",
26 | "iterable",
27 | "server-sent-events",
28 | "streaming",
29 | "llm",
30 | "openai",
31 | "anthropic",
32 | "gemini",
33 | "cloudflare"
34 | ],
35 | "author": "S Anand ",
36 | "license": "MIT",
37 | "repository": {
38 | "type": "git",
39 | "url": "https://github.com/sanand0/asyncllm.git"
40 | },
41 | "bugs": {
42 | "url": "https://github.com/sanand0/asyncllm/issues"
43 | },
44 | "homepage": "https://github.com/sanand0/asyncllm#readme",
45 | "engines": {
46 | "node": ">=14.0.0"
47 | },
48 | "prettier": {
49 | "printWidth": 120
50 | },
51 | "files": [
52 | "README.md",
53 | "dist",
54 | "index.js",
55 | "index.d.ts"
56 | ],
57 | "dependencies": {
58 | "asyncsse": "^1.3.1"
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/samples/anthropic-tools.txt:
--------------------------------------------------------------------------------
1 | event: message_start
2 | data: {"type":"message_start","message":{"id":"msg_014p7gG3wDgGV9EUtLvnow3U","type":"message","role":"assistant","model":"claude-3-haiku-20240307","stop_sequence":null,"usage":{"input_tokens":472,"output_tokens":2},"content":[],"stop_reason":null}}
3 |
4 | event: content_block_start
5 | data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}
6 |
7 | event: ping
8 | data: {"type": "ping"}
9 |
10 | event: content_block_delta
11 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Okay"}}
12 |
13 | event: content_block_delta
14 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":","}}
15 |
16 | event: content_block_delta
17 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" let"}}
18 |
19 | event: content_block_delta
20 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"'s"}}
21 |
22 | event: content_block_delta
23 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" check"}}
24 |
25 | event: content_block_delta
26 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" the"}}
27 |
28 | event: content_block_delta
29 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" weather"}}
30 |
31 | event: content_block_delta
32 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" for"}}
33 |
34 | event: content_block_delta
35 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" San"}}
36 |
37 | event: content_block_delta
38 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" Francisco"}}
39 |
40 | event: content_block_delta
41 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":","}}
42 |
43 | event: content_block_delta
44 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" CA"}}
45 |
46 | event: content_block_delta
47 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":":"}}
48 |
49 | event: content_block_stop
50 | data: {"type":"content_block_stop","index":0}
51 |
52 | event: content_block_start
53 | data: {"type":"content_block_start","index":1,"content_block":{"type":"tool_use","id":"toolu_01T1x1fJ34qAmk2tNTrN7Up6","name":"get_weather","input":{}}}
54 |
55 | event: content_block_delta
56 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":""}}
57 |
58 | event: content_block_delta
59 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":"{\"location\":"}}
60 |
61 | event: content_block_delta
62 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":" \"San"}}
63 |
64 | event: content_block_delta
65 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":" Francisc"}}
66 |
67 | event: content_block_delta
68 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":"o,"}}
69 |
70 | event: content_block_delta
71 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":" CA\""}}
72 |
73 | event: content_block_delta
74 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":", "}}
75 |
76 | event: content_block_delta
77 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":"\"unit\": \"fah"}}
78 |
79 | event: content_block_delta
80 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":"renheit\"}"}}
81 |
82 | event: content_block_stop
83 | data: {"type":"content_block_stop","index":1}
84 |
85 | event: message_delta
86 | data: {"type":"message_delta","delta":{"stop_reason":"tool_use","stop_sequence":null},"usage":{"output_tokens":89}}
87 |
88 | event: message_stop
89 | data: {"type":"message_stop"}
90 |
--------------------------------------------------------------------------------
/samples/anthropic-tools2.txt:
--------------------------------------------------------------------------------
1 | event: message_start
2 | data: {"type":"message_start","message":{"id":"msg_01NpRfBZDJHQvTKGtrwFJheH","type":"message","role":"assistant","model":"claude-3-haiku-20240307","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":482,"output_tokens":8}} }
3 |
4 | event: content_block_start
5 | data: {"type":"content_block_start","index":0,"content_block":{"type":"tool_use","id":"toolu_015yB3TjTS1RBaM7VScM2MQY","name":"get_order","input":{}} }
6 |
7 | event: ping
8 | data: {"type": "ping"}
9 |
10 | event: content_block_delta
11 | data: {"type":"content_block_delta","index":0,"delta":{"type":"input_json_delta","partial_json":""} }
12 |
13 | event: content_block_delta
14 | data: {"type":"content_block_delta","index":0,"delta":{"type":"input_json_delta","partial_json":"{\"id\": \"1"} }
15 |
16 | event: content_block_delta
17 | data: {"type":"content_block_delta","index":0,"delta":{"type":"input_json_delta","partial_json":"23456\"}"} }
18 |
19 | event: content_block_stop
20 | data: {"type":"content_block_stop","index":0 }
21 |
22 | event: content_block_start
23 | data: {"type":"content_block_start","index":1,"content_block":{"type":"tool_use","id":"toolu_013VAZTYqMJm2JuRCqEA4kam","name":"get_customer","input":{}} }
24 |
25 | event: content_block_delta
26 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":""} }
27 |
28 | event: content_block_delta
29 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":"{\"id\": \""} }
30 |
31 | event: content_block_delta
32 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":"789"} }
33 |
34 | event: content_block_delta
35 | data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":"0\"}"} }
36 |
37 | event: content_block_stop
38 | data: {"type":"content_block_stop","index":1 }
39 |
40 | event: message_delta
41 | data: {"type":"message_delta","delta":{"stop_reason":"tool_use","stop_sequence":null},"usage":{"output_tokens":76} }
42 |
43 | event: message_stop
44 | data: {"type":"message_stop" }
45 |
--------------------------------------------------------------------------------
/samples/anthropic.txt:
--------------------------------------------------------------------------------
1 | event: message_start
2 | data: {"type":"message_start","message":{"id":"msg_013uu3QExnpT3UYsC9mo2Em8","type":"message","role":"assistant","model":"claude-3-haiku-20240307","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":19,"output_tokens":3}}}
3 |
4 | event: content_block_start
5 | data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}
6 |
7 | event: ping
8 | data: {"type": "ping"}
9 |
10 | event: content_block_delta
11 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"2 "}}
12 |
13 | event: content_block_delta
14 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"+ 2 "}}
15 |
16 | event: content_block_delta
17 | data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"= 4."}}
18 |
19 | event: content_block_stop
20 | data: {"type":"content_block_stop","index":0}
21 |
22 | event: message_delta
23 | data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":14}}
24 |
25 | event: message_stop
26 | data: {"type":"message_stop"}
27 |
--------------------------------------------------------------------------------
/samples/errors.txt:
--------------------------------------------------------------------------------
1 | data: invalid json
2 |
3 | data: {"error": {"message": "OpenAI API error", "type": "api_error"}}
4 |
5 | data: {"error": {"type": "invalid_request_error", "message": "Anthropic API error"}}
6 |
7 | data: {"error": {"code": 400, "message": "Gemini API error"}}
8 |
9 | data: {"message": {"error": "OpenRouter API error"}}
10 |
11 | data:
12 |
--------------------------------------------------------------------------------
/samples/gemini-tools.txt:
--------------------------------------------------------------------------------
1 | data: {"candidates": [{"content": {"parts": [{"functionCall": {"name": "take_notes","args": {"note": "Capitalism and socialism are two of the most prevalent economic systems in the world. Capitalism is characterized by private ownership of the means of production, free markets, and the pursuit of profit. Socialism, on the other hand, emphasizes social ownership of the means of production, with the goal of achieving social equality and economic justice. The two systems have been the subject of much debate, with proponents of each arguing for its superiority. Capitalism is often praised for its efficiency and innovation, while socialism is lauded for its potential to reduce inequality and provide for the needs of the most vulnerable. However, both systems have their drawbacks. Capitalism can lead to economic instability and social inequality, while socialism can stifle innovation and reduce individual freedom. Ultimately, the best economic system for a given society depends on its specific circumstances and values."}}}],"role": "model"},"finishReason": "STOP","index": 0,"safetyRatings": [{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"}]}],"usageMetadata": {"promptTokenCount": 50,"candidatesTokenCount": 174,"totalTokenCount": 224}}
2 |
--------------------------------------------------------------------------------
/samples/gemini-tools2.txt:
--------------------------------------------------------------------------------
1 | data: {"candidates": [{"content": {"parts": [{"functionCall": {"name": "get_order","args": {"id": "123456"}}},{"functionCall": {"name": "get_customer","args": {"id": "7890"}}}],"role": "model"},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"}]}],"usageMetadata": {"promptTokenCount": 104,"totalTokenCount": 104},"modelVersion": "gemini-1.5-flash-8b-001"}
2 |
3 | data: {"candidates": [{"content": {"parts": [{"text": ""}],"role": "model"},"avgLogprobs": "NaN"}],"usageMetadata": {"promptTokenCount": 104,"totalTokenCount": 104},"modelVersion": "gemini-1.5-flash-8b-001"}
4 |
5 | data: {"candidates": [{"content": {"parts": [{"text": ""}],"role": "model"},"finishReason": "STOP"}],"usageMetadata": {"promptTokenCount": 104,"candidatesTokenCount": 18,"totalTokenCount": 122},"modelVersion": "gemini-1.5-flash-8b-001"}
6 |
--------------------------------------------------------------------------------
/samples/gemini.txt:
--------------------------------------------------------------------------------
1 | data: {"candidates": [{"content": {"parts": [{"text": "2"}],"role": "model"}}],"usageMetadata": {"promptTokenCount": 13,"totalTokenCount": 13}}
2 |
3 | data: {"candidates": [{"content": {"parts": [{"text": " + 2 = 4\n"}],"role": "model"},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"}]}],"usageMetadata": {"promptTokenCount": 13,"totalTokenCount": 13}}
4 |
5 | data: {"candidates": [{"content": {"parts": [{"text": ""}],"role": "model"},"finishReason": "STOP"}],"usageMetadata": {"promptTokenCount": 13,"candidatesTokenCount": 8,"totalTokenCount": 21}}
6 |
--------------------------------------------------------------------------------
/samples/openai-responses-tools.txt:
--------------------------------------------------------------------------------
1 | event: response.created
2 | data: {"type":"response.created","response":{"id":"resp_6808d3a020488192b2a013332cb5f5e70a601c2646a05cfd","object":"response","created_at":1745408928,"status":"in_progress","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"model":"gpt-4.1-nano-2025-04-14","output":[],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"summary":null},"service_tier":"auto","store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"required","tools":[{"type":"function","description":"Get the delivery date for a customer order.","name":"get_delivery_date","parameters":{"type":"object","properties":{"order_id":{"type":"string","description":"The customer order ID."}},"required":["order_id"],"additionalProperties":false},"strict":true}],"top_p":1.0,"truncation":"disabled","usage":null,"user":null,"metadata":{}}}
3 |
4 | event: response.in_progress
5 | data: {"type":"response.in_progress","response":{"id":"resp_6808d3a020488192b2a013332cb5f5e70a601c2646a05cfd","object":"response","created_at":1745408928,"status":"in_progress","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"model":"gpt-4.1-nano-2025-04-14","output":[],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"summary":null},"service_tier":"auto","store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"required","tools":[{"type":"function","description":"Get the delivery date for a customer order.","name":"get_delivery_date","parameters":{"type":"object","properties":{"order_id":{"type":"string","description":"The customer order ID."}},"required":["order_id"],"additionalProperties":false},"strict":true}],"top_p":1.0,"truncation":"disabled","usage":null,"user":null,"metadata":{}}}
6 |
7 | event: response.output_item.added
8 | data: {"type":"response.output_item.added","output_index":0,"item":{"id":"fc_6808d3a08e708192a65b2c19dbc8b9140a601c2646a05cfd","type":"function_call","status":"in_progress","arguments":"","call_id":"call_IEmWx3mU3gTg0kVsMN5tOHbq","name":"get_delivery_date"}}
9 |
10 | event: response.function_call_arguments.delta
11 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d3a08e708192a65b2c19dbc8b9140a601c2646a05cfd","output_index":0,"delta":"{\""}
12 |
13 | event: response.function_call_arguments.delta
14 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d3a08e708192a65b2c19dbc8b9140a601c2646a05cfd","output_index":0,"delta":"order"}
15 |
16 | event: response.function_call_arguments.delta
17 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d3a08e708192a65b2c19dbc8b9140a601c2646a05cfd","output_index":0,"delta":"_id"}
18 |
19 | event: response.function_call_arguments.delta
20 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d3a08e708192a65b2c19dbc8b9140a601c2646a05cfd","output_index":0,"delta":"\":\""}
21 |
22 | event: response.function_call_arguments.delta
23 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d3a08e708192a65b2c19dbc8b9140a601c2646a05cfd","output_index":0,"delta":"123"}
24 |
25 | event: response.function_call_arguments.delta
26 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d3a08e708192a65b2c19dbc8b9140a601c2646a05cfd","output_index":0,"delta":"456"}
27 |
28 | event: response.function_call_arguments.delta
29 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d3a08e708192a65b2c19dbc8b9140a601c2646a05cfd","output_index":0,"delta":"\"}"}
30 |
31 | event: response.function_call_arguments.done
32 | data: {"type":"response.function_call_arguments.done","item_id":"fc_6808d3a08e708192a65b2c19dbc8b9140a601c2646a05cfd","output_index":0,"arguments":"{\"order_id\":\"123456\"}"}
33 |
34 | event: response.output_item.done
35 | data: {"type":"response.output_item.done","output_index":0,"item":{"id":"fc_6808d3a08e708192a65b2c19dbc8b9140a601c2646a05cfd","type":"function_call","status":"completed","arguments":"{\"order_id\":\"123456\"}","call_id":"call_IEmWx3mU3gTg0kVsMN5tOHbq","name":"get_delivery_date"}}
36 |
37 | event: response.completed
38 | data: {"type":"response.completed","response":{"id":"resp_6808d3a020488192b2a013332cb5f5e70a601c2646a05cfd","object":"response","created_at":1745408928,"status":"completed","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"model":"gpt-4.1-nano-2025-04-14","output":[{"id":"fc_6808d3a08e708192a65b2c19dbc8b9140a601c2646a05cfd","type":"function_call","status":"completed","arguments":"{\"order_id\":\"123456\"}","call_id":"call_IEmWx3mU3gTg0kVsMN5tOHbq","name":"get_delivery_date"}],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"summary":null},"service_tier":"default","store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"required","tools":[{"type":"function","description":"Get the delivery date for a customer order.","name":"get_delivery_date","parameters":{"type":"object","properties":{"order_id":{"type":"string","description":"The customer order ID."}},"required":["order_id"],"additionalProperties":false},"strict":true}],"top_p":1.0,"truncation":"disabled","usage":{"input_tokens":91,"input_tokens_details":{"cached_tokens":0},"output_tokens":8,"output_tokens_details":{"reasoning_tokens":0},"total_tokens":99},"user":null,"metadata":{}}}
39 |
40 |
--------------------------------------------------------------------------------
/samples/openai-responses-tools2.txt:
--------------------------------------------------------------------------------
1 | event: response.created
2 | data: {"type":"response.created","response":{"id":"resp_6808d34264cc8192a90be606a7cc50bc01c57d45ab76fecc","object":"response","created_at":1745408834,"status":"in_progress","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"model":"gpt-4.1-nano-2025-04-14","output":[],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"summary":null},"service_tier":"auto","store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"required","tools":[{"type":"function","description":null,"name":"get_order","parameters":{"type":"object","properties":{"id":{"type":"string"}},"required":["id"]},"strict":true},{"type":"function","description":null,"name":"get_customer","parameters":{"type":"object","properties":{"id":{"type":"string"}},"required":["id"]},"strict":true}],"top_p":1.0,"truncation":"disabled","usage":null,"user":null,"metadata":{}}}
3 |
4 | event: response.in_progress
5 | data: {"type":"response.in_progress","response":{"id":"resp_6808d34264cc8192a90be606a7cc50bc01c57d45ab76fecc","object":"response","created_at":1745408834,"status":"in_progress","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"model":"gpt-4.1-nano-2025-04-14","output":[],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"summary":null},"service_tier":"auto","store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"required","tools":[{"type":"function","description":null,"name":"get_order","parameters":{"type":"object","properties":{"id":{"type":"string"}},"required":["id"]},"strict":true},{"type":"function","description":null,"name":"get_customer","parameters":{"type":"object","properties":{"id":{"type":"string"}},"required":["id"]},"strict":true}],"top_p":1.0,"truncation":"disabled","usage":null,"user":null,"metadata":{}}}
6 |
7 | event: response.output_item.added
8 | data: {"type":"response.output_item.added","output_index":0,"item":{"id":"fc_6808d34ab2748192957f518947f0e14d01c57d45ab76fecc","type":"function_call","status":"in_progress","arguments":"","call_id":"call_khElVS1NoyNcckH2EuTtpSDR","name":"get_order"}}
9 |
10 | event: response.function_call_arguments.delta
11 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d34ab2748192957f518947f0e14d01c57d45ab76fecc","output_index":0,"delta":"{"}
12 |
13 | event: response.function_call_arguments.delta
14 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d34ab2748192957f518947f0e14d01c57d45ab76fecc","output_index":0,"delta":"\"id"}
15 |
16 | event: response.function_call_arguments.delta
17 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d34ab2748192957f518947f0e14d01c57d45ab76fecc","output_index":0,"delta":"\":"}
18 |
19 | event: response.function_call_arguments.delta
20 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d34ab2748192957f518947f0e14d01c57d45ab76fecc","output_index":0,"delta":"\"123"}
21 |
22 | event: response.function_call_arguments.delta
23 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d34ab2748192957f518947f0e14d01c57d45ab76fecc","output_index":0,"delta":"456"}
24 |
25 | event: response.function_call_arguments.delta
26 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d34ab2748192957f518947f0e14d01c57d45ab76fecc","output_index":0,"delta":"\"}"}
27 |
28 | event: response.function_call_arguments.done
29 | data: {"type":"response.function_call_arguments.done","item_id":"fc_6808d34ab2748192957f518947f0e14d01c57d45ab76fecc","output_index":0,"arguments":"{\"id\":\"123456\"}"}
30 |
31 | event: response.output_item.done
32 | data: {"type":"response.output_item.done","output_index":0,"item":{"id":"fc_6808d34ab2748192957f518947f0e14d01c57d45ab76fecc","type":"function_call","status":"completed","arguments":"{\"id\":\"123456\"}","call_id":"call_khElVS1NoyNcckH2EuTtpSDR","name":"get_order"}}
33 |
34 | event: response.output_item.added
35 | data: {"type":"response.output_item.added","output_index":1,"item":{"id":"fc_6808d34ac3548192916cd16fdad20dc101c57d45ab76fecc","type":"function_call","status":"in_progress","arguments":"","call_id":"call_562xX7CoxXqdLoTJBCK8VbZq","name":"get_customer"}}
36 |
37 | event: response.function_call_arguments.delta
38 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d34ac3548192916cd16fdad20dc101c57d45ab76fecc","output_index":1,"delta":"{"}
39 |
40 | event: response.function_call_arguments.delta
41 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d34ac3548192916cd16fdad20dc101c57d45ab76fecc","output_index":1,"delta":"\"id"}
42 |
43 | event: response.function_call_arguments.delta
44 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d34ac3548192916cd16fdad20dc101c57d45ab76fecc","output_index":1,"delta":"\":"}
45 |
46 | event: response.function_call_arguments.delta
47 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d34ac3548192916cd16fdad20dc101c57d45ab76fecc","output_index":1,"delta":"\"789"}
48 |
49 | event: response.function_call_arguments.delta
50 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d34ac3548192916cd16fdad20dc101c57d45ab76fecc","output_index":1,"delta":"0"}
51 |
52 | event: response.function_call_arguments.delta
53 | data: {"type":"response.function_call_arguments.delta","item_id":"fc_6808d34ac3548192916cd16fdad20dc101c57d45ab76fecc","output_index":1,"delta":"\"}"}
54 |
55 | event: response.function_call_arguments.done
56 | data: {"type":"response.function_call_arguments.done","item_id":"fc_6808d34ac3548192916cd16fdad20dc101c57d45ab76fecc","output_index":1,"arguments":"{\"id\":\"7890\"}"}
57 |
58 | event: response.output_item.done
59 | data: {"type":"response.output_item.done","output_index":1,"item":{"id":"fc_6808d34ac3548192916cd16fdad20dc101c57d45ab76fecc","type":"function_call","status":"completed","arguments":"{\"id\":\"7890\"}","call_id":"call_562xX7CoxXqdLoTJBCK8VbZq","name":"get_customer"}}
60 |
61 | event: response.completed
62 | data: {"type":"response.completed","response":{"id":"resp_6808d34264cc8192a90be606a7cc50bc01c57d45ab76fecc","object":"response","created_at":1745408834,"status":"completed","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"model":"gpt-4.1-nano-2025-04-14","output":[{"id":"fc_6808d34ab2748192957f518947f0e14d01c57d45ab76fecc","type":"function_call","status":"completed","arguments":"{\"id\":\"123456\"}","call_id":"call_khElVS1NoyNcckH2EuTtpSDR","name":"get_order"},{"id":"fc_6808d34ac3548192916cd16fdad20dc101c57d45ab76fecc","type":"function_call","status":"completed","arguments":"{\"id\":\"7890\"}","call_id":"call_562xX7CoxXqdLoTJBCK8VbZq","name":"get_customer"}],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"summary":null},"service_tier":"default","store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"required","tools":[{"type":"function","description":null,"name":"get_order","parameters":{"type":"object","properties":{"id":{"type":"string"}},"required":["id"]},"strict":true},{"type":"function","description":null,"name":"get_customer","parameters":{"type":"object","properties":{"id":{"type":"string"}},"required":["id"]},"strict":true}],"top_p":1.0,"truncation":"disabled","usage":{"input_tokens":0,"input_tokens_details":{"cached_tokens":0},"output_tokens":0,"output_tokens_details":{"reasoning_tokens":0},"total_tokens":0},"user":null,"metadata":{}}}
63 |
64 |
--------------------------------------------------------------------------------
/samples/openai-responses.txt:
--------------------------------------------------------------------------------
1 | event: response.created
2 | data: {"type":"response.created","response":{"id":"resp_6808c792b0808192929556caffbb1ce402452198540d326e","object":"response","created_at":1745405842,"status":"in_progress","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"model":"gpt-4.1-nano-2025-04-14","output":[],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"summary":null},"service_tier":"auto","store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[],"top_p":1.0,"truncation":"disabled","usage":null,"user":null,"metadata":{}}}
3 |
4 | event: response.in_progress
5 | data: {"type":"response.in_progress","response":{"id":"resp_6808c792b0808192929556caffbb1ce402452198540d326e","object":"response","created_at":1745405842,"status":"in_progress","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"model":"gpt-4.1-nano-2025-04-14","output":[],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"summary":null},"service_tier":"auto","store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[],"top_p":1.0,"truncation":"disabled","usage":null,"user":null,"metadata":{}}}
6 |
7 | event: response.output_item.added
8 | data: {"type":"response.output_item.added","output_index":0,"item":{"id":"msg_6808c79326f48192b14f4fa08354087a02452198540d326e","type":"message","status":"in_progress","content":[],"role":"assistant"}}
9 |
10 | event: response.content_part.added
11 | data: {"type":"response.content_part.added","item_id":"msg_6808c79326f48192b14f4fa08354087a02452198540d326e","output_index":0,"content_index":0,"part":{"type":"output_text","annotations":[],"text":""}}
12 |
13 | event: response.output_text.delta
14 | data: {"type":"response.output_text.delta","item_id":"msg_6808c79326f48192b14f4fa08354087a02452198540d326e","output_index":0,"content_index":0,"delta":"Hello"}
15 |
16 | event: response.output_text.delta
17 | data: {"type":"response.output_text.delta","item_id":"msg_6808c79326f48192b14f4fa08354087a02452198540d326e","output_index":0,"content_index":0,"delta":"!"}
18 |
19 | event: response.output_text.delta
20 | data: {"type":"response.output_text.delta","item_id":"msg_6808c79326f48192b14f4fa08354087a02452198540d326e","output_index":0,"content_index":0,"delta":" How"}
21 |
22 | event: response.output_text.delta
23 | data: {"type":"response.output_text.delta","item_id":"msg_6808c79326f48192b14f4fa08354087a02452198540d326e","output_index":0,"content_index":0,"delta":" can"}
24 |
25 | event: response.output_text.delta
26 | data: {"type":"response.output_text.delta","item_id":"msg_6808c79326f48192b14f4fa08354087a02452198540d326e","output_index":0,"content_index":0,"delta":" I"}
27 |
28 | event: response.output_text.delta
29 | data: {"type":"response.output_text.delta","item_id":"msg_6808c79326f48192b14f4fa08354087a02452198540d326e","output_index":0,"content_index":0,"delta":" assist"}
30 |
31 | event: response.output_text.delta
32 | data: {"type":"response.output_text.delta","item_id":"msg_6808c79326f48192b14f4fa08354087a02452198540d326e","output_index":0,"content_index":0,"delta":" you"}
33 |
34 | event: response.output_text.delta
35 | data: {"type":"response.output_text.delta","item_id":"msg_6808c79326f48192b14f4fa08354087a02452198540d326e","output_index":0,"content_index":0,"delta":" today"}
36 |
37 | event: response.output_text.delta
38 | data: {"type":"response.output_text.delta","item_id":"msg_6808c79326f48192b14f4fa08354087a02452198540d326e","output_index":0,"content_index":0,"delta":"?"}
39 |
40 | event: response.output_text.done
41 | data: {"type":"response.output_text.done","item_id":"msg_6808c79326f48192b14f4fa08354087a02452198540d326e","output_index":0,"content_index":0,"text":"Hello! How can I assist you today?"}
42 |
43 | event: response.content_part.done
44 | data: {"type":"response.content_part.done","item_id":"msg_6808c79326f48192b14f4fa08354087a02452198540d326e","output_index":0,"content_index":0,"part":{"type":"output_text","annotations":[],"text":"Hello! How can I assist you today?"}}
45 |
46 | event: response.output_item.done
47 | data: {"type":"response.output_item.done","output_index":0,"item":{"id":"msg_6808c79326f48192b14f4fa08354087a02452198540d326e","type":"message","status":"completed","content":[{"type":"output_text","annotations":[],"text":"Hello! How can I assist you today?"}],"role":"assistant"}}
48 |
49 | event: response.completed
50 | data: {"type":"response.completed","response":{"id":"resp_6808c792b0808192929556caffbb1ce402452198540d326e","object":"response","created_at":1745405842,"status":"completed","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"model":"gpt-4.1-nano-2025-04-14","output":[{"id":"msg_6808c79326f48192b14f4fa08354087a02452198540d326e","type":"message","status":"completed","content":[{"type":"output_text","annotations":[],"text":"Hello! How can I assist you today?"}],"role":"assistant"}],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"summary":null},"service_tier":"default","store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[],"top_p":1.0,"truncation":"disabled","usage":{"input_tokens":9,"input_tokens_details":{"cached_tokens":0},"output_tokens":10,"output_tokens_details":{"reasoning_tokens":0},"total_tokens":19},"user":null,"metadata":{}}}
51 |
52 |
--------------------------------------------------------------------------------
/samples/openai-tools.txt:
--------------------------------------------------------------------------------
1 | data: {"id":"chatcmpl-AIYHs3Xp2vOtDdtgJUaTpUVMKk3a8","object":"chat.completion.chunk","created":1728985068,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_e2bde53e6e","choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"call_F8YHCjnzrrTjfE4YSSpVW2Bc","type":"function","function":{"name":"get_delivery_date","arguments":""}}],"refusal":null},"logprobs":null,"finish_reason":null}]}
2 |
3 | data: {"id":"chatcmpl-AIYHs3Xp2vOtDdtgJUaTpUVMKk3a8","object":"chat.completion.chunk","created":1728985068,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_e2bde53e6e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\""}}]},"logprobs":null,"finish_reason":null}]}
4 |
5 | data: {"id":"chatcmpl-AIYHs3Xp2vOtDdtgJUaTpUVMKk3a8","object":"chat.completion.chunk","created":1728985068,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_e2bde53e6e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"order"}}]},"logprobs":null,"finish_reason":null}]}
6 |
7 | data: {"id":"chatcmpl-AIYHs3Xp2vOtDdtgJUaTpUVMKk3a8","object":"chat.completion.chunk","created":1728985068,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_e2bde53e6e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"_id"}}]},"logprobs":null,"finish_reason":null}]}
8 |
9 | data: {"id":"chatcmpl-AIYHs3Xp2vOtDdtgJUaTpUVMKk3a8","object":"chat.completion.chunk","created":1728985068,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_e2bde53e6e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}]}
10 |
11 | data: {"id":"chatcmpl-AIYHs3Xp2vOtDdtgJUaTpUVMKk3a8","object":"chat.completion.chunk","created":1728985068,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_e2bde53e6e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"123"}}]},"logprobs":null,"finish_reason":null}]}
12 |
13 | data: {"id":"chatcmpl-AIYHs3Xp2vOtDdtgJUaTpUVMKk3a8","object":"chat.completion.chunk","created":1728985068,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_e2bde53e6e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"456"}}]},"logprobs":null,"finish_reason":null}]}
14 |
15 | data: {"id":"chatcmpl-AIYHs3Xp2vOtDdtgJUaTpUVMKk3a8","object":"chat.completion.chunk","created":1728985068,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_e2bde53e6e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"}"}}]},"logprobs":null,"finish_reason":null}]}
16 |
17 | data: {"id":"chatcmpl-AIYHs3Xp2vOtDdtgJUaTpUVMKk3a8","object":"chat.completion.chunk","created":1728985068,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_e2bde53e6e","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}]}
18 |
19 | data: [DONE]
20 |
21 |
--------------------------------------------------------------------------------
/samples/openai-tools2.txt:
--------------------------------------------------------------------------------
1 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"role":"assistant","content":null},"logprobs":null,"finish_reason":null}]}
2 |
3 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"call_wnH2cswb4JAnm69pUAP4MNEN","type":"function","function":{"name":"get_order","arguments":""}}]},"logprobs":null,"finish_reason":null}]}
4 |
5 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"id"}}]},"logprobs":null,"finish_reason":null}]}
6 |
7 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\": \"1"}}]},"logprobs":null,"finish_reason":null}]}
8 |
9 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"23456\""}}]},"logprobs":null,"finish_reason":null}]}
10 |
11 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"}"}}]},"logprobs":null,"finish_reason":null}]}
12 |
13 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"id":"call_f4GVABhbwSOLoaisOBOajnsm","type":"function","function":{"name":"get_customer","arguments":""}}]},"logprobs":null,"finish_reason":null}]}
14 |
15 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"{\"id"}}]},"logprobs":null,"finish_reason":null}]}
16 |
17 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"\": \"7"}}]},"logprobs":null,"finish_reason":null}]}
18 |
19 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"890\"}"}}]},"logprobs":null,"finish_reason":null}]}
20 |
21 | data: {"id":"chatcmpl-AQ3zpRW1u9JcFF4vG4yvlRk6Dl0Nk","object":"chat.completion.chunk","created":1730775253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_9b78b61c52","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}]}
22 |
23 | data: [DONE]
24 |
--------------------------------------------------------------------------------
/samples/openai.txt:
--------------------------------------------------------------------------------
1 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}]}
2 |
3 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"content":"Hello"},"logprobs":null,"finish_reason":null}]}
4 |
5 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}
6 |
7 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"content":" How"},"logprobs":null,"finish_reason":null}]}
8 |
9 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"content":" can"},"logprobs":null,"finish_reason":null}]}
10 |
11 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"content":" I"},"logprobs":null,"finish_reason":null}]}
12 |
13 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"content":" assist"},"logprobs":null,"finish_reason":null}]}
14 |
15 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"content":" you"},"logprobs":null,"finish_reason":null}]}
16 |
17 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"content":" today"},"logprobs":null,"finish_reason":null}]}
18 |
19 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{"content":"?"},"logprobs":null,"finish_reason":null}]}
20 |
21 | data: {"id":"chatcmpl-AIXwzd0Ul2u3WWUqaXvmzE4o5Th8b","object":"chat.completion.chunk","created":1728983773,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_6b68a8204b","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
22 |
23 | data: [DONE]
24 |
--------------------------------------------------------------------------------
/samples/openrouter.txt:
--------------------------------------------------------------------------------
1 | : OPENROUTER PROCESSING
2 |
3 | : OPENROUTER PROCESSING
4 |
5 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null,"logprobs":null}]}
6 |
7 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" The"},"finish_reason":null,"logprobs":null}]}
8 |
9 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" sum"},"finish_reason":null,"logprobs":null}]}
10 |
11 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" of"},"finish_reason":null,"logprobs":null}]}
12 |
13 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" "},"finish_reason":null,"logprobs":null}]}
14 |
15 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"2"},"finish_reason":null,"logprobs":null}]}
16 |
17 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" and"},"finish_reason":null,"logprobs":null}]}
18 |
19 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" "},"finish_reason":null,"logprobs":null}]}
20 |
21 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"2"},"finish_reason":null,"logprobs":null}]}
22 |
23 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" is"},"finish_reason":null,"logprobs":null}]}
24 |
25 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" "},"finish_reason":null,"logprobs":null}]}
26 |
27 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"4"},"finish_reason":null,"logprobs":null}]}
28 |
29 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"."},"finish_reason":null,"logprobs":null}]}
30 |
31 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" This"},"finish_reason":null,"logprobs":null}]}
32 |
33 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" is"},"finish_reason":null,"logprobs":null}]}
34 |
35 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" a"},"finish_reason":null,"logprobs":null}]}
36 |
37 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" basic"},"finish_reason":null,"logprobs":null}]}
38 |
39 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" arithmetic"},"finish_reason":null,"logprobs":null}]}
40 |
41 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" operation"},"finish_reason":null,"logprobs":null}]}
42 |
43 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" where"},"finish_reason":null,"logprobs":null}]}
44 |
45 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" you"},"finish_reason":null,"logprobs":null}]}
46 |
47 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" add"},"finish_reason":null,"logprobs":null}]}
48 |
49 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" the"},"finish_reason":null,"logprobs":null}]}
50 |
51 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" two"},"finish_reason":null,"logprobs":null}]}
52 |
53 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" numbers"},"finish_reason":null,"logprobs":null}]}
54 |
55 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" together"},"finish_reason":null,"logprobs":null}]}
56 |
57 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" to"},"finish_reason":null,"logprobs":null}]}
58 |
59 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" get"},"finish_reason":null,"logprobs":null}]}
60 |
61 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" the"},"finish_reason":null,"logprobs":null}]}
62 |
63 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" total"},"finish_reason":null,"logprobs":null}]}
64 |
65 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"."},"finish_reason":null,"logprobs":null}]}
66 |
67 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" "},"finish_reason":null,"logprobs":null}]}
68 |
69 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"\n"},"finish_reason":null,"logprobs":null}]}
70 |
71 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"\n"},"finish_reason":null,"logprobs":null}]}
72 |
73 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"Here"},"finish_reason":null,"logprobs":null}]}
74 |
75 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"'"},"finish_reason":null,"logprobs":null}]}
76 |
77 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"s"},"finish_reason":null,"logprobs":null}]}
78 |
79 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" the"},"finish_reason":null,"logprobs":null}]}
80 |
81 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" calculation"},"finish_reason":null,"logprobs":null}]}
82 |
83 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":":"},"finish_reason":null,"logprobs":null}]}
84 |
85 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"\n"},"finish_reason":null,"logprobs":null}]}
86 |
87 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"\n"},"finish_reason":null,"logprobs":null}]}
88 |
89 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"2"},"finish_reason":null,"logprobs":null}]}
90 |
91 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" +"},"finish_reason":null,"logprobs":null}]}
92 |
93 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" "},"finish_reason":null,"logprobs":null}]}
94 |
95 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"2"},"finish_reason":null,"logprobs":null}]}
96 |
97 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" ="},"finish_reason":null,"logprobs":null}]}
98 |
99 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" "},"finish_reason":null,"logprobs":null}]}
100 |
101 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"4"},"finish_reason":null,"logprobs":null}]}
102 |
103 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"\n"},"finish_reason":null,"logprobs":null}]}
104 |
105 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"\n"},"finish_reason":null,"logprobs":null}]}
106 |
107 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"So"},"finish_reason":null,"logprobs":null}]}
108 |
109 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":","},"finish_reason":null,"logprobs":null}]}
110 |
111 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" the"},"finish_reason":null,"logprobs":null}]}
112 |
113 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" answer"},"finish_reason":null,"logprobs":null}]}
114 |
115 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" to"},"finish_reason":null,"logprobs":null}]}
116 |
117 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" your"},"finish_reason":null,"logprobs":null}]}
118 |
119 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" question"},"finish_reason":null,"logprobs":null}]}
120 |
121 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" is"},"finish_reason":null,"logprobs":null}]}
122 |
123 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":" "},"finish_reason":null,"logprobs":null}]}
124 |
125 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"4"},"finish_reason":null,"logprobs":null}]}
126 |
127 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":"."},"finish_reason":null,"logprobs":null}]}
128 |
129 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":"stop","logprobs":null}]}
130 |
131 | : OPENROUTER PROCESSING
132 |
133 | data: {"id":"gen-1729004990-gTyfUdC2AMGEv0NpAg7u","provider":"Azure","model":"microsoft/phi-3.5-mini-128k-instruct","object":"chat.completion.chunk","created":1729004990,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null,"logprobs":null}],"usage":{"prompt_tokens":17,"completion_tokens":62,"total_tokens":79}}
134 |
135 | data: [DONE]
136 |
--------------------------------------------------------------------------------
/samples/output.txt:
--------------------------------------------------------------------------------
1 | data: {"candidates": [{"content": {"parts": [{"functionCall": {"name": "take_notes","args": {"note": "Capitalism and socialism are two of the most prevalent economic systems in the world. Capitalism is characterized by private ownership of the means of production, free markets, and the pursuit of profit. Socialism, on the other hand, emphasizes social ownership of the means of production, with the goal of achieving social equality and economic justice. The two systems have been the subject of much debate, with proponents of each arguing for its superiority. Capitalism is often praised for its efficiency and innovation, while socialism is lauded for its potential to reduce inequality and provide for the needs of the most vulnerable. However, both systems have their drawbacks. Capitalism can lead to economic instability and social inequality, while socialism can stifle innovation and reduce individual freedom. Ultimately, the best economic system for a given society depends on its specific circumstances and values."}}}],"role": "model"},"finishReason": "STOP","index": 0,"safetyRatings": [{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"}]}],"usageMetadata": {"promptTokenCount": 50,"candidatesTokenCount": 174,"totalTokenCount": 224}}
2 |
3 |
--------------------------------------------------------------------------------
/test.js:
--------------------------------------------------------------------------------
1 | import { asyncLLM } from "./index.js";
2 |
3 | const PORT = 8080;
4 | const BASE_URL = `http://localhost:${PORT}`;
5 |
/**
 * Deep-equality assertion based on JSON serialization.
 * Throws an Error when `actual` and `expected` serialize differently.
 *
 * @param {*} actual - value produced by the code under test
 * @param {*} expected - value the test expects
 * @param {string} [message] - optional override for the failure message
 */
function assertEquals(actual, expected, message) {
  const serializedActual = JSON.stringify(actual);
  const serializedExpected = JSON.stringify(expected);
  if (serializedActual === serializedExpected) return;
  const detail = `Expected:\n${JSON.stringify(expected, null, 2)}. Actual:\n${JSON.stringify(actual, null, 2)}`;
  throw new Error(message || detail);
}
12 |
// Serve the sample SSE fixtures over HTTP so each test can stream them
// from samples/<pathname> as text/event-stream.
Deno.serve({ port: PORT }, async (req) => {
  const { pathname } = new URL(req.url);
  const body = await Deno.readFile(`samples${pathname}`);
  return new Response(body, {
    headers: { "Content-Type": "text/event-stream" },
  });
});
20 |
21 | /*
22 | curl -X POST https://llmfoundry.straive.com/openai/v1/chat/completions \
23 | -H "Authorization: Bearer $LLMFOUNDRY_TOKEN:asyncllm" \
24 | -H "Content-Type: application/json" \
25 | -d '{"model": "gpt-4o-mini", "stream": true, "messages": [{"role": "user", "content": "Hello world"}]}'
26 | */
// Streams a plain OpenAI chat completion and checks cumulative content.
Deno.test("asyncLLM - OpenAI basic", async () => {
  const events = await Array.fromAsync(asyncLLM(`${BASE_URL}/openai.txt`));

  assertEquals(events.length, 10);
  assertEquals(events[0].content, "");
  assertEquals(events[1].content, "Hello");
  assertEquals(events[9].content, "Hello! How can I assist you today?");
  // No tool calls in a plain-text completion.
  assertEquals(events.at(-1).tools, undefined);
});
36 |
37 | /*
38 | curl -X POST https://llmfoundry.straive.com/openai/v1/chat/completions \
39 | -H "Authorization: Bearer $LLMFOUNDRY_TOKEN:asyncllm" \
40 | -H "Content-Type: application/json" \
41 | -d '{
42 | "model": "gpt-4o-mini",
43 | "stream": true,
44 | "messages": [
45 | {"role": "system", "content": "Call get_delivery_date with the order ID."},
46 | {"role": "user", "content": "123456"}
47 | ],
48 | "tools": [
49 | {
50 | "type": "function",
51 | "function": {
52 | "name": "get_delivery_date",
53 | "description": "Get the delivery date for a customer order.",
54 | "parameters": {
55 | "type": "object",
56 | "properties": { "order_id": { "type": "string", "description": "The customer order ID." } },
57 | "required": ["order_id"],
58 | "additionalProperties": false
59 | }
60 | }
61 | }
62 | ]
63 | }'
64 | */
// Streams an OpenAI completion that calls a single tool and checks the
// incremental accumulation of the tool-call arguments.
Deno.test("asyncLLM - OpenAI with tool calls", async () => {
  let i = 0;
  let last = {};
  for await (last of asyncLLM(`${BASE_URL}/openai-tools.txt`)) {
    if (i === 0) {
      assertEquals(last.tools[0].name, "get_delivery_date");
      assertEquals(last.tools[0].id, "call_F8YHCjnzrrTjfE4YSSpVW2Bc");
      assertEquals(last.tools[0].args, "");
    }
    if (i === 1) assertEquals(last.tools[0].args, '{"');
    if (i === 7) {
      assertEquals(last.tools[0].args, '{"order_id":"123456"}');
      assertEquals(last.content, undefined);
    }
    i += 1;
  }
  // Final event carries the complete, parseable argument string.
  assertEquals(JSON.parse(last.tools[0].args), { order_id: "123456" });
  assertEquals(i, 8);
});
82 |
83 | /*
84 | curl -X POST https://llmfoundry.straive.com/openai/v1/chat/completions \
85 | -H "Authorization: Bearer $LLMFOUNDRY_TOKEN:asyncllm" \
86 | -H "Content-Type: application/json" \
87 | -d '{
88 | "model": "gpt-4o-mini",
89 | "stream": true,
90 | "messages": [
91 | { "role": "system", "content": "Call get_order({order_id}) AND get_customer({customer_id}) in parallel" },
92 | { "role": "user", "content": "Order ID: 123456, Customer ID: 7890" }
93 | ],
94 | "tool_choice": "required",
95 | "tools": [
96 | {
97 | "type": "function",
98 | "function": { "name": "get_order", "parameters": { "type": "object", "properties": { "id": { "type": "string" } }, "required": ["id"] } }
99 | },
100 | {
101 | "type": "function",
102 | "function": { "name": "get_customer", "parameters": { "type": "object", "properties": { "id": { "type": "string" } }, "required": ["id"] } }
103 | }
104 | ]
105 | }'
107 | */
// Streams an OpenAI completion with two parallel tool calls and checks
// how each tool's arguments accumulate across events.
Deno.test("asyncLLM - OpenAI with multiple tool calls", async () => {
  let index = 0;
  let data = {};
  for await (data of asyncLLM(`${BASE_URL}/openai-tools2.txt`)) {
    if (index === 0) {
      assertEquals(data.tools[0], {
        name: "get_order",
        id: "call_wnH2cswb4JAnm69pUAP4MNEN",
        args: "",
      });
    }
    if (index === 5) assertEquals(data.tools[0].args, '{"id": "123456"}');
    if (index === 6) {
      assertEquals(data.tools[1], {
        name: "get_customer",
        id: "call_f4GVABhbwSOLoaisOBOajnsm",
        args: '{"id',
      });
    }
    // BUG FIX: the stream yields 9 events (indices 0-8, as asserted below),
    // so the previous `index === 9` guard was dead code and never ran.
    // Check the final in-loop event at index 8 instead; the post-loop
    // assertion confirms the same expected value for tools[1].args.
    if (index === 8) assertEquals(data.tools[1].args, '{"id": "7890"}');
    index++;
  }
  assertEquals(index, 9);
  assertEquals(data.tools[0], { name: "get_order", id: "call_wnH2cswb4JAnm69pUAP4MNEN", args: '{"id": "123456"}' });
  assertEquals(data.tools[1], { name: "get_customer", id: "call_f4GVABhbwSOLoaisOBOajnsm", args: '{"id": "7890"}' });
});
134 |
135 | /*
136 | curl https://api.openai.com/v1/responses \
137 | -H "Authorization: Bearer $OPENAI_API_KEY" \
138 | -H "Content-Type: application/json" \
139 | -d '{"model": "gpt-4.1-nano", "stream": true, "input": "Hello world"}'
140 | */
// Streams a plain OpenAI Responses-API completion and checks cumulative content.
Deno.test("asyncLLM - OpenAI Responses basic", async () => {
  const events = await Array.fromAsync(asyncLLM(`${BASE_URL}/openai-responses.txt`));

  assertEquals(events.length, 9);
  assertEquals(events[0].content, "Hello");
  assertEquals(events[1].content, "Hello!");
  assertEquals(events[2].content, "Hello! How");
  assertEquals(events[8].content, "Hello! How can I assist you today?");
  // No tool calls in a plain-text response.
  assertEquals(events.at(-1).tools, undefined);
});
151 |
152 | /*
153 | curl https://api.openai.com/v1/responses \
154 | -H "Authorization: Bearer $OPENAI_API_KEY" \
155 | -H "Content-Type: application/json" \
156 | -d '{
157 | "model": "gpt-4.1-nano",
158 | "stream": true,
159 | "input": [
160 | { "role": "system", "content": "Call get_order({order_id}) AND get_customer({customer_id}) in parallel" },
161 | { "role": "user", "content": "Order ID: 123456, Customer ID: 7890" }
162 | ],
163 | "tool_choice": "required",
164 | "tools": [
165 | {
166 | "type": "function",
167 | "name": "get_delivery_date",
168 | "description": "Get the delivery date for a customer order.",
169 | "parameters": {
170 | "type": "object",
171 | "properties": { "order_id": { "type": "string", "description": "The customer order ID." } },
172 | "required": ["order_id"],
173 | "additionalProperties": false
174 | }
175 | }
176 | ]
177 | }'
178 | */
// Streams a Responses-API completion with one tool call and checks the
// incremental accumulation of its arguments.
Deno.test("asyncLLM - OpenAI Responses with tool calls", async () => {
  let i = 0;
  let last = {};
  for await (last of asyncLLM(`${BASE_URL}/openai-responses-tools.txt`)) {
    if (i === 0) {
      assertEquals(last.tools[0].name, "get_delivery_date");
      assertEquals(last.tools[0].id, "fc_6808d3a08e708192a65b2c19dbc8b9140a601c2646a05cfd");
      assertEquals(last.tools[0].args, "");
    }
    if (i === 1) assertEquals(last.tools[0].args, '{"');
    if (i === 7) {
      assertEquals(last.tools[0].args, '{"order_id":"123456"}');
      assertEquals(last.content, undefined);
    }
    i += 1;
  }
  // Final event carries the complete, parseable argument string.
  assertEquals(JSON.parse(last.tools[0].args), { order_id: "123456" });
  assertEquals(i, 8);
});
196 |
197 | /*
198 | curl https://api.openai.com/v1/responses \
199 | -H "Authorization: Bearer $OPENAI_API_KEY" \
200 | -H "Content-Type: application/json" \
201 | -d '{
202 | "model": "gpt-4.1-nano",
203 | "stream": true,
204 | "input": [
205 | { "role": "system", "content": "Call get_order({order_id}) AND get_customer({customer_id}) in parallel" },
206 | { "role": "user", "content": "Order ID: 123456, Customer ID: 7890" }
207 | ],
208 | "tool_choice": "required",
209 | "tools": [
210 | {
211 | "type": "function",
212 | "name": "get_order",
213 | "parameters": { "type": "object", "properties": { "id": { "type": "string" } }, "required": ["id"] }
214 | },
215 | {
216 | "type": "function",
217 | "name": "get_customer",
218 | "parameters": { "type": "object", "properties": { "id": { "type": "string" } }, "required": ["id"] }
219 | }
220 | ]
221 | }'
222 | */
// Streams a Responses-API completion with two parallel tool calls and
// checks intermediate and final argument accumulation for both tools.
Deno.test("asyncLLM - OpenAI Responses with multiple tool calls", async () => {
  let i = 0;
  let last = {};
  for await (last of asyncLLM(`${BASE_URL}/openai-responses-tools2.txt`)) {
    if (i === 0) {
      assertEquals(last.tools[0], {
        name: "get_order",
        id: "fc_6808d34ab2748192957f518947f0e14d01c57d45ab76fecc",
        args: "",
      });
    }
    if (i === 7) assertEquals(last.tools[0].args, '{"id":"123456"}');
    if (i === 10) {
      assertEquals(last.tools[1], {
        name: "get_customer",
        id: "fc_6808d34ac3548192916cd16fdad20dc101c57d45ab76fecc",
        args: '{"id":',
      });
    }
    if (i === 13) assertEquals(last.tools[1].args, '{"id":"7890"}');
    i += 1;
  }
  assertEquals(i, 14);
  assertEquals(last.tools[0], {
    name: "get_order",
    id: "fc_6808d34ab2748192957f518947f0e14d01c57d45ab76fecc",
    args: '{"id":"123456"}',
  });
  assertEquals(last.tools[1], {
    name: "get_customer",
    id: "fc_6808d34ac3548192916cd16fdad20dc101c57d45ab76fecc",
    args: '{"id":"7890"}',
  });
});
257 |
258 | /*
259 | curl -X POST https://llmfoundry.straive.com/anthropic/v1/messages \
260 | -H "Authorization: Bearer $LLMFOUNDRY_TOKEN:asyncllm" \
261 | -H "Content-Type: application/json" \
262 | -d '{"model": "claude-3-haiku-20240307", "stream": true, "max_tokens": 10, "messages": [{"role": "user", "content": "What is 2 + 2"}]}'
263 | */
// Streams a plain Anthropic message and checks cumulative content.
Deno.test("asyncLLM - Anthropic", async () => {
  const events = await Array.fromAsync(asyncLLM(`${BASE_URL}/anthropic.txt`));

  assertEquals(events.length, 3);
  assertEquals(events[0].content, "2 ");
  assertEquals(events[1].content, "2 + 2 ");
  assertEquals(events[2].content, "2 + 2 = 4.");
  // No tool calls in a plain-text message.
  assertEquals(events.at(-1).tools, undefined);
});
273 |
// Streams an Anthropic message that emits text first, then a tool call;
// checks the transition from content events to tool-argument events.
Deno.test("asyncLLM - Anthropic with tool calls", async () => {
  let step = 0;
  let last = {};
  for await (last of asyncLLM(`${BASE_URL}/anthropic-tools.txt`)) {
    if (step === 0) assertEquals(last.content, "Okay");
    if (step === 12) assertEquals(last.content, "Okay, let's check the weather for San Francisco, CA:");
    if (step === 13) assertEquals(last.tools[0], { name: "get_weather", id: "toolu_01T1x1fJ34qAmk2tNTrN7Up6" });
    if (step === 14) assertEquals(last.tools[0].args, "");
    step++;
  }
  assertEquals(last.tools[0].name, "get_weather");
  assertEquals(last.tools[0].id, "toolu_01T1x1fJ34qAmk2tNTrN7Up6");
  // Final event carries the complete, parseable argument string.
  assertEquals(JSON.parse(last.tools[0].args), {
    location: "San Francisco, CA",
    unit: "fahrenheit",
  });
  assertEquals(step, 23);
});
292 |
293 | /*
294 | curl -X POST https://llmfoundry.straive.com/anthropic/v1/messages \
295 | -H "Authorization: Bearer $LLMFOUNDRY_TOKEN:asyncllm" \
296 | -H "Content-Type: application/json" \
297 | -d '{
298 | "system": "Call get_order({order_id}) AND get_customer({customer_id}) in parallel",
299 | "messages": [{ "role": "user", "content": "Order ID: 123456, Customer ID: 7890" }],
300 | "model": "claude-3-haiku-20240307",
301 | "max_tokens": 4096,
302 | "stream": true,
303 | "tool_choice": { "type": "any", "disable_parallel_tool_use": false },
304 | "tools": [
305 | { "name": "get_order", "input_schema": { "type": "object", "properties": { "id": { "type": "string" } }, "required": ["id"] } },
306 | { "name": "get_customer", "input_schema": { "type": "object", "properties": { "id": { "type": "string" } }, "required": ["id"] } }
307 | ]
308 | }'
310 | */
// Streams an Anthropic message with two parallel tool calls and checks
// intermediate and final argument accumulation for both tools.
Deno.test("asyncLLM - Anthropic with multiple tool calls", async () => {
  let step = 0;
  let last = {};
  for await (last of asyncLLM(`${BASE_URL}/anthropic-tools2.txt`)) {
    if (step === 0) assertEquals(last.tools[0], { name: "get_order", id: "toolu_015yB3TjTS1RBaM7VScM2MQY" });
    if (step === 2) assertEquals(last.tools[0].args, '{"id": "1');
    if (step === 7) {
      assertEquals(last.tools[1], { name: "get_customer", id: "toolu_013VAZTYqMJm2JuRCqEA4kam", args: '{"id": "789' });
    }
    step++;
  }
  assertEquals(step, 9);
  assertEquals(last.tools[0], { name: "get_order", id: "toolu_015yB3TjTS1RBaM7VScM2MQY", args: '{"id": "123456"}' });
  assertEquals(last.tools[1], { name: "get_customer", id: "toolu_013VAZTYqMJm2JuRCqEA4kam", args: '{"id": "7890"}' });
});
325 |
326 | /*
327 | curl -X POST https://llmfoundry.straive.com/gemini/v1beta/models/gemini-1.5-flash-8b:streamGenerateContent?alt=sse \
328 | -H "Authorization: Bearer $LLMFOUNDRY_TOKEN:asyncllm" \
329 | -H "Content-Type: application/json" \
330 | -d '{
331 | "system_instruction": { "parts": [{ "text": "You are a helpful assistant" }] },
332 | "contents": [{ "role": "user", "parts": [{ "text": "What is 2+2?" }] }]
333 | }'
334 | */
// Streams a plain Gemini completion and checks cumulative content.
Deno.test("asyncLLM - Gemini", async () => {
  const events = await Array.fromAsync(asyncLLM(`${BASE_URL}/gemini.txt`));

  assertEquals(events.length, 3);
  assertEquals(events[0].content, "2");
  assertEquals(events[1].content, "2 + 2 = 4\n");
  assertEquals(events[2].content, "2 + 2 = 4\n");
  // No tool calls in a plain-text completion.
  assertEquals(events.at(-1).tools, undefined);
});
344 |
345 | /*
346 | curl -X POST https://llmfoundry.straive.com/gemini/v1beta/models/gemini-1.5-flash-latest:streamGenerateContent?alt=sse \
347 | -H "Authorization: Bearer $LLMFOUNDRY_TOKEN:asyncllm" \
348 | -H "Content-Type: application/json" \
349 | -d '{
350 | "contents": { "role": "user", "parts": { "text": "Call take_notes passing it an essay about capitalism vs socialism" } },
351 | "tools": [
352 | {
353 | "function_declarations": [
354 | {
355 | "name": "take_notes",
356 | "description": "Take notes about a topic",
357 | "parameters": {
358 | "type": "object",
359 | "properties": { "note": { "type": "string" } },
360 | "required": ["note"]
361 | }
362 | }
363 | ]
364 | }
365 | ]
366 | }'
367 | */
// Streams a Gemini completion whose single event is one complete tool call.
Deno.test("asyncLLM - Gemini with tool calls", async () => {
  let count = 0;
  let last = {};
  for await (last of asyncLLM(`${BASE_URL}/gemini-tools.txt`)) {
    if (count === 0) {
      assertEquals(last.tools[0].name, "take_notes");
      assertEquals(last.tools[0].args.startsWith('{"note":"Capitalism'), true);
    }
    count++;
  }
  assertEquals(last.content, undefined);
  // Gemini delivers the full argument payload in a single event.
  const note = JSON.parse(last.tools[0].args).note;
  assertEquals(note.startsWith("Capitalism and socialism"), true);
  assertEquals(note.endsWith("specific circumstances and values."), true);
  assertEquals(count, 1);
});
381 |
382 | /*
383 | curl -X POST https://llmfoundry.straive.com/gemini/v1beta/models/gemini-1.5-flash-latest:streamGenerateContent?alt=sse \
384 | -H "Authorization: Bearer $LLMFOUNDRY_TOKEN:asyncllm" \
385 | -H "Content-Type: application/json" \
386 | -d '{
387 | "systemInstruction": {"parts": [{"text": "Call get_order({order_id}) AND get_customer({customer_id}) in parallel"}]},
388 | "contents": [{"role": "user", "parts": [{ "text": "Order ID: 123456, Customer ID: 7890" }] }],
389 | "toolConfig": { "function_calling_config": { "mode": "ANY" } },
390 | "tools": {
391 | "functionDeclarations": [
392 | {
393 | "name": "get_order",
394 | "parameters": { "type": "object", "properties": { "id": { "type": "string" } }, "required": ["id"] }
395 | },
396 | {
397 | "name": "get_customer",
398 | "parameters": { "type": "object", "properties": { "id": { "type": "string" } }, "required": ["id"] }
399 | }
400 | ]
401 | }
402 | }'
404 | */
// Streams a Gemini completion with two parallel tool calls; both arrive
// complete in the first event.
Deno.test("asyncLLM - Gemini with multiple tool calls", async () => {
  let seen = 0;
  let last = {};
  for await (last of asyncLLM(`${BASE_URL}/gemini-tools2.txt`)) {
    if (seen === 0) {
      assertEquals(last.tools[0], { name: "get_order", args: '{"id":"123456"}' });
      assertEquals(last.tools[1], { name: "get_customer", args: '{"id":"7890"}' });
    }
    seen += 1;
  }
  assertEquals(seen, 3);
  assertEquals(last.tools[0].name, "get_order");
  assertEquals(JSON.parse(last.tools[0].args), { id: "123456" });
  assertEquals(last.tools[1].name, "get_customer");
  assertEquals(JSON.parse(last.tools[1].args), { id: "7890" });
});
421 |
422 | /*
423 | curl -X POST https://llmfoundry.straive.com/openrouter/v1/chat/completions \
424 | -H "Authorization: Bearer $LLMFOUNDRY_TOKEN:asyncllm" \
425 | -H "Content-Type: application/json" \
426 | -d '{"model": "meta-llama/llama-3.2-11b-vision-instruct", "stream": true, "messages": [{"role": "user", "content": "What is 2 + 2"}]}'
427 | */
// Streams an OpenRouter (OpenAI-compatible) completion and checks
// cumulative content including the full final text.
Deno.test("asyncLLM - OpenRouter", async () => {
  const events = await Array.fromAsync(asyncLLM(`${BASE_URL}/openrouter.txt`));

  assertEquals(events.length, 64);
  assertEquals(events[0].content, "");
  assertEquals(events[1].content, " The");
  assertEquals(events[2].content, " The sum");
  assertEquals(
    events.at(-1).content,
    " The sum of 2 and 2 is 4. This is a basic arithmetic operation where you add the two numbers together to get the total. \n\nHere's the calculation:\n\n2 + 2 = 4\n\nSo, the answer to your question is 4.",
  );
  // No tool calls in a plain-text completion.
  assertEquals(events.at(-1).tools, undefined);
});
441 |
// Each entry in errors.txt exercises one failure mode; asyncLLM should
// surface every one as an `error` field rather than throwing.
Deno.test("asyncLLM - Error handling", async () => {
  const events = await Array.fromAsync(asyncLLM(`${BASE_URL}/errors.txt`));

  assertEquals(events.length, 6);
  // Malformed JSON
  assertEquals(events[0].error, "Unexpected token 'i', \"invalid json\" is not valid JSON");
  // OpenAI-style error
  assertEquals(events[1].error, "OpenAI API error");
  // Anthropic-style error
  assertEquals(events[2].error, "Anthropic API error");
  // Gemini-style error
  assertEquals(events[3].error, "Gemini API error");
  // OpenRouter-style error
  assertEquals(events[4].error, "OpenRouter API error");
  // No data
  assertEquals(events[5].error, "Unexpected end of JSON input");
});
465 |
// The third argument's onResponse hook should receive the raw fetch
// Response before streaming begins, without disturbing normal output.
Deno.test("asyncLLM - Config callback", async () => {
  let responseStatus = 0;
  let contentType = "";

  const config = {
    onResponse: async (response) => {
      responseStatus = response.status;
      contentType = response.headers.get("Content-Type");
    },
  };
  const results = await Array.fromAsync(asyncLLM(`${BASE_URL}/openai.txt`, {}, config));

  assertEquals(responseStatus, 200);
  assertEquals(contentType, "text/event-stream");
  assertEquals(results.length, 10); // Verify normal operation still works
});
487 |
// onResponse still fires (HTTP is 200) even when the stream body itself
// contains error payloads.
Deno.test("asyncLLM - Config callback error handling", async () => {
  let responseStatus = 0;

  const config = {
    onResponse: async (response) => {
      responseStatus = response.status;
    },
  };
  const results = await Array.fromAsync(asyncLLM(`${BASE_URL}/errors.txt`, {}, config));

  assertEquals(responseStatus, 200);
  assertEquals(results[0].error, "Unexpected token 'i', \"invalid json\" is not valid JSON");
});
506 |
// asyncLLM accepts a fetch Request instance, not just a URL string.
Deno.test("asyncLLM - Request object input", async () => {
  const results = await Array.fromAsync(asyncLLM(new Request(`${BASE_URL}/openai.txt`)));

  assertEquals(results.length, 10);
});
513 |
--------------------------------------------------------------------------------