├── .npmrc ├── examples ├── chat-react │ ├── public │ │ ├── favicon.ico │ │ └── index.html │ ├── src │ │ ├── index.js │ │ └── App.js │ ├── .gitignore │ ├── package.json │ └── README.md ├── package.json ├── typescript │ ├── package.json │ ├── tsconfig.json │ ├── chat_with_streaming.ts │ └── package-lock.json ├── list_models.js ├── chat_no_streaming.js ├── json_format.js ├── embeddings.js ├── chat_with_streaming.js ├── file.jsonl ├── files.js ├── package-lock.json ├── jobs.js └── function_calling.js ├── .eslintrc.yml ├── src ├── files.d.ts ├── files.js ├── jobs.d.ts ├── jobs.js ├── client.d.ts └── client.js ├── package.json ├── .github └── workflows │ └── build_publish.yaml ├── tests ├── files.test.js ├── jobs.test.js ├── client.test.js └── utils.js ├── .gitignore ├── README.md └── LICENSE /.npmrc: -------------------------------------------------------------------------------- 1 | # when we run npm version, we don't want to create a git tag 2 | git-tag-version=false 3 | -------------------------------------------------------------------------------- /examples/chat-react/public/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mistralai/client-js/HEAD/examples/chat-react/public/favicon.ico -------------------------------------------------------------------------------- /examples/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "mistralai client examples", 3 | "version": "1.0.0", 4 | "description": "", 5 | "type": "module", 6 | "dependencies": { 7 | "@mistralai/mistralai": "file:../" 8 | }, 9 | "keywords": [] 10 | } -------------------------------------------------------------------------------- /examples/typescript/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@mistralai/client-examples-ts", 3 | "type": "module", 4 | "dependencies": { 5 | 
"@mistralai/mistralai": "file:../..", 6 | "tsx": "^4.9.3" 7 | }, 8 | "devDependencies": { 9 | "typescript": "^5.4.5" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /examples/typescript/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ESNext", 4 | "module": "NodeNext", 5 | "moduleResolution": "NodeNext", 6 | "esModuleInterop": true, 7 | "forceConsistentCasingInFileNames": true, 8 | "strict": true 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /examples/chat-react/src/index.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import ReactDOM from 'react-dom/client'; 3 | import App from './App'; 4 | 5 | const root = ReactDOM.createRoot(document.getElementById('root')); 6 | root.render( 7 | 8 | 9 | 10 | ); 11 | -------------------------------------------------------------------------------- /examples/list_models.js: -------------------------------------------------------------------------------- 1 | import MistralClient from '@mistralai/mistralai'; 2 | 3 | const apiKey = process.env.MISTRAL_API_KEY; 4 | 5 | const client = new MistralClient(apiKey); 6 | 7 | const listModelsResponse = await client.listModels(); 8 | 9 | listModelsResponse.data.forEach((model) => { 10 | console.log('Model:', model); 11 | }); 12 | -------------------------------------------------------------------------------- /.eslintrc.yml: -------------------------------------------------------------------------------- 1 | env: 2 | browser: true 3 | es2021: true 4 | extends: google 5 | ignorePatterns: 6 | - examples/chat-react/ 7 | - src/client.d.ts 8 | parserOptions: 9 | ecmaVersion: latest 10 | sourceType: module 11 | rules: 12 | indent: ["error", 2] 13 | space-before-function-paren: ["error", "never"] 14 | quotes: ["error", "single"] 15 | 
-------------------------------------------------------------------------------- /examples/chat_no_streaming.js: -------------------------------------------------------------------------------- 1 | import MistralClient from '@mistralai/mistralai'; 2 | 3 | const apiKey = process.env.MISTRAL_API_KEY; 4 | 5 | const client = new MistralClient(apiKey); 6 | 7 | const chatResponse = await client.chat({ 8 | model: 'mistral-tiny', 9 | messages: [{role: 'user', content: 'What is the best French cheese?'}], 10 | }); 11 | 12 | console.log('Chat:', chatResponse.choices[0].message.content); 13 | -------------------------------------------------------------------------------- /examples/chat-react/.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # production 12 | /build 13 | 14 | # misc 15 | .DS_Store 16 | .env.local 17 | .env.development.local 18 | .env.test.local 19 | .env.production.local 20 | 21 | npm-debug.log* 22 | yarn-debug.log* 23 | yarn-error.log* 24 | -------------------------------------------------------------------------------- /examples/json_format.js: -------------------------------------------------------------------------------- 1 | import MistralClient from '@mistralai/mistralai'; 2 | 3 | const apiKey = process.env.MISTRAL_API_KEY; 4 | 5 | const client = new MistralClient(apiKey); 6 | 7 | const chatResponse = await client.chat({ 8 | model: 'mistral-large-latest', 9 | messages: [{role: 'user', content: 'What is the best French cheese?'}], 10 | responseFormat: {type: 'json_object'}, 11 | }); 12 | 13 | console.log('Chat:', chatResponse.choices[0].message.content); 14 | -------------------------------------------------------------------------------- /examples/embeddings.js: 
-------------------------------------------------------------------------------- 1 | import MistralClient from '@mistralai/mistralai'; 2 | 3 | const apiKey = process.env.MISTRAL_API_KEY; 4 | 5 | const client = new MistralClient(apiKey); 6 | 7 | const input = []; 8 | for (let i = 0; i < 1; i++) { 9 | input.push('What is the best French cheese?'); 10 | } 11 | 12 | const embeddingsBatchResponse = await client.embeddings({ 13 | model: 'mistral-embed', 14 | input: input, 15 | }); 16 | 17 | console.log('Embeddings Batch:', embeddingsBatchResponse.data); 18 | -------------------------------------------------------------------------------- /examples/chat_with_streaming.js: -------------------------------------------------------------------------------- 1 | import MistralClient from '@mistralai/mistralai'; 2 | 3 | const apiKey = process.env.MISTRAL_API_KEY; 4 | 5 | const client = new MistralClient(apiKey); 6 | 7 | const chatStreamResponse = client.chatStream({ 8 | model: 'mistral-tiny', 9 | messages: [{role: 'user', content: 'What is the best French cheese?'}], 10 | }); 11 | 12 | console.log('Chat Stream:'); 13 | for await (const chunk of chatStreamResponse) { 14 | if (chunk.choices[0].delta.content !== undefined) { 15 | const streamText = chunk.choices[0].delta.content; 16 | process.stdout.write(streamText); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/files.d.ts: -------------------------------------------------------------------------------- 1 | export enum Purpose { 2 | finetune = 'fine-tune', 3 | } 4 | 5 | export interface FileObject { 6 | id: string; 7 | object: string; 8 | bytes: number; 9 | created_at: number; 10 | filename: string; 11 | purpose?: Purpose; 12 | } 13 | 14 | export interface FileDeleted { 15 | id: string; 16 | object: string; 17 | deleted: boolean; 18 | } 19 | 20 | export class FilesClient { 21 | constructor(client: MistralClient); 22 | 23 | create(options: { file: File; purpose?: string }): 
Promise; 24 | 25 | retrieve(options: { fileId: string }): Promise; 26 | 27 | list(): Promise; 28 | 29 | delete(options: { fileId: string }): Promise; 30 | } 31 | -------------------------------------------------------------------------------- /examples/chat-react/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "example", 3 | "version": "0.1.0", 4 | "private": true, 5 | "dependencies": { 6 | "@mistralai/mistralai": "file:../../", 7 | "bulma": "^0.9.4", 8 | "react": "^18.2.0", 9 | "react-dom": "^18.2.0", 10 | "react-scripts": "5.0.1", 11 | "@babel/plugin-proposal-private-property-in-object": "^7.16.7" 12 | }, 13 | "scripts": { 14 | "start": "react-scripts start" 15 | }, 16 | "browserslist": { 17 | "production": [ 18 | ">0.2%", 19 | "not dead", 20 | "not op_mini all" 21 | ], 22 | "development": [ 23 | "last 1 chrome version", 24 | "last 1 firefox version", 25 | "last 1 safari version" 26 | ] 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /examples/file.jsonl: -------------------------------------------------------------------------------- 1 | {"messages": [{"role": "system", "content": "Marv is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "What's the capital of France?"}, {"role": "assistant", "content": "Paris, as if everyone doesn't know that already."}]} 2 | {"messages": [{"role": "system", "content": "Marv is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "Who wrote 'Romeo and Juliet'?"}, {"role": "assistant", "content": "Oh, just some guy named William Shakespeare. Ever heard of him?"}]} 3 | {"messages": [{"role": "system", "content": "Marv is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "How far is the Moon from Earth?"}, {"role": "assistant", "content": "Around 384,400 kilometers. 
Give or take a few, like that really matters.", "weight": 0}]} -------------------------------------------------------------------------------- /examples/chat-react/README.md: -------------------------------------------------------------------------------- 1 | # Mistral AI JS Client Web Demo 2 | 3 | A demo of the Mistral AI client being used in a React App. 4 | 5 | ## Installation 6 | 7 | ```bash 8 | npm install 9 | ``` 10 | 11 | ## Usage 12 | 13 | ```bash 14 | npm start 15 | ``` 16 | 17 | This will start a web-browser instance. 18 | 19 | ### API Key Setup 20 | 21 | Running the examples requires a Mistral AI API key. 22 | 23 | 1. Get your own Mistral API Key: 24 | 2. Set your Mistral API Key as an environment variable. You only need to do this once. 25 | 26 | ```bash 27 | # set Mistral API Key (using zsh for example) 28 | $ echo 'export MISTRAL_API_KEY=[your_key_here]' >> ~/.zshenv 29 | 30 | # reload the environment (or just quit and open a new terminal) 31 | $ source ~/.zshenv 32 | ``` 33 | -------------------------------------------------------------------------------- /examples/files.js: -------------------------------------------------------------------------------- 1 | import MistralClient from '@mistralai/mistralai'; 2 | import * as fs from 'fs'; 3 | 4 | 5 | const apiKey = process.env.MISTRAL_API_KEY; 6 | 7 | const client = new MistralClient(apiKey); 8 | 9 | // Create a new file 10 | const blob = new Blob( 11 | [fs.readFileSync('file.jsonl')], 12 | {type: 'application/json'}, 13 | ); 14 | const createdFile = await client.files.create({file: blob}); 15 | console.log(createdFile); 16 | 17 | // List files 18 | const files = await client.files.list(); 19 | console.log(files); 20 | 21 | // Retrieve a file 22 | const retrievedFile = await client.files.retrieve({fileId: createdFile.id}); 23 | console.log(retrievedFile); 24 | 25 | // Delete a file 26 | const deletedFile = await client.files.delete({fileId: createdFile.id}); 27 | console.log(deletedFile); 28 | 
-------------------------------------------------------------------------------- /examples/typescript/chat_with_streaming.ts: -------------------------------------------------------------------------------- 1 | import MistralClient from '@mistralai/mistralai'; 2 | 3 | const apiKey = process.env.MISTRAL_API_KEY; 4 | 5 | const client = new MistralClient(apiKey); 6 | 7 | const responseInterface = '{"best": string, "reasoning": string}'; 8 | const chatStreamResponse = client.chatStream({ 9 | model: 'open-mistral-7b', 10 | responseFormat: {type: 'json_object'}, 11 | messages: [{ 12 | role: 'user', content: ` 13 | What is the best French cheese? 14 | Answer in ${responseInterface} format`, 15 | }], 16 | }); 17 | 18 | console.log('Chat Stream:'); 19 | for await (const chunk of chatStreamResponse) { 20 | if (chunk.choices[0].delta.content !== undefined) { 21 | const streamText = chunk.choices[0].delta.content; 22 | process.stdout.write(streamText); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@mistralai/mistralai", 3 | "version": "0.5.0", 4 | "description": "", 5 | "author": "bam4d@mistral.ai", 6 | "license": "ISC", 7 | "type": "module", 8 | "main": "src/client.js", 9 | "types": "src/client.d.ts", 10 | "scripts": { 11 | "lint": "./node_modules/.bin/eslint .", 12 | "test": "node --experimental-vm-modules node_modules/.bin/jest" 13 | }, 14 | "jest": { 15 | "testPathIgnorePatterns": [ 16 | "examples" 17 | ] 18 | }, 19 | "repository": { 20 | "type": "git", 21 | "url": "https://github.com/mistralai/client-js" 22 | }, 23 | "dependencies": { 24 | "node-fetch": "^2.6.7" 25 | }, 26 | "devDependencies": { 27 | "eslint": "^8.55.0", 28 | "eslint-config-google": "^0.14.0", 29 | "prettier": "2.8.8", 30 | "jest": "^29.7.0" 31 | } 32 | } 33 | -------------------------------------------------------------------------------- 
/examples/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "mistralai client examples", 3 | "version": "1.0.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "mistralai client examples", 9 | "version": "1.0.0", 10 | "dependencies": { 11 | "@mistralai/mistralai": "file:../" 12 | } 13 | }, 14 | "..": { 15 | "name": "@mistralai/mistralai", 16 | "version": "0.4.0", 17 | "license": "ISC", 18 | "dependencies": { 19 | "node-fetch": "^2.6.7" 20 | }, 21 | "devDependencies": { 22 | "eslint": "^8.55.0", 23 | "eslint-config-google": "^0.14.0", 24 | "jest": "^29.7.0", 25 | "prettier": "2.8.8" 26 | } 27 | }, 28 | "node_modules/@mistralai/mistralai": { 29 | "resolved": "..", 30 | "link": true 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /examples/chat-react/public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Mistral AI React Example 10 | 11 | 12 | 13 |
14 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /examples/jobs.js: -------------------------------------------------------------------------------- 1 | import MistralClient from '@mistralai/mistralai'; 2 | import * as fs from 'fs'; 3 | 4 | 5 | const apiKey = process.env.MISTRAL_API_KEY; 6 | 7 | const client = new MistralClient(apiKey); 8 | 9 | // Create a new file 10 | const blob = new Blob( 11 | [fs.readFileSync('file.jsonl')], 12 | {type: 'application/json'}, 13 | ); 14 | const createdFile = await client.files.create({file: blob}); 15 | 16 | // Create a new job 17 | const hyperparameters = { 18 | training_steps: 10, 19 | learning_rate: 0.0001, 20 | }; 21 | const createdJob = await client.jobs.create({ 22 | model: 'open-mistral-7b', 23 | trainingFiles: [createdFile.id], 24 | validationFiles: [createdFile.id], 25 | hyperparameters, 26 | }); 27 | console.log(createdJob); 28 | 29 | // List jobs 30 | const jobs = await client.jobs.list(); 31 | console.log(jobs); 32 | 33 | // Retrieve a job 34 | const retrievedJob = await client.jobs.retrieve({jobId: createdJob.id}); 35 | console.log(retrievedJob); 36 | 37 | // Cancel a job 38 | const canceledJob = await client.jobs.cancel({jobId: createdJob.id}); 39 | console.log(canceledJob); 40 | -------------------------------------------------------------------------------- /.github/workflows/build_publish.yaml: -------------------------------------------------------------------------------- 1 | name: Build and Publish 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | 7 | # We only deploy on tags and main branch 8 | tags: 9 | # Only run on tags that match the following regex 10 | # This will match tags like 1.0.0, 1.0.1, etc. 
11 | - "[0-9]+.[0-9]+.[0-9]+" 12 | 13 | # Build on pull requests 14 | pull_request: 15 | 16 | jobs: 17 | lint_and_test: 18 | runs-on: ubuntu-latest 19 | 20 | strategy: 21 | matrix: 22 | node-version: [18, 20, 22] 23 | 24 | steps: 25 | # Checkout the repository 26 | - name: Checkout 27 | uses: actions/checkout@v4 28 | 29 | # Set node version 30 | - name: set node version 31 | uses: actions/setup-node@v4 32 | with: 33 | node-version: ${{ matrix.node-version }} 34 | 35 | # Install Build stuff 36 | - name: Install Dependencies 37 | run: | 38 | npm install 39 | 40 | # Eslint 41 | - name: ESlint check 42 | run: | 43 | npm run lint 44 | 45 | # Run tests 46 | - name: Run tests 47 | run: | 48 | npm run test 49 | 50 | # Build TypeScript Examples 51 | - name: Build typescript examples 52 | run: | 53 | cd examples/typescript 54 | npm install 55 | npx tsc --build --verbose tsconfig.json 56 | 57 | publish: 58 | needs: lint_and_test 59 | runs-on: ubuntu-latest 60 | if: startsWith(github.ref, 'refs/tags') 61 | 62 | steps: 63 | # Checkout the repository 64 | - name: Checkout 65 | uses: actions/checkout@v4 66 | 67 | # Set node version 68 | - name: set node version 69 | uses: actions/setup-node@v4 70 | with: 71 | node-version: 18 72 | 73 | # Publish module 74 | - name: Publish 75 | run: | 76 | echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" >> .npmrc 77 | npm publish 78 | -------------------------------------------------------------------------------- /src/files.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Class representing a client for file operations. 3 | */ 4 | class FilesClient { 5 | /** 6 | * Create a FilesClient object. 7 | * @param {MistralClient} client - The client object used for making requests. 8 | */ 9 | constructor(client) { 10 | this.client = client; 11 | } 12 | 13 | /** 14 | * Create a new file. 15 | * @param {File} file - The file to be created. 
16 | * @param {string} purpose - The purpose of the file. Default is 'fine-tune'. 17 | * @return {Promise<*>} A promise that resolves to a FileObject. 18 | * @throws {MistralAPIError} If no response is received from the server. 19 | */ 20 | async create({file, purpose = 'fine-tune'}) { 21 | const formData = new FormData(); 22 | formData.append('file', file); 23 | formData.append('purpose', purpose); 24 | const response = await this.client._request( 25 | 'post', 26 | 'v1/files', 27 | null, 28 | undefined, 29 | formData, 30 | ); 31 | return response; 32 | } 33 | 34 | /** 35 | * Retrieve a file. 36 | * @param {string} fileId - The ID of the file to retrieve. 37 | * @return {Promise<*>} A promise that resolves to the file data. 38 | */ 39 | async retrieve({fileId}) { 40 | const response = await this.client._request('get', `v1/files/${fileId}`); 41 | return response; 42 | } 43 | 44 | /** 45 | * List all files. 46 | * @return {Promise>} A promise that resolves to 47 | * an array of FileObject. 48 | */ 49 | async list() { 50 | const response = await this.client._request('get', 'v1/files'); 51 | return response; 52 | } 53 | 54 | /** 55 | * Delete a file. 56 | * @param {string} fileId - The ID of the file to delete. 57 | * @return {Promise<*>} A promise that resolves to the response. 
58 | */ 59 | async delete({fileId}) { 60 | const response = await this.client._request('delete', `v1/files/${fileId}`); 61 | return response; 62 | } 63 | } 64 | 65 | export default FilesClient; 66 | -------------------------------------------------------------------------------- /tests/files.test.js: -------------------------------------------------------------------------------- 1 | import MistralClient from '../src/client'; 2 | import { 3 | mockFetch, 4 | mockFileResponsePayload, 5 | mockFilesResponsePayload, 6 | mockDeletedFileResponsePayload, 7 | } from './utils'; 8 | 9 | // Test the files endpoint 10 | describe('Mistral Client', () => { 11 | let client; 12 | beforeEach(() => { 13 | client = new MistralClient(); 14 | }); 15 | 16 | describe('create()', () => { 17 | it('should return a file response object', async() => { 18 | // Mock the fetch function 19 | const mockResponse = mockFileResponsePayload(); 20 | client._fetch = mockFetch(200, mockResponse); 21 | 22 | const response = await client.files.create({ 23 | file: null, 24 | }); 25 | expect(response).toEqual(mockResponse); 26 | }); 27 | }); 28 | 29 | describe('retrieve()', () => { 30 | it('should return a file response object', async() => { 31 | // Mock the fetch function 32 | const mockResponse = mockFileResponsePayload(); 33 | client._fetch = mockFetch(200, mockResponse); 34 | 35 | const response = await client.files.retrieve({ 36 | fileId: 'fileId', 37 | }); 38 | expect(response).toEqual(mockResponse); 39 | }); 40 | }); 41 | 42 | describe('list()', () => { 43 | it('should return a list of files response object', async() => { 44 | // Mock the fetch function 45 | const mockResponse = mockFilesResponsePayload(); 46 | client._fetch = mockFetch(200, mockResponse); 47 | 48 | const response = await client.files.list(); 49 | expect(response).toEqual(mockResponse); 50 | }); 51 | }); 52 | 53 | describe('delete()', () => { 54 | it('should return a deleted file response object', async() => { 55 | // Mock
the fetch function 56 | const mockResponse = mockDeletedFileResponsePayload(); 57 | client._fetch = mockFetch(200, mockResponse); 58 | 59 | const response = await client.files.delete({ 60 | fileId: 'fileId', 61 | }); 62 | expect(response).toEqual(mockResponse); 63 | }); 64 | }); 65 | }); 66 | -------------------------------------------------------------------------------- /src/jobs.d.ts: -------------------------------------------------------------------------------- 1 | export enum JobStatus { 2 | QUEUED = 'QUEUED', 3 | STARTED = 'STARTED', 4 | RUNNING = 'RUNNING', 5 | FAILED = 'FAILED', 6 | SUCCESS = 'SUCCESS', 7 | CANCELLED = 'CANCELLED', 8 | CANCELLATION_REQUESTED = 'CANCELLATION_REQUESTED', 9 | } 10 | 11 | export interface TrainingParameters { 12 | training_steps: number; 13 | learning_rate: number; 14 | } 15 | 16 | export interface WandbIntegration { 17 | type: 'wandb'; 18 | project: string; 19 | name: string | null; 20 | api_key: string | null; 21 | run_name: string | null; 22 | } 23 | 24 | export type Integration = WandbIntegration; 25 | 26 | export interface Job { 27 | id: string; 28 | hyperparameters: TrainingParameters; 29 | fine_tuned_model: string; 30 | model: string; 31 | status: JobStatus; 32 | jobType: string; 33 | created_at: number; 34 | modified_at: number; 35 | training_files: string[]; 36 | validation_files?: string[]; 37 | object: 'job'; 38 | integrations: Integration[]; 39 | } 40 | 41 | export interface Event { 42 | name: string; 43 | data?: Record; 44 | created_at: number; 45 | } 46 | 47 | export interface Metric { 48 | train_loss: number | null; 49 | valid_loss: number | null; 50 | valid_mean_token_accuracy: number | null; 51 | } 52 | 53 | export interface Checkpoint { 54 | metrics: Metric; 55 | step_number: number; 56 | created_at: number; 57 | } 58 | 59 | export interface DetailedJob extends Job { 60 | events: Event[]; 61 | checkpoints: Checkpoint[]; 62 | } 63 | 64 | export interface Jobs { 65 | data: Job[]; 66 | object: 'list'; 67 | } 68
| 69 | export class JobsClient { 70 | constructor(client: MistralClient); 71 | 72 | create(options: { 73 | model: string; 74 | trainingFiles: string[]; 75 | validationFiles?: string[]; 76 | hyperparameters?: TrainingParameters; 77 | suffix?: string; 78 | integrations?: Integration[]; 79 | }): Promise; 80 | 81 | retrieve(options: { jobId: string }): Promise; 82 | 83 | list(params?: Record): Promise; 84 | 85 | cancel(options: { jobId: string }): Promise; 86 | } 87 | -------------------------------------------------------------------------------- /tests/jobs.test.js: -------------------------------------------------------------------------------- 1 | import MistralClient from '../src/client'; 2 | import { 3 | mockFetch, 4 | mockJobResponsePayload, 5 | mockJobsResponsePayload, 6 | mockDeletedJobResponsePayload, 7 | } from './utils'; 8 | 9 | // Test the jobs endpoint 10 | describe('Mistral Client', () => { 11 | let client; 12 | beforeEach(() => { 13 | client = new MistralClient(); 14 | }); 15 | 16 | describe('createJob()', () => { 17 | it('should return a job response object', async() => { 18 | // Mock the fetch function 19 | const mockResponse = mockJobResponsePayload(); 20 | client._fetch = mockFetch(200, mockResponse); 21 | 22 | const response = await client.jobs.create({ 23 | model: 'mistral-medium', 24 | trainingFiles: [], 25 | validationFiles: [], 26 | hyperparameters: { 27 | training_steps: 1800, 28 | learning_rate: 1.0e-4, 29 | }, 30 | }); 31 | expect(response).toEqual(mockResponse); 32 | }); 33 | }); 34 | 35 | describe('retrieveJob()', () => { 36 | it('should return a job response object', async() => { 37 | // Mock the fetch function 38 | const mockResponse = mockJobResponsePayload(); 39 | client._fetch = mockFetch(200, mockResponse); 40 | 41 | const response = await client.jobs.retrieve({ 42 | jobId: 'jobId', 43 | }); 44 | expect(response).toEqual(mockResponse); 45 | }); 46 | }); 47 | 48 | describe('listJobs()', () => { 49 | it('should return a list of jobs 
response object', async() => { 50 | // Mock the fetch function 51 | const mockResponse = mockJobsResponsePayload(); 52 | client._fetch = mockFetch(200, mockResponse); 53 | 54 | const response = await client.jobs.list(); 55 | expect(response).toEqual(mockResponse); 56 | }); 57 | }); 58 | 59 | describe('cancelJob()', () => { 60 | it('should return a deleted job response object', async() => { 61 | // Mock the fetch function 62 | const mockResponse = mockDeletedJobResponsePayload(); 63 | client._fetch = mockFetch(200, mockResponse); 64 | 65 | const response = await client.jobs.cancel({ 66 | jobId: 'jobId', 67 | }); 68 | expect(response).toEqual(mockResponse); 69 | }); 70 | }); 71 | }); 72 | -------------------------------------------------------------------------------- /examples/chat-react/src/App.js: -------------------------------------------------------------------------------- 1 | import MistralClient from "@mistralai/mistralai"; 2 | 3 | function App() { 4 | 5 | const doChatStream = async function() { 6 | 7 | const apiKey = document.getElementById("apiKey").value; 8 | const chat = document.getElementById("chat").value; 9 | 10 | const client = new MistralClient(apiKey); 11 | 12 | document.getElementById("output").innerHTML = ""; 13 | document.getElementById("error").innerHTML = ""; 14 | 15 | try { 16 | const chatStreamResponse = await client.chatStream({ 17 | model: 'mistral-tiny', 18 | messages: [{role: 'user', content: chat}], 19 | }); 20 | 21 | for await (const chunk of chatStreamResponse) { 22 | if (chunk.choices[0].delta.content !== undefined) { 23 | let streamText = chunk.choices[0].delta.content; 24 | streamText = streamText.replace(/(?:\r\n|\r|\n)/g, '
'); 25 | document.getElementById("output").innerHTML += streamText; 26 | } 27 | } 28 | } 29 | catch (e) { 30 | document.getElementById("error").innerHTML += e; 31 | } 32 | }; 33 | 34 | return ( 35 |
36 |
37 |
38 |

Web Stream Example

39 |
40 | 41 |
42 | 43 |
44 |
45 |
46 | 47 |
48 | 49 |
50 | 51 |
52 |
53 |
54 |
55 | 56 |
57 |
58 | 59 |
60 |
61 |
62 |
63 | ); 64 | } 65 | 66 | export default App; 67 | -------------------------------------------------------------------------------- /src/jobs.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Class representing a client for job operations. 3 | */ 4 | class JobsClient { 5 | /** 6 | * Create a JobsClient object. 7 | * @param {MistralClient} client - The client object used for making requests. 8 | */ 9 | constructor(client) { 10 | this.client = client; 11 | } 12 | 13 | /** 14 | * Create a new job. 15 | * @param {string} model - The model to be used for the job. 16 | * @param {Array} trainingFiles - The list of training files. 17 | * @param {Array} validationFiles - The list of validation files. 18 | * @param {TrainingParameters} hyperparameters - The hyperparameters. 19 | * @param {string} suffix - The suffix for the job. 20 | * @param {Array} integrations - The integrations for the job. 21 | * @return {Promise<*>} A promise that resolves to a Job object. 22 | * @throws {MistralAPIError} If no response is received from the server. 23 | */ 24 | async create({ 25 | model, 26 | trainingFiles, 27 | validationFiles = [], 28 | hyperparameters = { 29 | training_steps: 1800, 30 | learning_rate: 1.0e-4, 31 | }, 32 | suffix = null, 33 | integrations = null, 34 | }) { 35 | const response = await this.client._request('post', 'v1/fine_tuning/jobs', { 36 | model, 37 | training_files: trainingFiles, 38 | validation_files: validationFiles, 39 | hyperparameters, 40 | suffix, 41 | integrations, 42 | }); 43 | return response; 44 | } 45 | 46 | /** 47 | * Retrieve a job. 48 | * @param {string} jobId - The ID of the job to retrieve. 49 | * @return {Promise<*>} A promise that resolves to the job data. 50 | */ 51 | async retrieve({jobId}) { 52 | const response = await this.client._request( 53 | 'get', `v1/fine_tuning/jobs/${jobId}`, {}, 54 | ); 55 | return response; 56 | } 57 | 58 | /** 59 | * List all jobs. 
60 | * @return {Promise>} A promise that resolves to an array of Job. 61 | */ 62 | async list() { 63 | const response = await this.client._request( 64 | 'get', 'v1/fine_tuning/jobs', {}, 65 | ); 66 | return response; 67 | } 68 | 69 | /** 70 | * Cancel a job. 71 | * @param {string} jobId - The ID of the job to cancel. 72 | * @return {Promise<*>} A promise that resolves to the response. 73 | */ 74 | async cancel({jobId}) { 75 | const response = await this.client._request( 76 | 'post', `v1/fine_tuning/jobs/${jobId}/cancel`, {}, 77 | ); 78 | return response; 79 | } 80 | } 81 | 82 | export default JobsClient; 83 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | .pnpm-debug.log* 9 | 10 | # Diagnostic reports (https://nodejs.org/api/report.html) 11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 12 | 13 | # Runtime data 14 | pids 15 | *.pid 16 | *.seed 17 | *.pid.lock 18 | 19 | # Directory for instrumented libs generated by jscoverage/JSCover 20 | lib-cov 21 | 22 | # Coverage directory used by tools like istanbul 23 | coverage 24 | *.lcov 25 | 26 | # nyc test coverage 27 | .nyc_output 28 | 29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 30 | .grunt 31 | 32 | # Bower dependency directory (https://bower.io/) 33 | bower_components 34 | 35 | # node-waf configuration 36 | .lock-wscript 37 | 38 | # Compiled binary addons (https://nodejs.org/api/addons.html) 39 | build/Release 40 | 41 | # Dependency directories 42 | node_modules/ 43 | jspm_packages/ 44 | 45 | # Snowpack dependency directory (https://snowpack.dev/) 46 | web_modules/ 47 | 48 | # TypeScript cache 49 | *.tsbuildinfo 50 | 51 | # Optional npm cache directory 52 | .npm 53 | 54 | # Optional eslint cache 55 | .eslintcache 56 | 57 | # Optional 
stylelint cache 58 | .stylelintcache 59 | 60 | # Microbundle cache 61 | .rpt2_cache/ 62 | .rts2_cache_cjs/ 63 | .rts2_cache_es/ 64 | .rts2_cache_umd/ 65 | 66 | # Optional REPL history 67 | .node_repl_history 68 | 69 | # Output of 'npm pack' 70 | *.tgz 71 | 72 | # Yarn Integrity file 73 | .yarn-integrity 74 | 75 | # dotenv environment variable files 76 | .env 77 | .env.development.local 78 | .env.test.local 79 | .env.production.local 80 | .env.local 81 | 82 | # parcel-bundler cache (https://parceljs.org/) 83 | .cache 84 | .parcel-cache 85 | 86 | # Next.js build output 87 | .next 88 | out 89 | 90 | # Nuxt.js build / generate output 91 | .nuxt 92 | dist 93 | 94 | # Gatsby files 95 | .cache/ 96 | # Comment in the public line in if your project uses Gatsby and not Next.js 97 | # https://nextjs.org/blog/next-9-1#public-directory-support 98 | # public 99 | 100 | # vuepress build output 101 | .vuepress/dist 102 | 103 | # vuepress v2.x temp and cache directory 104 | .temp 105 | .cache 106 | 107 | # Docusaurus cache and generated files 108 | .docusaurus 109 | 110 | # Serverless directories 111 | .serverless/ 112 | 113 | # FuseBox cache 114 | .fusebox/ 115 | 116 | # DynamoDB Local files 117 | .dynamodb/ 118 | 119 | # TernJS port file 120 | .tern-port 121 | 122 | # Stores VSCode versions used for testing VSCode extensions 123 | .vscode-test 124 | 125 | # yarn v2 126 | .yarn/cache 127 | .yarn/unplugged 128 | .yarn/build-state.yml 129 | .yarn/install-state.gz 130 | .pnp.* 131 | 132 | changes.diff -------------------------------------------------------------------------------- /examples/function_calling.js: -------------------------------------------------------------------------------- 1 | import MistralClient from '@mistralai/mistralai'; 2 | 3 | const apiKey = process.env.MISTRAL_API_KEY; 4 | 5 | // Assuming we have the following data 6 | const data = { 7 | transactionId: ['T1001', 'T1002', 'T1003', 'T1004', 'T1005'], 8 | customerId: ['C001', 'C002', 'C003', 'C002', 'C001'], 9 
| paymentAmount: [125.5, 89.99, 120.0, 54.3, 210.2], 10 | paymentDate: [ 11 | '2021-10-05', 12 | '2021-10-06', 13 | '2021-10-07', 14 | '2021-10-05', 15 | '2021-10-08', 16 | ], 17 | paymentStatus: ['Paid', 'Unpaid', 'Paid', 'Paid', 'Pending'], 18 | }; 19 | 20 | /** 21 | * This function retrieves the payment status of a transaction id. 22 | * @param {object} data - The data object. 23 | * @param {string} transactionId - The transaction id. 24 | * @return {string} - The payment status. 25 | */ 26 | function retrievePaymentStatus({data, transactionId}) { 27 | const transactionIndex = data.transactionId.indexOf(transactionId); 28 | if (transactionIndex != -1) { 29 | return JSON.stringify({status: data.paymentStatus[transactionIndex]}); 30 | } else { 31 | return JSON.stringify({status: 'error - transaction id not found.'}); 32 | } 33 | } 34 | 35 | /** 36 | * This function retrieves the payment date of a transaction id. 37 | * @param {object} data - The data object. 38 | * @param {string} transactionId - The transaction id. 39 | * @return {string} - The payment date. 
40 | * 41 | */ 42 | function retrievePaymentDate({data, transactionId}) { 43 | const transactionIndex = data.transactionId.indexOf(transactionId); 44 | if (transactionIndex != -1) { 45 | return JSON.stringify({status: data.payment_date[transactionIndex]}); 46 | } else { 47 | return JSON.stringify({status: 'error - transaction id not found.'}); 48 | } 49 | } 50 | 51 | const namesToFunctions = { 52 | retrievePaymentStatus: (transactionId) => 53 | retrievePaymentStatus({data, ...transactionId}), 54 | retrievePaymentDate: (transactionId) => 55 | retrievePaymentDate({data, ...transactionId}), 56 | }; 57 | 58 | const tools = [ 59 | { 60 | type: 'function', 61 | function: { 62 | name: 'retrievePaymentStatus', 63 | description: 'Get payment status of a transaction id', 64 | parameters: { 65 | type: 'object', 66 | required: ['transactionId'], 67 | properties: { 68 | transactionId: {type: 'string', description: 'The transaction id.'}, 69 | }, 70 | }, 71 | }, 72 | }, 73 | { 74 | type: 'function', 75 | function: { 76 | name: 'retrievePaymentDate', 77 | description: 'Get payment date of a transaction id', 78 | parameters: { 79 | type: 'object', 80 | required: ['transactionId'], 81 | properties: { 82 | transactionId: {type: 'string', description: 'The transaction id.'}, 83 | }, 84 | }, 85 | }, 86 | }, 87 | ]; 88 | 89 | const model = 'mistral-small-latest'; 90 | 91 | const client = new MistralClient(apiKey); 92 | 93 | const messages = [ 94 | {role: 'user', content: 'What\'s the status of my transaction?'}, 95 | ]; 96 | 97 | let response = await client.chat({ 98 | model: model, 99 | messages: messages, 100 | tools: tools, 101 | }); 102 | 103 | console.log(response.choices[0].message.content); 104 | 105 | messages.push({ 106 | role: 'assistant', 107 | content: response.choices[0].message.content, 108 | }); 109 | messages.push({role: 'user', content: 'My transaction ID is T1001.'}); 110 | 111 | response = await client.chat({ 112 | model: model, 113 | messages: messages, 114 | tools: 
tools, 115 | }); 116 | 117 | const toolCall = response.choices[0].message.tool_calls[0]; 118 | const functionName = toolCall.function.name; 119 | const functionParams = JSON.parse(toolCall.function.arguments); 120 | 121 | console.log(`calling functionName: ${functionName}`); 122 | console.log(`functionParams: ${toolCall.function.arguments}`); 123 | 124 | const functionResult = namesToFunctions[functionName](functionParams); 125 | 126 | messages.push(response.choices[0].message); 127 | messages.push({ 128 | role: 'tool', 129 | name: functionName, 130 | content: functionResult, 131 | tool_call_id: toolCall.id, 132 | }); 133 | 134 | response = await client.chat({ 135 | model: model, 136 | messages: messages, 137 | tools: tools, 138 | }); 139 | 140 | console.log(response.choices[0].message.content); 141 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 📢🚨 This repository is now archived 📢🚨 2 | 3 | We have deprecated this package in favor of [mistralai/client-ts](https://github.com/mistralai/client-ts), which is the new official Mistral client, compatible with both TypeScript and JavaScript. 4 | 5 | You can find all installation information [here](https://github.com/mistralai/client-ts?tab=readme-ov-file#sdk-installation). 6 | 7 | This change is effective starting with version 1.0.0 of the [npm package](https://www.npmjs.com/package/@mistralai/mistralai?activeTab=readme). 8 | 9 | --- 10 | 11 | This JavaScript client is inspired by [cohere-typescript](https://github.com/cohere-ai/cohere-typescript) 12 | 13 | # Mistral JavaScript Client 14 | 15 | You can use the Mistral JavaScript client to interact with the Mistral AI API. 
16 | 17 | ## Installing 18 | 19 | You can install the library in your project using: 20 | 21 | `npm install @mistralai/mistralai` 22 | 23 | ## Usage 24 | 25 | You can watch a free course on using the Mistral JavaScript client [here.](https://scrimba.com/links/mistral) 26 | 27 | ### Set up 28 | 29 | ```typescript 30 | import MistralClient from '@mistralai/mistralai'; 31 | 32 | const apiKey = process.env.MISTRAL_API_KEY || 'your_api_key'; 33 | 34 | const client = new MistralClient(apiKey); 35 | ``` 36 | 37 | ### List models 38 | 39 | ```typescript 40 | const listModelsResponse = await client.listModels(); 41 | const listModels = listModelsResponse.data; 42 | listModels.forEach((model) => { 43 | console.log('Model:', model); 44 | }); 45 | ``` 46 | 47 | ### Chat with streaming 48 | 49 | ```typescript 50 | const chatStreamResponse = await client.chatStream({ 51 | model: 'mistral-tiny', 52 | messages: [{role: 'user', content: 'What is the best French cheese?'}], 53 | }); 54 | 55 | console.log('Chat Stream:'); 56 | for await (const chunk of chatStreamResponse) { 57 | if (chunk.choices[0].delta.content !== undefined) { 58 | const streamText = chunk.choices[0].delta.content; 59 | process.stdout.write(streamText); 60 | } 61 | } 62 | ``` 63 | 64 | ### Chat without streaming 65 | 66 | ```typescript 67 | const chatResponse = await client.chat({ 68 | model: 'mistral-tiny', 69 | messages: [{role: 'user', content: 'What is the best French cheese?'}], 70 | }); 71 | 72 | console.log('Chat:', chatResponse.choices[0].message.content); 73 | ``` 74 | 75 | ### Embeddings 76 | 77 | ```typescript 78 | const input = []; 79 | for (let i = 0; i < 1; i++) { 80 | input.push('What is the best French cheese?'); 81 | } 82 | 83 | const embeddingsBatchResponse = await client.embeddings({ 84 | model: 'mistral-embed', 85 | input: input, 86 | }); 87 | 88 | console.log('Embeddings Batch:', embeddingsBatchResponse.data); 89 | ``` 90 | 91 | ### Files 92 | 93 | ```typescript 94 | // Create a new file 95 | 
const file = fs.readFileSync('file.jsonl'); 96 | const createdFile = await client.files.create({ file }); 97 | 98 | // List files 99 | const files = await client.files.list(); 100 | 101 | // Retrieve a file 102 | const retrievedFile = await client.files.retrieve({ fileId: createdFile.id }); 103 | 104 | // Delete a file 105 | const deletedFile = await client.files.delete({ fileId: createdFile.id }); 106 | ``` 107 | 108 | ### Fine-tuning Jobs 109 | 110 | ```typescript 111 | // Create a new job 112 | const createdJob = await client.jobs.create({ 113 | model: 'open-mistral-7B', 114 | trainingFiles: [trainingFile.id], 115 | validationFiles: [validationFile.id], 116 | hyperparameters: { 117 | trainingSteps: 10, 118 | learningRate: 0.0001, 119 | }, 120 | }); 121 | 122 | // List jobs 123 | const jobs = await client.jobs.list(); 124 | 125 | // Retrieve a job 126 | const retrievedJob = await client.jobs.retrieve({ jobId: createdJob.id }); 127 | 128 | // Cancel a job 129 | const canceledJob = await client.jobs.cancel({ jobId: createdJob.id }); 130 | ``` 131 | 132 | ## Run examples 133 | 134 | You can run the examples in the examples directory by installing them locally: 135 | 136 | ```bash 137 | cd examples 138 | npm install . 139 | ``` 140 | 141 | ### API key setup 142 | 143 | Running the examples requires a Mistral AI API key. 144 | 145 | Get your own Mistral API Key: 146 | 147 | ### Run the examples 148 | 149 | ```bash 150 | MISTRAL_API_KEY='your_api_key' node chat_with_streaming.js 151 | ``` 152 | 153 | ### Persisting the API key in environment 154 | 155 | Set your Mistral API Key as an environment variable. You only need to do this once. 
156 | 157 | ```bash 158 | # set Mistral API Key (using zsh for example) 159 | $ echo 'export MISTRAL_API_KEY=[your_api_key]' >> ~/.zshenv 160 | 161 | # reload the environment (or just quit and open a new terminal) 162 | $ source ~/.zshenv 163 | ``` 164 | 165 | You can then run the examples without appending the API key: 166 | 167 | ```bash 168 | node chat_with_streaming.js 169 | ``` 170 | After the env variable setup the client will find the `MISTRAL_API_KEY` by itself 171 | 172 | ```typescript 173 | import MistralClient from '@mistralai/mistralai'; 174 | 175 | const client = new MistralClient(); 176 | ``` 177 | -------------------------------------------------------------------------------- /src/client.d.ts: -------------------------------------------------------------------------------- 1 | declare module "@mistralai/mistralai" { 2 | export interface ModelPermission { 3 | id: string; 4 | object: "model_permission"; 5 | created: number; 6 | allow_create_engine: boolean; 7 | allow_sampling: boolean; 8 | allow_logprobs: boolean; 9 | allow_search_indices: boolean; 10 | allow_view: boolean; 11 | allow_fine_tuning: boolean; 12 | organization: string; 13 | group: string | null; 14 | is_blocking: boolean; 15 | } 16 | 17 | export interface Model { 18 | id: string; 19 | object: "model"; 20 | created: number; 21 | owned_by: string; 22 | root: string | null; 23 | parent: string | null; 24 | permission: ModelPermission[]; 25 | } 26 | 27 | export interface ListModelsResponse { 28 | object: "list"; 29 | data: Model[]; 30 | } 31 | 32 | export interface Function { 33 | name: string; 34 | description: string; 35 | parameters: object; 36 | } 37 | 38 | export interface FunctionCall { 39 | name: string; 40 | arguments: string; 41 | } 42 | 43 | export interface ToolCalls { 44 | id: string; 45 | function: FunctionCall; 46 | } 47 | 48 | export interface ResponseFormat { 49 | type: "json_object"; 50 | } 51 | 52 | export interface TokenUsage { 53 | prompt_tokens: number; 54 | 
completion_tokens: number; 55 | total_tokens: number; 56 | } 57 | 58 | export interface ChatCompletionResponseChoice { 59 | index: number; 60 | message: { 61 | role: string; 62 | content: string; 63 | tool_calls: null | ToolCalls[]; 64 | }; 65 | finish_reason: string; 66 | } 67 | 68 | export interface ChatCompletionResponseChunkChoice { 69 | index: number; 70 | delta: { 71 | role?: string; 72 | content?: string; 73 | tool_calls?: ToolCalls[]; 74 | }; 75 | finish_reason: string; 76 | } 77 | 78 | export interface ChatCompletionResponse { 79 | id: string; 80 | object: "chat.completion"; 81 | created: number; 82 | model: string; 83 | choices: ChatCompletionResponseChoice[]; 84 | usage: TokenUsage; 85 | } 86 | 87 | export interface ChatCompletionResponseChunk { 88 | id: string; 89 | object: "chat.completion.chunk"; 90 | created: number; 91 | model: string; 92 | choices: ChatCompletionResponseChunkChoice[]; 93 | usage: TokenUsage | null; 94 | } 95 | 96 | export interface Embedding { 97 | id: string; 98 | object: "embedding"; 99 | embedding: number[]; 100 | } 101 | 102 | export interface EmbeddingResponse { 103 | id: string; 104 | object: "list"; 105 | data: Embedding[]; 106 | model: string; 107 | usage: TokenUsage; 108 | } 109 | 110 | export type Message = 111 | | { 112 | role: "system" | "user" | "assistant"; 113 | content: string | string[]; 114 | } 115 | | { 116 | role: "tool"; 117 | content: string | string[]; 118 | name: string; 119 | tool_call_id: string; 120 | }; 121 | 122 | export interface Tool { 123 | type: "function"; 124 | function: Function; 125 | } 126 | 127 | export interface ChatRequest { 128 | model: string; 129 | messages: Array; 130 | tools?: Array; 131 | temperature?: number; 132 | maxTokens?: number; 133 | topP?: number; 134 | randomSeed?: number; 135 | /** 136 | * @deprecated use safePrompt instead 137 | */ 138 | safeMode?: boolean; 139 | safePrompt?: boolean; 140 | toolChoice?: "auto" | "any" | "none"; 141 | responseFormat?: ResponseFormat; 142 | } 
143 | 144 | export interface CompletionRequest { 145 | model: string; 146 | prompt: string; 147 | suffix?: string; 148 | temperature?: number; 149 | maxTokens?: number; 150 | topP?: number; 151 | randomSeed?: number; 152 | stop?: string | string[]; 153 | } 154 | 155 | export interface ChatRequestOptions { 156 | signal?: AbortSignal; 157 | } 158 | 159 | class MistralClient { 160 | apiKey: string; 161 | endpoint: string; 162 | maxRetries: number; 163 | timeout: number; 164 | 165 | constructor( 166 | apiKey?: string, 167 | endpoint?: string, 168 | maxRetries?: number, 169 | timeout?: number 170 | ); 171 | 172 | listModels(): Promise; 173 | 174 | chat( 175 | request: ChatRequest, 176 | options?: ChatRequestOptions 177 | ): Promise; 178 | 179 | chatStream( 180 | request: ChatRequest, 181 | options?: ChatRequestOptions 182 | ): AsyncGenerator; 183 | 184 | completion( 185 | request: CompletionRequest, 186 | options?: ChatRequestOptions 187 | ): Promise; 188 | 189 | completionStream( 190 | request: CompletionRequest, 191 | options?: ChatRequestOptions 192 | ): AsyncGenerator; 193 | 194 | embeddings(options: { 195 | model: string; 196 | input: string | string[]; 197 | }): Promise; 198 | } 199 | 200 | export default MistralClient; 201 | } 202 | -------------------------------------------------------------------------------- /tests/client.test.js: -------------------------------------------------------------------------------- 1 | import MistralClient from '../src/client'; 2 | import { 3 | mockListModels, 4 | mockFetch, 5 | mockChatResponseStreamingPayload, 6 | mockEmbeddingRequest, 7 | mockEmbeddingResponsePayload, 8 | mockChatResponsePayload, 9 | mockFetchStream, 10 | } from './utils'; 11 | 12 | // Test the list models endpoint 13 | describe('Mistral Client', () => { 14 | let client; 15 | beforeEach(() => { 16 | client = new MistralClient(); 17 | }); 18 | 19 | describe('chat()', () => { 20 | it('should return a chat response object', async() => { 21 | // Mock the fetch 
function 22 | const mockResponse = mockChatResponsePayload(); 23 | client._fetch = mockFetch(200, mockResponse); 24 | 25 | const response = await client.chat({ 26 | model: 'mistral-small-latest', 27 | messages: [ 28 | { 29 | role: 'user', 30 | content: 'What is the best French cheese?', 31 | }, 32 | ], 33 | }); 34 | expect(response).toEqual(mockResponse); 35 | }); 36 | 37 | it('should return a chat response object if safeMode is set', async() => { 38 | // Mock the fetch function 39 | const mockResponse = mockChatResponsePayload(); 40 | client._fetch = mockFetch(200, mockResponse); 41 | 42 | const response = await client.chat({ 43 | model: 'mistral-small-latest', 44 | messages: [ 45 | { 46 | role: 'user', 47 | content: 'What is the best French cheese?', 48 | }, 49 | ], 50 | safeMode: true, 51 | }); 52 | expect(response).toEqual(mockResponse); 53 | }); 54 | 55 | it('should return a chat response object if safePrompt is set', async() => { 56 | // Mock the fetch function 57 | const mockResponse = mockChatResponsePayload(); 58 | client._fetch = mockFetch(200, mockResponse); 59 | 60 | const response = await client.chat({ 61 | model: 'mistral-small-latest', 62 | messages: [ 63 | { 64 | role: 'user', 65 | content: 'What is the best French cheese?', 66 | }, 67 | ], 68 | safePrompt: true, 69 | }); 70 | expect(response).toEqual(mockResponse); 71 | }); 72 | }); 73 | 74 | describe('chatStream()', () => { 75 | it('should return parsed, streamed response', async() => { 76 | // Mock the fetch function 77 | const mockResponse = mockChatResponseStreamingPayload(); 78 | client._fetch = mockFetchStream(200, mockResponse); 79 | 80 | const response = await client.chatStream({ 81 | model: 'mistral-small-latest', 82 | messages: [ 83 | { 84 | role: 'user', 85 | content: 'What is the best French cheese?', 86 | }, 87 | ], 88 | }); 89 | 90 | const parsedResponse = []; 91 | for await (const r of response) { 92 | parsedResponse.push(r); 93 | } 94 | 95 | 
expect(parsedResponse.length).toEqual(11); 96 | }); 97 | 98 | it('should return parsed, streamed response with safeMode', async() => { 99 | // Mock the fetch function 100 | const mockResponse = mockChatResponseStreamingPayload(); 101 | client._fetch = mockFetchStream(200, mockResponse); 102 | 103 | const response = await client.chatStream({ 104 | model: 'mistral-small-latest', 105 | messages: [ 106 | { 107 | role: 'user', 108 | content: 'What is the best French cheese?', 109 | }, 110 | ], 111 | safeMode: true, 112 | }); 113 | 114 | const parsedResponse = []; 115 | for await (const r of response) { 116 | parsedResponse.push(r); 117 | } 118 | 119 | expect(parsedResponse.length).toEqual(11); 120 | }); 121 | 122 | it('should return parsed, streamed response with safePrompt', async() => { 123 | // Mock the fetch function 124 | const mockResponse = mockChatResponseStreamingPayload(); 125 | client._fetch = mockFetchStream(200, mockResponse); 126 | 127 | const response = await client.chatStream({ 128 | model: 'mistral-small-latest', 129 | messages: [ 130 | { 131 | role: 'user', 132 | content: 'What is the best French cheese?', 133 | }, 134 | ], 135 | safePrompt: true, 136 | }); 137 | 138 | const parsedResponse = []; 139 | for await (const r of response) { 140 | parsedResponse.push(r); 141 | } 142 | 143 | expect(parsedResponse.length).toEqual(11); 144 | }); 145 | }); 146 | 147 | describe('embeddings()', () => { 148 | it('should return embeddings', async() => { 149 | // Mock the fetch function 150 | const mockResponse = mockEmbeddingResponsePayload(); 151 | client._fetch = mockFetch(200, mockResponse); 152 | 153 | const response = await client.embeddings(mockEmbeddingRequest); 154 | expect(response).toEqual(mockResponse); 155 | }); 156 | }); 157 | 158 | describe('embeddings() batched', () => { 159 | it('should return batched embeddings', async() => { 160 | // Mock the fetch function 161 | const mockResponse = mockEmbeddingResponsePayload(10); 162 | client._fetch = 
mockFetch(200, mockResponse); 163 | 164 | const response = await client.embeddings(mockEmbeddingRequest); 165 | expect(response).toEqual(mockResponse); 166 | }); 167 | }); 168 | 169 | describe('listModels()', () => { 170 | it('should return a list of models', async() => { 171 | // Mock the fetch function 172 | const mockResponse = mockListModels(); 173 | client._fetch = mockFetch(200, mockResponse); 174 | 175 | const response = await client.listModels(); 176 | expect(response).toEqual(mockResponse); 177 | }); 178 | }); 179 | 180 | describe('completion()', () => { 181 | it('should return a chat response object', async() => { 182 | // Mock the fetch function 183 | const mockResponse = mockChatResponsePayload(); 184 | client._fetch = mockFetch(200, mockResponse); 185 | 186 | const response = await client.completion({ 187 | model: 'mistral-small-latest', 188 | prompt: '# this is a', 189 | }); 190 | expect(response).toEqual(mockResponse); 191 | }); 192 | }); 193 | }); 194 | -------------------------------------------------------------------------------- /tests/utils.js: -------------------------------------------------------------------------------- 1 | import jest from 'jest-mock'; 2 | 3 | /** 4 | * Mock the fetch function 5 | * @param {*} status 6 | * @param {*} payload 7 | * @return {Object} 8 | */ 9 | export function mockFetch(status, payload) { 10 | return jest.fn(() => 11 | Promise.resolve({ 12 | json: () => Promise.resolve(payload), 13 | text: () => Promise.resolve(JSON.stringify(payload)), 14 | status, 15 | ok: status >= 200 && status < 300, 16 | }), 17 | ); 18 | } 19 | 20 | /** 21 | * Mock fetch stream 22 | * @param {*} status 23 | * @param {*} payload 24 | * @return {Object} 25 | */ 26 | export function mockFetchStream(status, payload) { 27 | const asyncIterator = async function* () { 28 | while (true) { 29 | // Read from the stream 30 | const value = payload.shift(); 31 | // Exit if we're done 32 | if (!value) return; 33 | // Else yield the chunk 34 | yield 
value; 35 | } 36 | }; 37 | 38 | return jest.fn(() => 39 | Promise.resolve({ 40 | // body is a ReadableStream of the objects in payload list 41 | body: asyncIterator(), 42 | status, 43 | ok: status >= 200 && status < 300, 44 | }), 45 | ); 46 | } 47 | 48 | /** 49 | * Mock models list 50 | * @return {Object} 51 | */ 52 | export function mockListModels() { 53 | return { 54 | object: 'list', 55 | data: [ 56 | { 57 | id: 'mistral-medium', 58 | object: 'model', 59 | created: 1703186988, 60 | owned_by: 'mistralai', 61 | root: null, 62 | parent: null, 63 | permission: [ 64 | { 65 | id: 'modelperm-15bebaf316264adb84b891bf06a84933', 66 | object: 'model_permission', 67 | created: 1703186988, 68 | allow_create_engine: false, 69 | allow_sampling: true, 70 | allow_logprobs: false, 71 | allow_search_indices: false, 72 | allow_view: true, 73 | allow_fine_tuning: false, 74 | organization: '*', 75 | group: null, 76 | is_blocking: false, 77 | }, 78 | ], 79 | }, 80 | { 81 | id: 'mistral-small-latest', 82 | object: 'model', 83 | created: 1703186988, 84 | owned_by: 'mistralai', 85 | root: null, 86 | parent: null, 87 | permission: [ 88 | { 89 | id: 'modelperm-d0dced5c703242fa862f4ca3f241c00e', 90 | object: 'model_permission', 91 | created: 1703186988, 92 | allow_create_engine: false, 93 | allow_sampling: true, 94 | allow_logprobs: false, 95 | allow_search_indices: false, 96 | allow_view: true, 97 | allow_fine_tuning: false, 98 | organization: '*', 99 | group: null, 100 | is_blocking: false, 101 | }, 102 | ], 103 | }, 104 | { 105 | id: 'mistral-tiny', 106 | object: 'model', 107 | created: 1703186988, 108 | owned_by: 'mistralai', 109 | root: null, 110 | parent: null, 111 | permission: [ 112 | { 113 | id: 'modelperm-0e64e727c3a94f17b29f8895d4be2910', 114 | object: 'model_permission', 115 | created: 1703186988, 116 | allow_create_engine: false, 117 | allow_sampling: true, 118 | allow_logprobs: false, 119 | allow_search_indices: false, 120 | allow_view: true, 121 | allow_fine_tuning: false, 
122 | organization: '*', 123 | group: null, 124 | is_blocking: false, 125 | }, 126 | ], 127 | }, 128 | { 129 | id: 'mistral-embed', 130 | object: 'model', 131 | created: 1703186988, 132 | owned_by: 'mistralai', 133 | root: null, 134 | parent: null, 135 | permission: [ 136 | { 137 | id: 'modelperm-ebdff9046f524e628059447b5932e3ad', 138 | object: 'model_permission', 139 | created: 1703186988, 140 | allow_create_engine: false, 141 | allow_sampling: true, 142 | allow_logprobs: false, 143 | allow_search_indices: false, 144 | allow_view: true, 145 | allow_fine_tuning: false, 146 | organization: '*', 147 | group: null, 148 | is_blocking: false, 149 | }, 150 | ], 151 | }, 152 | ], 153 | }; 154 | } 155 | 156 | /** 157 | * Mock chat completion object 158 | * @return {Object} 159 | */ 160 | export function mockChatResponsePayload() { 161 | return { 162 | id: 'chat-98c8c60e3fbf4fc49658eddaf447357c', 163 | object: 'chat.completion', 164 | created: 1703165682, 165 | choices: [ 166 | { 167 | finish_reason: 'stop', 168 | message: { 169 | role: 'assistant', 170 | content: 'What is the best French cheese?', 171 | }, 172 | index: 0, 173 | }, 174 | ], 175 | model: 'mistral-small-latest', 176 | usage: {prompt_tokens: 90, total_tokens: 90, completion_tokens: 0}, 177 | }; 178 | } 179 | 180 | /** 181 | * Mock chat completion stream 182 | * @return {Object} 183 | */ 184 | export function mockChatResponseStreamingPayload() { 185 | const encoder = new TextEncoder(); 186 | const firstMessage = [ 187 | encoder.encode( 188 | 'data: ' + 189 | JSON.stringify({ 190 | id: 'cmpl-8cd9019d21ba490aa6b9740f5d0a883e', 191 | model: 'mistral-small-latest', 192 | choices: [ 193 | { 194 | index: 0, 195 | delta: {role: 'assistant'}, 196 | finish_reason: null, 197 | }, 198 | ], 199 | }) + 200 | '\n\n', 201 | ), 202 | ]; 203 | const lastMessage = [encoder.encode('data: [DONE]\n\n')]; 204 | 205 | const dataMessages = []; 206 | for (let i = 0; i < 10; i++) { 207 | dataMessages.push( 208 | encoder.encode( 209 | 
'data: ' + 210 | JSON.stringify({ 211 | id: 'cmpl-8cd9019d21ba490aa6b9740f5d0a883e', 212 | object: 'chat.completion.chunk', 213 | created: 1703168544, 214 | model: 'mistral-small-latest', 215 | choices: [ 216 | { 217 | index: i, 218 | delta: {content: `stream response ${i}`}, 219 | finish_reason: null, 220 | }, 221 | ], 222 | }) + 223 | '\n\n', 224 | ), 225 | ); 226 | } 227 | 228 | return firstMessage.concat(dataMessages).concat(lastMessage); 229 | } 230 | 231 | /** 232 | * Mock embeddings response 233 | * @param {number} batchSize 234 | * @return {Object} 235 | */ 236 | export function mockEmbeddingResponsePayload(batchSize = 1) { 237 | return { 238 | id: 'embd-98c8c60e3fbf4fc49658eddaf447357c', 239 | object: 'list', 240 | data: 241 | [ 242 | { 243 | object: 'embedding', 244 | embedding: [-0.018585205078125, 0.027099609375, 0.02587890625], 245 | index: 0, 246 | }, 247 | ] * batchSize, 248 | model: 'mistral-embed', 249 | usage: {prompt_tokens: 90, total_tokens: 90, completion_tokens: 0}, 250 | }; 251 | } 252 | 253 | /** 254 | * Mock embeddings request payload 255 | * @return {Object} 256 | */ 257 | export function mockEmbeddingRequest() { 258 | return { 259 | model: 'mistral-embed', 260 | input: 'embed', 261 | }; 262 | } 263 | 264 | /** 265 | * Mock file response payload 266 | * @return {Object} 267 | */ 268 | export function mockFileResponsePayload() { 269 | return { 270 | id: 'fileId', 271 | object: 'file', 272 | bytes: 0, 273 | created_at: 1633046400000, 274 | filename: 'file.jsonl', 275 | purpose: 'fine-tune', 276 | }; 277 | } 278 | 279 | /** 280 | * Mock files response payload 281 | * @return {Object} 282 | */ 283 | export function mockFilesResponsePayload() { 284 | return { 285 | data: [ 286 | { 287 | id: 'fileId', 288 | object: 'file', 289 | bytes: 0, 290 | created_at: 1633046400000, 291 | filename: 'file.jsonl', 292 | purpose: 'fine-tune', 293 | }, 294 | ], 295 | object: 'list', 296 | }; 297 | } 298 | 299 | /** 300 | * Mock deleted file response payload 301 
| * @return {Object} 302 | */ 303 | export function mockDeletedFileResponsePayload() { 304 | return { 305 | id: 'fileId', 306 | object: 'file', 307 | deleted: true, 308 | }; 309 | } 310 | 311 | /** 312 | * Mock job response payload 313 | * @return {Object} 314 | */ 315 | export function mockJobResponsePayload() { 316 | return { 317 | id: 'jobId', 318 | hyperparameters: { 319 | training_steps: 1800, 320 | learning_rate: 1.0e-4, 321 | }, 322 | fine_tuned_model: 'fine_tuned_model_id', 323 | model: 'mistral-medium', 324 | status: 'QUEUED', 325 | job_type: 'fine_tuning', 326 | created_at: 1633046400000, 327 | modified_at: 1633046400000, 328 | training_files: ['file1.jsonl', 'file2.jsonl'], 329 | validation_files: ['file3.jsonl', 'file4.jsonl'], 330 | object: 'job', 331 | }; 332 | } 333 | 334 | /** 335 | * Mock jobs response payload 336 | * @return {Object} 337 | */ 338 | export function mockJobsResponsePayload() { 339 | return { 340 | data: [ 341 | { 342 | id: 'jobId1', 343 | hyperparameters: { 344 | training_steps: 1800, 345 | learning_rate: 1.0e-4, 346 | }, 347 | fine_tuned_model: 'fine_tuned_model_id1', 348 | model: 'mistral-medium', 349 | status: 'QUEUED', 350 | job_type: 'fine_tuning', 351 | created_at: 1633046400000, 352 | modified_at: 1633046400000, 353 | training_files: ['file1.jsonl', 'file2.jsonl'], 354 | validation_files: ['file3.jsonl', 'file4.jsonl'], 355 | object: 'job', 356 | }, 357 | { 358 | id: 'jobId2', 359 | hyperparameters: { 360 | training_steps: 1800, 361 | learning_rate: 1.0e-4, 362 | }, 363 | fine_tuned_model: 'fine_tuned_model_id2', 364 | model: 'mistral-medium', 365 | status: 'RUNNING', 366 | job_type: 'fine_tuning', 367 | created_at: 1633046400000, 368 | modified_at: 1633046400000, 369 | training_files: ['file5.jsonl', 'file6.jsonl'], 370 | validation_files: ['file7.jsonl', 'file8.jsonl'], 371 | object: 'job', 372 | }, 373 | ], 374 | object: 'list', 375 | }; 376 | } 377 | 378 | /** 379 | * Mock deleted job response payload 380 | * @return 
{Object} 381 | */ 382 | export function mockDeletedJobResponsePayload() { 383 | return { 384 | id: 'jobId', 385 | object: 'job', 386 | deleted: true, 387 | }; 388 | } 389 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /examples/typescript/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@mistralai/client-examples-ts", 3 | "lockfileVersion": 3, 4 | "requires": true, 5 | "packages": { 6 | "": { 7 | "name": "@mistralai/client-examples-ts", 8 | "dependencies": { 9 | "@mistralai/mistralai": "file:../..", 10 | "tsx": "^4.9.3" 11 | }, 12 | "devDependencies": { 13 | "typescript": "^5.4.5" 14 | } 15 | }, 16 | "../..": { 17 | "name": "@mistralai/mistralai", 18 | "version": "0.0.1", 19 | "license": "ISC", 20 | "dependencies": { 21 | "node-fetch": "^2.6.7" 22 | }, 23 | "devDependencies": { 24 | "eslint": "^8.55.0", 25 | "eslint-config-google": "^0.14.0", 26 | "jest": "^29.7.0", 27 | "prettier": "2.8.8" 28 | } 29 | }, 30 | "node_modules/@esbuild/aix-ppc64": { 31 | "version": "0.20.2", 32 | "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.20.2.tgz", 33 | "integrity": "sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==", 34 | "cpu": [ 
35 | "ppc64" 36 | ], 37 | "optional": true, 38 | "os": [ 39 | "aix" 40 | ], 41 | "engines": { 42 | "node": ">=12" 43 | } 44 | }, 45 | "node_modules/@esbuild/android-arm": { 46 | "version": "0.20.2", 47 | "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.20.2.tgz", 48 | "integrity": "sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==", 49 | "cpu": [ 50 | "arm" 51 | ], 52 | "optional": true, 53 | "os": [ 54 | "android" 55 | ], 56 | "engines": { 57 | "node": ">=12" 58 | } 59 | }, 60 | "node_modules/@esbuild/android-arm64": { 61 | "version": "0.20.2", 62 | "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.20.2.tgz", 63 | "integrity": "sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==", 64 | "cpu": [ 65 | "arm64" 66 | ], 67 | "optional": true, 68 | "os": [ 69 | "android" 70 | ], 71 | "engines": { 72 | "node": ">=12" 73 | } 74 | }, 75 | "node_modules/@esbuild/android-x64": { 76 | "version": "0.20.2", 77 | "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.20.2.tgz", 78 | "integrity": "sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==", 79 | "cpu": [ 80 | "x64" 81 | ], 82 | "optional": true, 83 | "os": [ 84 | "android" 85 | ], 86 | "engines": { 87 | "node": ">=12" 88 | } 89 | }, 90 | "node_modules/@esbuild/darwin-arm64": { 91 | "version": "0.20.2", 92 | "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.20.2.tgz", 93 | "integrity": "sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==", 94 | "cpu": [ 95 | "arm64" 96 | ], 97 | "optional": true, 98 | "os": [ 99 | "darwin" 100 | ], 101 | "engines": { 102 | "node": ">=12" 103 | } 104 | }, 105 | "node_modules/@esbuild/darwin-x64": { 106 | "version": "0.20.2", 107 | "resolved": 
"https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.20.2.tgz", 108 | "integrity": "sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==", 109 | "cpu": [ 110 | "x64" 111 | ], 112 | "optional": true, 113 | "os": [ 114 | "darwin" 115 | ], 116 | "engines": { 117 | "node": ">=12" 118 | } 119 | }, 120 | "node_modules/@esbuild/freebsd-arm64": { 121 | "version": "0.20.2", 122 | "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.20.2.tgz", 123 | "integrity": "sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==", 124 | "cpu": [ 125 | "arm64" 126 | ], 127 | "optional": true, 128 | "os": [ 129 | "freebsd" 130 | ], 131 | "engines": { 132 | "node": ">=12" 133 | } 134 | }, 135 | "node_modules/@esbuild/freebsd-x64": { 136 | "version": "0.20.2", 137 | "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.20.2.tgz", 138 | "integrity": "sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==", 139 | "cpu": [ 140 | "x64" 141 | ], 142 | "optional": true, 143 | "os": [ 144 | "freebsd" 145 | ], 146 | "engines": { 147 | "node": ">=12" 148 | } 149 | }, 150 | "node_modules/@esbuild/linux-arm": { 151 | "version": "0.20.2", 152 | "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.20.2.tgz", 153 | "integrity": "sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==", 154 | "cpu": [ 155 | "arm" 156 | ], 157 | "optional": true, 158 | "os": [ 159 | "linux" 160 | ], 161 | "engines": { 162 | "node": ">=12" 163 | } 164 | }, 165 | "node_modules/@esbuild/linux-arm64": { 166 | "version": "0.20.2", 167 | "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.20.2.tgz", 168 | "integrity": "sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==", 169 | "cpu": [ 170 | "arm64" 171 | ], 172 | 
"optional": true, 173 | "os": [ 174 | "linux" 175 | ], 176 | "engines": { 177 | "node": ">=12" 178 | } 179 | }, 180 | "node_modules/@esbuild/linux-ia32": { 181 | "version": "0.20.2", 182 | "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.20.2.tgz", 183 | "integrity": "sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==", 184 | "cpu": [ 185 | "ia32" 186 | ], 187 | "optional": true, 188 | "os": [ 189 | "linux" 190 | ], 191 | "engines": { 192 | "node": ">=12" 193 | } 194 | }, 195 | "node_modules/@esbuild/linux-loong64": { 196 | "version": "0.20.2", 197 | "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.20.2.tgz", 198 | "integrity": "sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==", 199 | "cpu": [ 200 | "loong64" 201 | ], 202 | "optional": true, 203 | "os": [ 204 | "linux" 205 | ], 206 | "engines": { 207 | "node": ">=12" 208 | } 209 | }, 210 | "node_modules/@esbuild/linux-mips64el": { 211 | "version": "0.20.2", 212 | "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.20.2.tgz", 213 | "integrity": "sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==", 214 | "cpu": [ 215 | "mips64el" 216 | ], 217 | "optional": true, 218 | "os": [ 219 | "linux" 220 | ], 221 | "engines": { 222 | "node": ">=12" 223 | } 224 | }, 225 | "node_modules/@esbuild/linux-ppc64": { 226 | "version": "0.20.2", 227 | "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.20.2.tgz", 228 | "integrity": "sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==", 229 | "cpu": [ 230 | "ppc64" 231 | ], 232 | "optional": true, 233 | "os": [ 234 | "linux" 235 | ], 236 | "engines": { 237 | "node": ">=12" 238 | } 239 | }, 240 | "node_modules/@esbuild/linux-riscv64": { 241 | "version": "0.20.2", 242 | "resolved": 
"https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.20.2.tgz", 243 | "integrity": "sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==", 244 | "cpu": [ 245 | "riscv64" 246 | ], 247 | "optional": true, 248 | "os": [ 249 | "linux" 250 | ], 251 | "engines": { 252 | "node": ">=12" 253 | } 254 | }, 255 | "node_modules/@esbuild/linux-s390x": { 256 | "version": "0.20.2", 257 | "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.20.2.tgz", 258 | "integrity": "sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==", 259 | "cpu": [ 260 | "s390x" 261 | ], 262 | "optional": true, 263 | "os": [ 264 | "linux" 265 | ], 266 | "engines": { 267 | "node": ">=12" 268 | } 269 | }, 270 | "node_modules/@esbuild/linux-x64": { 271 | "version": "0.20.2", 272 | "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.20.2.tgz", 273 | "integrity": "sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==", 274 | "cpu": [ 275 | "x64" 276 | ], 277 | "optional": true, 278 | "os": [ 279 | "linux" 280 | ], 281 | "engines": { 282 | "node": ">=12" 283 | } 284 | }, 285 | "node_modules/@esbuild/netbsd-x64": { 286 | "version": "0.20.2", 287 | "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.20.2.tgz", 288 | "integrity": "sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==", 289 | "cpu": [ 290 | "x64" 291 | ], 292 | "optional": true, 293 | "os": [ 294 | "netbsd" 295 | ], 296 | "engines": { 297 | "node": ">=12" 298 | } 299 | }, 300 | "node_modules/@esbuild/openbsd-x64": { 301 | "version": "0.20.2", 302 | "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.20.2.tgz", 303 | "integrity": "sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==", 304 | "cpu": [ 305 | "x64" 306 | ], 307 | 
"optional": true, 308 | "os": [ 309 | "openbsd" 310 | ], 311 | "engines": { 312 | "node": ">=12" 313 | } 314 | }, 315 | "node_modules/@esbuild/sunos-x64": { 316 | "version": "0.20.2", 317 | "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.20.2.tgz", 318 | "integrity": "sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==", 319 | "cpu": [ 320 | "x64" 321 | ], 322 | "optional": true, 323 | "os": [ 324 | "sunos" 325 | ], 326 | "engines": { 327 | "node": ">=12" 328 | } 329 | }, 330 | "node_modules/@esbuild/win32-arm64": { 331 | "version": "0.20.2", 332 | "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.20.2.tgz", 333 | "integrity": "sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==", 334 | "cpu": [ 335 | "arm64" 336 | ], 337 | "optional": true, 338 | "os": [ 339 | "win32" 340 | ], 341 | "engines": { 342 | "node": ">=12" 343 | } 344 | }, 345 | "node_modules/@esbuild/win32-ia32": { 346 | "version": "0.20.2", 347 | "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.20.2.tgz", 348 | "integrity": "sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==", 349 | "cpu": [ 350 | "ia32" 351 | ], 352 | "optional": true, 353 | "os": [ 354 | "win32" 355 | ], 356 | "engines": { 357 | "node": ">=12" 358 | } 359 | }, 360 | "node_modules/@esbuild/win32-x64": { 361 | "version": "0.20.2", 362 | "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.20.2.tgz", 363 | "integrity": "sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==", 364 | "cpu": [ 365 | "x64" 366 | ], 367 | "optional": true, 368 | "os": [ 369 | "win32" 370 | ], 371 | "engines": { 372 | "node": ">=12" 373 | } 374 | }, 375 | "node_modules/@mistralai/mistralai": { 376 | "resolved": "../..", 377 | "link": true 378 | }, 379 | "node_modules/esbuild": { 380 | 
"version": "0.20.2", 381 | "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.20.2.tgz", 382 | "integrity": "sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==", 383 | "hasInstallScript": true, 384 | "bin": { 385 | "esbuild": "bin/esbuild" 386 | }, 387 | "engines": { 388 | "node": ">=12" 389 | }, 390 | "optionalDependencies": { 391 | "@esbuild/aix-ppc64": "0.20.2", 392 | "@esbuild/android-arm": "0.20.2", 393 | "@esbuild/android-arm64": "0.20.2", 394 | "@esbuild/android-x64": "0.20.2", 395 | "@esbuild/darwin-arm64": "0.20.2", 396 | "@esbuild/darwin-x64": "0.20.2", 397 | "@esbuild/freebsd-arm64": "0.20.2", 398 | "@esbuild/freebsd-x64": "0.20.2", 399 | "@esbuild/linux-arm": "0.20.2", 400 | "@esbuild/linux-arm64": "0.20.2", 401 | "@esbuild/linux-ia32": "0.20.2", 402 | "@esbuild/linux-loong64": "0.20.2", 403 | "@esbuild/linux-mips64el": "0.20.2", 404 | "@esbuild/linux-ppc64": "0.20.2", 405 | "@esbuild/linux-riscv64": "0.20.2", 406 | "@esbuild/linux-s390x": "0.20.2", 407 | "@esbuild/linux-x64": "0.20.2", 408 | "@esbuild/netbsd-x64": "0.20.2", 409 | "@esbuild/openbsd-x64": "0.20.2", 410 | "@esbuild/sunos-x64": "0.20.2", 411 | "@esbuild/win32-arm64": "0.20.2", 412 | "@esbuild/win32-ia32": "0.20.2", 413 | "@esbuild/win32-x64": "0.20.2" 414 | } 415 | }, 416 | "node_modules/fsevents": { 417 | "version": "2.3.3", 418 | "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", 419 | "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", 420 | "hasInstallScript": true, 421 | "optional": true, 422 | "os": [ 423 | "darwin" 424 | ], 425 | "engines": { 426 | "node": "^8.16.0 || ^10.6.0 || >=11.0.0" 427 | } 428 | }, 429 | "node_modules/get-tsconfig": { 430 | "version": "4.7.4", 431 | "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.7.4.tgz", 432 | "integrity": 
"sha512-ofbkKj+0pjXjhejr007J/fLf+sW+8H7K5GCm+msC8q3IpvgjobpyPqSRFemNyIMxklC0zeJpi7VDFna19FacvQ==", 433 | "dependencies": { 434 | "resolve-pkg-maps": "^1.0.0" 435 | }, 436 | "funding": { 437 | "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" 438 | } 439 | }, 440 | "node_modules/resolve-pkg-maps": { 441 | "version": "1.0.0", 442 | "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", 443 | "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", 444 | "funding": { 445 | "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" 446 | } 447 | }, 448 | "node_modules/tsx": { 449 | "version": "4.9.3", 450 | "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.9.3.tgz", 451 | "integrity": "sha512-czVbetlILiyJZI5zGlj2kw9vFiSeyra9liPD4nG+Thh4pKTi0AmMEQ8zdV/L2xbIVKrIqif4sUNrsMAOksx9Zg==", 452 | "dependencies": { 453 | "esbuild": "~0.20.2", 454 | "get-tsconfig": "^4.7.3" 455 | }, 456 | "bin": { 457 | "tsx": "dist/cli.mjs" 458 | }, 459 | "engines": { 460 | "node": ">=18.0.0" 461 | }, 462 | "optionalDependencies": { 463 | "fsevents": "~2.3.3" 464 | } 465 | }, 466 | "node_modules/typescript": { 467 | "version": "5.4.5", 468 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz", 469 | "integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==", 470 | "dev": true, 471 | "bin": { 472 | "tsc": "bin/tsc", 473 | "tsserver": "bin/tsserver" 474 | }, 475 | "engines": { 476 | "node": ">=14.17" 477 | } 478 | } 479 | } 480 | } 481 | -------------------------------------------------------------------------------- /src/client.js: -------------------------------------------------------------------------------- 1 | import FilesClient from './files.js'; 2 | import JobsClient from './jobs.js'; 3 | 4 | const VERSION = '0.5.0'; 5 | const RETRY_STATUS_CODES = [429, 500, 502, 503, 504]; 6 | const ENDPOINT = 
'https://api.mistral.ai'; 7 | 8 | // We can't use a top level await if eventually this is to be converted 9 | // to typescript and compiled to commonjs, or similarly using babel. 10 | const configuredFetch = Promise.resolve( 11 | globalThis.fetch ?? import('node-fetch').then((m) => m.default), 12 | ); 13 | 14 | /** 15 | * MistralAPIError 16 | * @return {MistralAPIError} 17 | * @extends {Error} 18 | */ 19 | class MistralAPIError extends Error { 20 | /** 21 | * A simple error class for Mistral API errors 22 | * @param {*} message 23 | */ 24 | constructor(message) { 25 | super(message); 26 | this.name = 'MistralAPIError'; 27 | } 28 | } 29 | 30 | /** 31 | * @param {Array} signals to merge 32 | * @return {AbortSignal} signal which will abort when any of signals abort 33 | */ 34 | function combineSignals(signals) { 35 | const controller = new AbortController(); 36 | signals.forEach((signal) => { 37 | if (!signal) { 38 | return; 39 | } 40 | 41 | signal.addEventListener( 42 | 'abort', 43 | () => { 44 | controller.abort(signal.reason); 45 | }, 46 | {once: true}, 47 | ); 48 | 49 | if (signal.aborted) { 50 | controller.abort(signal.reason); 51 | } 52 | }); 53 | 54 | return controller.signal; 55 | } 56 | 57 | /** 58 | * MistralClient 59 | * @return {MistralClient} 60 | */ 61 | class MistralClient { 62 | /** 63 | * A simple and lightweight client for the Mistral API 64 | * @param {*} apiKey can be set as an environment variable MISTRAL_API_KEY, 65 | * or provided in this parameter 66 | * @param {*} endpoint defaults to https://api.mistral.ai 67 | * @param {*} maxRetries defaults to 5 68 | * @param {*} timeout defaults to 120 seconds 69 | */ 70 | constructor( 71 | apiKey = process.env.MISTRAL_API_KEY, 72 | endpoint = ENDPOINT, 73 | maxRetries = 5, 74 | timeout = 120, 75 | ) { 76 | this.endpoint = endpoint; 77 | this.apiKey = apiKey; 78 | 79 | this.maxRetries = maxRetries; 80 | this.timeout = timeout; 81 | 82 | if (this.endpoint.indexOf('inference.azure.com')) { 83 | 
this.modelDefault = 'mistral'; 84 | } 85 | 86 | this.files = new FilesClient(this); 87 | this.jobs = new JobsClient(this); 88 | } 89 | 90 | /** 91 | * @return {Promise} 92 | * @private 93 | * @param {...*} args - fetch args 94 | * hook point for non-global fetch override 95 | */ 96 | async _fetch(...args) { 97 | const fetchFunc = await configuredFetch; 98 | return fetchFunc(...args); 99 | } 100 | 101 | /** 102 | * 103 | * @param {*} method 104 | * @param {*} path 105 | * @param {*} request 106 | * @param {*} signal 107 | * @param {*} formData 108 | * @return {Promise<*>} 109 | */ 110 | _request = async function(method, path, request, signal, formData = null) { 111 | const url = `${this.endpoint}/${path}`; 112 | const options = { 113 | method: method, 114 | headers: { 115 | 'User-Agent': `mistral-client-js/${VERSION}`, 116 | 'Accept': request?.stream ? 'text/event-stream' : 'application/json', 117 | 'Content-Type': 'application/json', 118 | 'Authorization': `Bearer ${this.apiKey}`, 119 | }, 120 | signal: combineSignals([ 121 | AbortSignal.timeout(this.timeout * 1000), 122 | signal, 123 | ]), 124 | body: method !== 'get' ? formData ?? 
/**
 * Issues one HTTP request to the API with retry and exponential backoff.
 *
 * Retries on network errors and on the status codes in RETRY_STATUS_CODES;
 * throws MistralAPIError immediately for any other non-OK status. For
 * streaming requests the raw (async-iterable) body is returned instead of
 * parsed JSON.
 *
 * @param {string} method - lowercase HTTP method, e.g. 'get' or 'post'
 * @param {string} path - API path relative to the endpoint, e.g. 'v1/models'
 * @param {Object} [request] - JSON-serializable request body
 * @param {AbortSignal} [signal] - optional caller-supplied abort signal,
 *     combined with the per-attempt timeout signal
 * @param {FormData} [formData] - multipart body; when set, it replaces the
 *     JSON body and the Content-Type header is left to fetch
 * @return {Promise<*>} parsed JSON, or an async-iterable stream body
 * @private
 */
_request = async function(method, path, request, signal, formData = null) {
  const url = `${this.endpoint}/${path}`;
  const headers = {
    'User-Agent': `mistral-client-js/${VERSION}`,
    'Accept': request?.stream ? 'text/event-stream' : 'application/json',
    'Content-Type': 'application/json',
    'Authorization': `Bearer ${this.apiKey}`,
  };
  if (formData) {
    // Let fetch compute the multipart boundary itself.
    delete headers['Content-Type'];
  }

  // Exponential backoff: 1s, 2s, 4s, ... between attempts.
  const backoff = (attempt) =>
    new Promise((resolve) =>
      setTimeout(resolve, Math.pow(2, attempt + 1) * 500),
    );

  for (let attempts = 0; attempts < this.maxRetries; attempts++) {
    // BUG FIX: the combined timeout/abort signal was previously created
    // once, before the retry loop. After a single timeout fired, every
    // subsequent retry reused the already-aborted signal and failed
    // instantly. Build a fresh per-attempt signal instead.
    const options = {
      method: method,
      headers: headers,
      signal: combineSignals([
        AbortSignal.timeout(this.timeout * 1000),
        signal,
      ]),
      body: method !== 'get' ? formData ?? JSON.stringify(request) : null,
      // Non-standard option honored by node-fetch v2; ignored by the
      // WHATWG fetch, where the AbortSignal above enforces the timeout.
      timeout: this.timeout * 1000,
    };

    try {
      const response = await this._fetch(url, options);

      if (response.ok) {
        if (request?.stream) {
          // node-fetch (and test mocks) return a Node stream which is
          // already async-iterable and has no getReader().
          if (typeof response.body.getReader === 'undefined') {
            return response.body;
          }
          // WHATWG ReadableStream: Chrome does not support async
          // iteration yet, so adapt the reader into an async generator.
          const reader = response.body.getReader();
          const asyncIterator = async function* () {
            try {
              while (true) {
                const {done, value} = await reader.read();
                if (done) return;
                yield value;
              }
            } finally {
              reader.releaseLock();
            }
          };
          return asyncIterator();
        }
        return await response.json();
      } else if (RETRY_STATUS_CODES.includes(response.status)) {
        console.debug(
          `Retrying request on response status: ${response.status}`,
          `Response: ${await response.text()}`,
          `Attempt: ${attempts + 1}`,
        );
        await backoff(attempts);
      } else {
        throw new MistralAPIError(
          `HTTP error! status: ${response.status} ` +
            `Response: \n${await response.text()}`,
        );
      }
    } catch (error) {
      console.error(`Request failed: ${error.message}`);
      // API errors are final; everything else (network, abort) retries.
      if (error.name === 'MistralAPIError') {
        throw error;
      }
      if (attempts === this.maxRetries - 1) throw error;
      await backoff(attempts);
    }
  }
  throw new Error('Max retries reached');
};
/**
 * Builds the JSON body for a (FIM) completion request, mapping camelCase
 * client arguments onto the API's snake_case fields. Optional arguments
 * that are nullish are set to undefined so JSON.stringify omits them.
 *
 * @param {*} model - model name; falls back to this.modelDefault
 * @param {*} prompt - the prompt to complete
 * @param {*} suffix - text to append after the completion
 * @param {*} temperature - sampling temperature
 * @param {*} maxTokens - maximum number of tokens to generate
 * @param {*} topP - nucleus-sampling probability mass
 * @param {*} randomSeed - sampling seed
 * @param {*} stop - stop sequence(s)
 * @param {*} stream - whether to stream the response
 * @return {Object} the request body
 * @throws {MistralAPIError} when neither model nor modelDefault is set
 * @private
 */
_makeCompletionRequest = function(
  model,
  prompt,
  suffix,
  temperature,
  maxTokens,
  topP,
  randomSeed,
  stop,
  stream,
) {
  // A model is mandatory: either explicit or the client-level default.
  if (!model && !this.modelDefault) {
    throw new MistralAPIError('You must provide a model name');
  }
  const body = {
    model: model ?? this.modelDefault,
    prompt: prompt,
    suffix: suffix ?? undefined,
    temperature: temperature ?? undefined,
    max_tokens: maxTokens ?? undefined,
    top_p: topP ?? undefined,
    random_seed: randomSeed ?? undefined,
    stop: stop ?? undefined,
    stream: stream ?? undefined,
  };
  return body;
};

/**
 * Returns the list of models available to this API key.
 * @return {Promise<Object>} the raw listing response ({data: [...]})
 */
listModels = async function() {
  return this._request('get', 'v1/models');
};
/**
 * A chat endpoint without streaming.
 *
 * @param {Object} data - The main chat configuration.
 * @param {*} data.model - the name of the model to chat with,
 *                         e.g. mistral-tiny
 * @param {*} data.messages - an array of messages to chat with, e.g.
 *                            [{role: 'user', content: 'What is the best
 *                            French cheese?'}]
 * @param {*} data.tools - a list of tools to use.
 * @param {*} data.temperature - the temperature to use for sampling
 * @param {*} data.maxTokens - the maximum number of tokens to generate
 * @param {*} data.topP - the cumulative probability of tokens to generate
 * @param {*} data.randomSeed - the random seed to use for sampling
 * @param {*} data.safeMode - deprecated, use safePrompt instead
 * @param {*} data.safePrompt - whether to use safe mode
 * @param {*} data.toolChoice - the tool to use, e.g. 'auto'
 * @param {*} data.responseFormat - the format of the response,
 *                                  e.g. 'json_format'
 * @param {Object} options - Additional operational options.
 * @param {*} [options.signal] - optional AbortSignal; combined with the
 *                               default timeout signal
 * @return {Promise<Object>} the parsed chat-completion response
 */
chat = async function(
  {
    model,
    messages,
    tools,
    temperature,
    maxTokens,
    topP,
    randomSeed,
    safeMode,
    safePrompt,
    toolChoice,
    responseFormat,
  },
  {signal} = {},
) {
  const body = this._makeChatCompletionRequest(
    model,
    messages,
    tools,
    temperature,
    maxTokens,
    topP,
    randomSeed,
    false, // non-streaming variant
    safeMode,
    safePrompt,
    toolChoice,
    responseFormat,
  );
  return this._request('post', 'v1/chat/completions', body, signal);
};
0.5 370 | * @param {*} data.maxTokens - the maximum number of tokens to generate, 371 | * e.g. 100 372 | * @param {*} data.topP - the cumulative probability of tokens to generate, 373 | * e.g. 0.9 374 | * @param {*} data.randomSeed - the random seed to use for sampling, e.g. 42 375 | * @param {*} data.safeMode - deprecated use safePrompt instead 376 | * @param {*} data.safePrompt - whether to use safe mode, e.g. true 377 | * @param {*} data.toolChoice - the tool to use, e.g. 'auto' 378 | * @param {*} data.responseFormat - the format of the response, 379 | * e.g. 'json_format' 380 | * @param {Object} options - Additional operational options. 381 | * @param {*} [options.signal] - optional AbortSignal instance to control 382 | * request The signal will be combined with 383 | * default timeout signal 384 | * @return {Promise} 385 | */ 386 | chatStream = async function* ( 387 | { 388 | model, 389 | messages, 390 | tools, 391 | temperature, 392 | maxTokens, 393 | topP, 394 | randomSeed, 395 | safeMode, 396 | safePrompt, 397 | toolChoice, 398 | responseFormat, 399 | }, 400 | {signal} = {}, 401 | ) { 402 | const request = this._makeChatCompletionRequest( 403 | model, 404 | messages, 405 | tools, 406 | temperature, 407 | maxTokens, 408 | topP, 409 | randomSeed, 410 | true, 411 | safeMode, 412 | safePrompt, 413 | toolChoice, 414 | responseFormat, 415 | ); 416 | const response = await this._request( 417 | 'post', 418 | 'v1/chat/completions', 419 | request, 420 | signal, 421 | ); 422 | 423 | let buffer = ''; 424 | const decoder = new TextDecoder(); 425 | for await (const chunk of response) { 426 | buffer += decoder.decode(chunk, {stream: true}); 427 | let firstNewline; 428 | while ((firstNewline = buffer.indexOf('\n')) !== -1) { 429 | const chunkLine = buffer.substring(0, firstNewline); 430 | buffer = buffer.substring(firstNewline + 1); 431 | if (chunkLine.startsWith('data:')) { 432 | const json = chunkLine.substring(6).trim(); 433 | if (json !== '[DONE]') { 434 | yield 
JSON.parse(json); 435 | } 436 | } 437 | } 438 | } 439 | }; 440 | 441 | /** 442 | * An embeddings endpoint that returns embeddings for a single, 443 | * or batch of inputs 444 | * @param {*} model The embedding model to use, e.g. mistral-embed 445 | * @param {*} input The input to embed, 446 | * e.g. ['What is the best French cheese?'] 447 | * @return {Promise} 448 | */ 449 | embeddings = async function({model, input}) { 450 | const request = { 451 | model: model, 452 | input: input, 453 | }; 454 | const response = await this._request('post', 'v1/embeddings', request); 455 | return response; 456 | }; 457 | 458 | /** 459 | * A completion endpoint without streaming. 460 | * 461 | * @param {Object} data - The main completion configuration. 462 | * @param {*} data.model - the name of the model to chat with, 463 | * e.g. mistral-tiny 464 | * @param {*} data.prompt - the prompt to complete, 465 | * e.g. 'def fibonacci(n: int):' 466 | * @param {*} data.temperature - the temperature to use for sampling, e.g. 0.5 467 | * @param {*} data.maxTokens - the maximum number of tokens to generate, 468 | * e.g. 100 469 | * @param {*} data.topP - the cumulative probability of tokens to generate, 470 | * e.g. 0.9 471 | * @param {*} data.randomSeed - the random seed to use for sampling, e.g. 42 472 | * @param {*} data.stop - the stop sequence to use, e.g. ['\n'] 473 | * @param {*} data.suffix - the suffix to append to the prompt, 474 | * e.g. 'n = int(input(\'Enter a number: \'))' 475 | * @param {Object} options - Additional operational options. 
476 | * @param {*} [options.signal] - optional AbortSignal instance to control 477 | * request The signal will be combined with 478 | * default timeout signal 479 | * @return {Promise} 480 | */ 481 | completion = async function( 482 | {model, prompt, suffix, temperature, maxTokens, topP, randomSeed, stop}, 483 | {signal} = {}, 484 | ) { 485 | const request = this._makeCompletionRequest( 486 | model, 487 | prompt, 488 | suffix, 489 | temperature, 490 | maxTokens, 491 | topP, 492 | randomSeed, 493 | stop, 494 | false, 495 | ); 496 | const response = await this._request( 497 | 'post', 498 | 'v1/fim/completions', 499 | request, 500 | signal, 501 | ); 502 | return response; 503 | }; 504 | 505 | /** 506 | * A completion endpoint that streams responses. 507 | * 508 | * @param {Object} data - The main completion configuration. 509 | * @param {*} data.model - the name of the model to chat with, 510 | * e.g. mistral-tiny 511 | * @param {*} data.prompt - the prompt to complete, 512 | * e.g. 'def fibonacci(n: int):' 513 | * @param {*} data.temperature - the temperature to use for sampling, e.g. 0.5 514 | * @param {*} data.maxTokens - the maximum number of tokens to generate, 515 | * e.g. 100 516 | * @param {*} data.topP - the cumulative probability of tokens to generate, 517 | * e.g. 0.9 518 | * @param {*} data.randomSeed - the random seed to use for sampling, e.g. 42 519 | * @param {*} data.stop - the stop sequence to use, e.g. ['\n'] 520 | * @param {*} data.suffix - the suffix to append to the prompt, 521 | * e.g. 'n = int(input(\'Enter a number: \'))' 522 | * @param {Object} options - Additional operational options. 
523 | * @param {*} [options.signal] - optional AbortSignal instance to control 524 | * request The signal will be combined with 525 | * default timeout signal 526 | * @return {Promise} 527 | */ 528 | completionStream = async function* ( 529 | {model, prompt, suffix, temperature, maxTokens, topP, randomSeed, stop}, 530 | {signal} = {}, 531 | ) { 532 | const request = this._makeCompletionRequest( 533 | model, 534 | prompt, 535 | suffix, 536 | temperature, 537 | maxTokens, 538 | topP, 539 | randomSeed, 540 | stop, 541 | true, 542 | ); 543 | const response = await this._request( 544 | 'post', 545 | 'v1/fim/completions', 546 | request, 547 | signal, 548 | ); 549 | 550 | let buffer = ''; 551 | const decoder = new TextDecoder(); 552 | for await (const chunk of response) { 553 | buffer += decoder.decode(chunk, {stream: true}); 554 | let firstNewline; 555 | while ((firstNewline = buffer.indexOf('\n')) !== -1) { 556 | const chunkLine = buffer.substring(0, firstNewline); 557 | buffer = buffer.substring(firstNewline + 1); 558 | if (chunkLine.startsWith('data:')) { 559 | const json = chunkLine.substring(6).trim(); 560 | if (json !== '[DONE]') { 561 | yield JSON.parse(json); 562 | } 563 | } 564 | } 565 | } 566 | }; 567 | } 568 | 569 | export default MistralClient; 570 | --------------------------------------------------------------------------------