;
23 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/images/logos/ruby.svg:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
--------------------------------------------------------------------------------
/examples/astro/src/components/pages/pipe-run/index.tsx:
--------------------------------------------------------------------------------
1 | import GoHome from '../../ui/go-home';
2 | import PipeRunExample from '../../pipe-run';
3 |
4 | export const PipeRun = () => {
5 | return (
6 |
7 |
8 |
9 |
10 | ⌘ Langbase AI Agent Pipe: Run
11 |
12 |
13 |
14 | Run a pipe to generate a text completion
15 |
16 |
17 |
18 |
19 | );
20 | };
21 |
--------------------------------------------------------------------------------
/examples/nextjs/app/api/langbase/pipes/run-pipes-as-tools/route.ts:
--------------------------------------------------------------------------------
1 | import {Pipe} from '@baseai/core';
2 | import {NextRequest} from 'next/server';
3 | import pipeWithPipeAsTool from '../../../../../baseai/pipes/pipe-with-pipes-as-tools';
4 |
5 | export async function POST(req: NextRequest) {
6 | const runOptions = await req.json();
7 |
8 | // 1. Initiate the Pipe.
9 | const pipe = new Pipe(pipeWithPipeAsTool());
10 |
11 | // 2. Run the pipe
12 | const result = await pipe.run(runOptions);
13 |
14 | // 3. Return the response stringified.
15 | return new Response(JSON.stringify(result));
16 | }
17 |
--------------------------------------------------------------------------------
/examples/nextjs/app/demo/pipe-run/page.tsx:
--------------------------------------------------------------------------------
1 | import PipeRunExample from '@/components/pipe-run';
2 | import GoHome from '@/components/ui/go-home';
3 |
4 | export default function Page() {
5 | return (
6 |
7 |
8 |
9 |
10 | ⌘ Langbase AI Agent Pipe: Run
11 |
12 |
13 |
14 | Run a pipe to generate a text completion
15 |
16 |
17 |
18 |
19 | );
20 | }
21 |
--------------------------------------------------------------------------------
/examples/nextjs/app/demo/tool-calling/page.tsx:
--------------------------------------------------------------------------------
1 | import PipeRunToolExample from '@/components/pipe-run-with-tool';
2 | import GoHome from '@/components/ui/go-home';
3 |
4 | export default function Page() {
5 | return (
6 |
7 |
8 |
9 |
10 | AI Agent Pipes: Tool Calling
11 |
12 |
13 | Run a pipe with tool calling.
14 |
15 |
16 |
17 | );
18 | }
19 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2023 Langbase, Inc.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/IconFewShot.tsx:
--------------------------------------------------------------------------------
1 | export function IconFewShot(
2 | props: JSX.IntrinsicElements['svg']
3 | ) {
4 | return (
5 |
13 |
17 |
18 |
19 |
20 | );
21 | }
22 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/TwitterIcon.tsx:
--------------------------------------------------------------------------------
1 | export function TwitterIcon(props: React.ComponentPropsWithoutRef<'svg'>) {
2 | return (
3 |
10 |
14 |
15 | );
16 | }
17 |
--------------------------------------------------------------------------------
/examples/nextjs/app/api/langbase/pipes/run-tool/route.ts:
--------------------------------------------------------------------------------
1 | import {Pipe} from '@baseai/core';
2 | import {NextRequest} from 'next/server';
3 | import pipeWithTools from '../../../../../baseai/pipes/pipe-with-tool';
4 |
5 | export async function POST(req: NextRequest) {
6 | const runOptions = await req.json();
7 |
8 | // 1. Initiate the Pipe.
9 | const pipe = new Pipe(pipeWithTools());
10 |
11 | // 2. Run the pipe with user messages and other run options.
12 | let result = await pipe.run(runOptions);
13 |
14 | // 2. Return the response stringified.
15 | return new Response(JSON.stringify(result));
16 | }
17 |
--------------------------------------------------------------------------------
/examples/nextjs/app/demo/chat-advanced/page.tsx:
--------------------------------------------------------------------------------
1 | import ChatAdvanced from '@/components/chat-advanced';
2 | import GoHome from '@/components/ui/go-home';
3 |
4 | export default function Page() {
5 | return (
6 |
7 |
8 |
9 |
10 | `usePipe()`: Chat Advanced
11 |
12 |
13 | A kitchen sink example with all `usePipe()` chat features
14 |
15 |
16 |
17 | );
18 | }
19 |
--------------------------------------------------------------------------------
/examples/nextjs/app/demo/pipe-run-stream/page.tsx:
--------------------------------------------------------------------------------
1 | import PipeStreamExample from '@/components/pipe-stream';
2 | import GoHome from '@/components/ui/go-home';
3 |
4 | export default function Page() {
5 | return (
6 |
7 |
8 |
9 |
10 | ⌘ Langbase AI Agent Pipe: Stream
11 |
12 |
13 | Stream a pipe to stream a text completion
14 |
15 |
16 |
17 | );
18 | }
19 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/BellIcon.tsx:
--------------------------------------------------------------------------------
1 | export function BellIcon(props: React.ComponentPropsWithoutRef<'svg'>) {
2 | return (
3 |
4 |
9 |
15 |
16 | );
17 | }
18 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/EnvelopeIcon.tsx:
--------------------------------------------------------------------------------
1 | export function EnvelopeIcon(props: React.ComponentPropsWithoutRef<'svg'>) {
2 | return (
3 |
4 |
10 |
15 |
16 | );
17 | }
18 |
--------------------------------------------------------------------------------
/examples/astro/src/components/pages/pipe-stream/index.tsx:
--------------------------------------------------------------------------------
1 | import GoHome from '../../ui/go-home';
2 | import PipeStreamExample from '../../pipe-stream';
3 |
4 | export const PipeStream = () => {
5 | return (
6 |
7 |
8 |
9 |
10 | ⌘ Langbase AI Agent Pipe: Stream
11 |
12 |
13 |
14 | Stream a pipe to stream a text completion
15 |
16 |
17 |
18 |
19 | );
20 | };
21 |
--------------------------------------------------------------------------------
/packages/baseai/src/dev/middleware/logger.ts:
--------------------------------------------------------------------------------
1 | import type { MiddlewareHandler } from 'hono';
2 |
3 | export const logger = (): MiddlewareHandler => {
4 | return async function logger(c, next) {
5 | await next();
6 | c.req.matchedRoutes.forEach(({ handler, method, path }, i) => {
7 | const name =
8 | handler.name ||
9 | (handler.length < 2 ? '[handler]' : '[middleware]');
10 | console.log(
11 | method,
12 | ' ',
13 | path,
14 | ' '.repeat(Math.max(10 - path.length, 0)),
15 | name,
16 | i === c.req.routeIndex ? '<- respond from here' : ''
17 | );
18 | });
19 | };
20 | };
21 |
--------------------------------------------------------------------------------
/tools/eslint-config/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@baseai/eslint-config",
3 | "version": "0.0.2",
4 | "license": "Apache-2.0",
5 | "private": true,
6 | "files": [
7 | "library.js",
8 | "next.js",
9 | "react-internal.js"
10 | ],
11 | "devDependencies": {
12 | "@next/eslint-plugin-next": "^14.1.4",
13 | "@typescript-eslint/eslint-plugin": "^7.1.0",
14 | "@typescript-eslint/parser": "^7.1.0",
15 | "@vercel/style-guide": "^5.2.0",
16 | "eslint-config-prettier": "^9.1.0",
17 | "eslint-config-turbo": "^2.0.0",
18 | "eslint-plugin-only-warn": "^1.1.0",
19 | "typescript": "^5.3.3"
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/IconVersions.tsx:
--------------------------------------------------------------------------------
1 | export function IconVersions(
2 | props: JSX.IntrinsicElements['svg']
3 | ) {
4 | return (
5 |
13 |
19 |
20 | );
21 | }
22 |
--------------------------------------------------------------------------------
/examples/astro/src/components/pages/tool-calling/index.tsx:
--------------------------------------------------------------------------------
1 | import GoHome from '../../ui/go-home';
2 | import PipeRunToolExample from '../../pipe-run-with-tool';
3 |
4 | export const ToolCalling = () => {
5 | return (
6 |
7 |
8 |
9 | ⌘ Langbase: Composable Pipe Run
10 |
11 |
12 |
13 | Run a pipe that can call another pipe.
14 |
15 |
16 |
17 |
18 | );
19 | };
20 |
--------------------------------------------------------------------------------
/packages/baseai/src/utils/ts-format-code.ts:
--------------------------------------------------------------------------------
1 | import * as prettier from 'prettier';
2 |
3 | /**
4 | * Formats the given TypeScript code string using Prettier with specified options.
5 | *
6 | * @param code - The TypeScript code to format.
7 | * @returns A promise that resolves to the formatted code string.
8 | */
9 | export async function formatCode(code: string) {
10 | return await prettier.format(code, {
11 | parser: 'typescript',
12 | singleQuote: true,
13 | trailingComma: 'none',
14 | arrowParens: 'avoid',
15 | printWidth: 80,
16 | useTabs: true,
17 | semi: true,
18 | tabWidth: 4
19 | });
20 | }
21 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/IconStoreMessages.tsx:
--------------------------------------------------------------------------------
1 | export function IconStoreMessages(
2 | props: JSX.IntrinsicElements['svg']
3 | ) {
4 | return (
5 |
13 |
14 |
15 |
21 |
22 | );
23 | }
24 |
--------------------------------------------------------------------------------
/examples/nextjs/app/demo/tool-calling-stream/page.tsx:
--------------------------------------------------------------------------------
1 | import PipeRunToolStreamExample from '@/components/pipe-run-with-tool-stream';
2 | import GoHome from '@/components/ui/go-home';
3 |
4 | export default function Page() {
5 | return (
6 |
7 |
8 |
9 |
10 | AI Agent Pipes: Tool Calling
11 |
12 |
13 | Run a pipe with tool calling.
14 |
15 |
16 |
17 | );
18 | }
19 |
--------------------------------------------------------------------------------
/apps/baseai.dev/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 |
8 | # testing
9 | /coverage
10 |
11 | # next.js
12 | /.next/
13 | /out/
14 |
15 | # production
16 | /build
17 |
18 | # misc
19 | .DS_Store
20 | *.pem
21 |
22 | # debug
23 | npm-debug.log*
24 | yarn-debug.log*
25 | yarn-error.log*
26 |
27 | # local env files
28 | .env*.local
29 |
30 | # vercel
31 | .vercel
32 |
33 | # typescript
34 | *.tsbuildinfo
35 | next-env.d.ts
36 |
37 | # content
38 | content/docs/docs.json
39 | content/learn/learn.json
40 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/CartIcon.tsx:
--------------------------------------------------------------------------------
1 | export function CartIcon(props: React.ComponentPropsWithoutRef<'svg'>) {
2 | return (
3 |
4 |
8 |
13 |
14 | );
15 | }
16 |
--------------------------------------------------------------------------------
/examples/nextjs/app/demo/pipe-run-pipes-as-tools/page.tsx:
--------------------------------------------------------------------------------
1 | import PipeRunPipesAsTools from '@/components/pipe-run-pipes-as-tools';
2 | import GoHome from '@/components/ui/go-home';
3 |
4 | export default function Page() {
5 | return (
6 |
7 |
8 |
9 |
10 | ⌘ Langbase: Composable Pipe Run
11 |
12 |
13 | Run a pipe that can call another pipe.
14 |
15 |
16 |
17 | );
18 | }
19 |
--------------------------------------------------------------------------------
/packages/baseai/src/dev/providers/cohere/index.ts:
--------------------------------------------------------------------------------
1 | import type { ProviderConfigs } from 'types/providers';
2 | import CohereAPIConfig from './api';
3 | import {
4 | CohereChatCompleteConfig,
5 | CohereChatCompleteResponseTransform,
6 | CohereChatCompleteStreamChunkTransform
7 | } from './chatComplete';
8 |
9 | const CohereConfig: ProviderConfigs = {
10 | chatComplete: CohereChatCompleteConfig,
11 | api: CohereAPIConfig,
12 | responseTransforms: {
13 | chatComplete: CohereChatCompleteResponseTransform,
14 | 'stream-chatComplete': CohereChatCompleteStreamChunkTransform
15 | }
16 | };
17 |
18 | export default CohereConfig;
19 |
--------------------------------------------------------------------------------
/packages/baseai/src/dev/providers/google/index.ts:
--------------------------------------------------------------------------------
1 | import type { ProviderConfigs } from 'types/providers';
2 | import GoogleApiConfig from './api';
3 | import {
4 | GoogleChatCompleteConfig,
5 | GoogleChatCompleteResponseTransform,
6 | GoogleChatCompleteStreamChunkTransform
7 | } from './chatComplete';
8 |
9 | const GoogleConfig: ProviderConfigs = {
10 | api: GoogleApiConfig,
11 | chatComplete: GoogleChatCompleteConfig,
12 | responseTransforms: {
13 | chatComplete: GoogleChatCompleteResponseTransform,
14 | 'stream-chatComplete': GoogleChatCompleteStreamChunkTransform
15 | }
16 | };
17 |
18 | export default GoogleConfig;
19 |
--------------------------------------------------------------------------------
/packages/baseai/src/dev/providers/ollama/index.ts:
--------------------------------------------------------------------------------
1 | import type { ProviderConfigs } from 'types/providers';
2 | import OllamaAPIConfig from './api';
3 | import {
4 | OllamaChatCompleteConfig,
5 | OllamaChatCompleteResponseTransform,
6 | OllamaChatCompleteStreamChunkTransform
7 | } from './chatComplete';
8 |
9 | const OllamaConfig: ProviderConfigs = {
10 | api: OllamaAPIConfig,
11 | chatComplete: OllamaChatCompleteConfig,
12 | responseTransforms: {
13 | chatComplete: OllamaChatCompleteResponseTransform,
14 | 'stream-chatComplete': OllamaChatCompleteStreamChunkTransform
15 | }
16 | };
17 |
18 | export default OllamaConfig;
19 |
--------------------------------------------------------------------------------
/examples/agents/readme-writer-agent/utils/generate-embeddings.ts:
--------------------------------------------------------------------------------
1 | import * as p from '@clack/prompts';
2 | import {execAsync} from './exec-sync';
3 | import {handleError} from './handle-error';
4 |
5 | export async function generateEmbeddings({dirName}: {dirName: string}) {
6 | const spinner = p.spinner();
7 | spinner.start('Understanding your project codebase...');
8 |
9 | try {
10 | await execAsync(`npx baseai@latest embed -m code-files`, {
11 | cwd: dirName,
12 | });
13 |
14 | spinner.stop('Developed understanding of your project codebase.');
15 | } catch (error) {
16 | handleError({spinner, error});
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/examples/astro/baseai/tools/weather.ts:
--------------------------------------------------------------------------------
1 | const getWeather = ({location}: {location: string}) => {
2 | return `Weather of ${location} is 16`;
3 | };
4 |
5 | const toolGetWeather = () => ({
6 | run: getWeather,
7 | type: 'function',
8 | function: {
9 | name: 'getWeather',
10 | description: 'Get the current weather of a given location',
11 | parameters: {
12 | type: 'object',
13 | required: ['location'],
14 | properties: {
15 | location: {
16 | type: 'string',
17 | description: 'The city and state, e.g. San Francisco, CA',
18 | },
19 | },
20 | },
21 | },
22 | });
23 | export default toolGetWeather;
24 |
--------------------------------------------------------------------------------
/examples/astro/src/components/pages/chat-advanced/index.tsx:
--------------------------------------------------------------------------------
1 | import GoHome from '../../ui/go-home';
2 | import ChatAdvancedExample from '../../chat-advanced';
3 |
4 | export const ChatAdvanced = () => {
5 | return (
6 |
7 |
8 |
9 |
10 | `usePipe()`: Chat Advanced
11 |
12 |
13 |
14 | A kitchen sink example with all `usePipe()` chat features
15 |
16 |
17 |
18 |
19 | );
20 | };
21 |
--------------------------------------------------------------------------------
/examples/astro/src/pages/api/langbase/pipes/run.ts:
--------------------------------------------------------------------------------
1 | import {mapMetaEnvToProcessEnv} from './../../../../lib/utils';
2 | import getPipeSummary from '../../../../../baseai/pipes/summary';
3 | import {Pipe} from '@baseai/core';
4 | import type {APIRoute} from 'astro';
5 |
6 | export const POST: APIRoute = async ({request}) => {
7 | const runOptions = await request.json();
8 |
9 | // 1. Initiate the Pipe.
10 | const pipe = new Pipe(getPipeSummary());
11 |
12 | // 2. Run the pipe
13 | const result = await pipe.run(runOptions);
14 |
15 | // 3. Return the response stringified.
16 | return new Response(JSON.stringify(result));
17 | };
18 |
--------------------------------------------------------------------------------
/examples/remix/app/routes/api.langbase.pipes.run.ts:
--------------------------------------------------------------------------------
1 | import type {ActionFunction} from '@remix-run/node';
2 | import getPipeSummary from '~/../baseai/pipes/summary';
3 | import {Pipe} from '@baseai/core';
4 |
5 | export const action: ActionFunction = async ({request}) => {
6 | const runOptions = await request.json();
7 |
8 | // 1. Initiate the Pipe.
9 | // const pipe = new Pipe(getPipeTinyLlama());
10 | const pipe = new Pipe(getPipeSummary());
11 |
12 | // 2. Run the pipe
13 | const result = await pipe.run(runOptions);
14 |
15 | // 3. Return the response stringified.
16 | return new Response(JSON.stringify(result));
17 | };
18 |
--------------------------------------------------------------------------------
/examples/remix/baseai/tools/weather.ts:
--------------------------------------------------------------------------------
1 | const getWeather = ({location}: {location: string}) => {
2 | return `Weather of ${location} is 16`;
3 | };
4 |
5 | const toolGetWeather = () => ({
6 | run: getWeather,
7 | type: 'function',
8 | function: {
9 | name: 'getWeather',
10 | description: 'Get the current weather of a given location',
11 | parameters: {
12 | type: 'object',
13 | required: ['location'],
14 | properties: {
15 | location: {
16 | type: 'string',
17 | description: 'The city and state, e.g. San Francisco, CA',
18 | },
19 | },
20 | },
21 | },
22 | });
23 | export default toolGetWeather;
24 |
--------------------------------------------------------------------------------
/apps/baseai.dev/content/docs/memory/list.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: 'List'
3 | description: "List all memory in your current project directory."
4 | tags:
5 | - baseai
6 | - memory
7 | - langbase
8 | - list
9 | section: 'Memory'
10 | published: 2024-09-24
11 | modified: 2024-09-24
12 | ---
13 |
14 | # List all memory
15 |
16 | List all memory in your current project directory.
17 |
18 | ---
19 |
20 | Use the `memory --list` command to list all memories in your current project directory.
21 |
22 | ```bash
23 | npx baseai@latest memory --list
24 | ```
25 |
26 | It will list all memories in the current project directory.
27 |
28 | ---
29 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/ChatBubbleIcon.tsx:
--------------------------------------------------------------------------------
1 | export function ChatBubbleIcon(props: React.ComponentPropsWithoutRef<'svg'>) {
2 | return (
3 |
4 |
9 |
15 |
16 | );
17 | }
18 |
--------------------------------------------------------------------------------
/examples/astro/src/components/pages/pipe-run-pipes-as-tools/index.tsx:
--------------------------------------------------------------------------------
1 | import GoHome from '../../ui/go-home';
2 | import PipeRunToolExample from '../../pipe-run-pipes-as-tools';
3 |
4 | export const PipeRunPipesAsTools = () => {
5 | return (
6 |
7 |
8 |
9 | ⌘ Langbase: Composable Pipe Run
10 |
11 |
12 |
13 | Run a pipe that can call another pipe.
14 |
15 |
16 |
17 |
18 | );
19 | };
20 |
--------------------------------------------------------------------------------
/examples/nextjs/app/api/langbase/pipes/run-memory/route.ts:
--------------------------------------------------------------------------------
1 | import {Pipe} from '@baseai/core';
2 | import {NextRequest} from 'next/server';
3 | import pipeWithMemory from '../../../../../baseai/pipes/pipe-with-memory';
4 |
5 | export async function POST(req: NextRequest) {
6 | const runOptions = await req.json();
7 |
8 | // 1. Initiate the Pipe.
9 | const pipe = new Pipe(pipeWithMemory());
10 |
11 | // 2. Run the pipe with user messages and other run options.
12 | const {stream} = await pipe.run(runOptions);
13 |
14 | // 3. Return the ReadableStream directly.
15 | return new Response(stream, {
16 | status: 200,
17 | });
18 | }
19 |
--------------------------------------------------------------------------------
/examples/nextjs/baseai/tools/weather.ts:
--------------------------------------------------------------------------------
1 | const getWeather = ({location}: {location: string}) => {
2 | return `Weather of ${location} is 16`;
3 | };
4 |
5 | const toolGetWeather = () => ({
6 | run: getWeather,
7 | type: 'function',
8 | function: {
9 | name: 'getWeather',
10 | description: 'Get the current weather of a given location',
11 | parameters: {
12 | type: 'object',
13 | required: ['location'],
14 | properties: {
15 | location: {
16 | type: 'string',
17 | description: 'The city and state, e.g. San Francisco, CA',
18 | },
19 | },
20 | },
21 | },
22 | });
23 | export default toolGetWeather;
24 |
--------------------------------------------------------------------------------
/examples/nextjs/public/vercel.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/examples/remix/app/routes/api.langbase.pipes.run-tool.ts:
--------------------------------------------------------------------------------
1 | import type {ActionFunction} from '@remix-run/node';
2 | import getPipeWithTool from '~/../baseai/pipes/pipe-with-tool';
3 | import {Pipe} from '@baseai/core';
4 |
5 | export const action: ActionFunction = async ({request}) => {
6 | const runOptions = await request.json();
7 |
8 | // 1. Initiate the Pipe.
9 | const pipe = new Pipe(getPipeWithTool());
10 |
11 | // 2. Run the pipe with user messages and other run options.
12 | const result = await pipe.run(runOptions);
13 |
14 | // 2. Return the response stringified.
15 | return new Response(JSON.stringify(result));
16 | };
17 |
--------------------------------------------------------------------------------
/examples/astro/src/pages/api/langbase/pipes/run-tool.ts:
--------------------------------------------------------------------------------
1 | import {mapMetaEnvToProcessEnv} from './../../../../lib/utils';
2 | import getPipeWithTool from '../../../../../baseai/pipes/summary';
3 | import {Pipe} from '@baseai/core';
4 | import type {APIRoute} from 'astro';
5 |
6 | export const POST: APIRoute = async ({request}) => {
7 | const runOptions = await request.json();
8 |
9 | // 1. Initiate the Pipe.
10 | const pipe = new Pipe(getPipeWithTool());
11 |
12 | // 2. Run the pipe
13 | const result = await pipe.run(runOptions);
14 |
15 | // 3. Return the response stringified.
16 | return new Response(JSON.stringify(result));
17 | };
18 |
--------------------------------------------------------------------------------
/packages/baseai/src/dev/providers/fireworks-ai/api.ts:
--------------------------------------------------------------------------------
1 | import type { ProviderAPIConfig } from 'types/providers';
2 | const FireworksAIAPIConfig: ProviderAPIConfig = {
3 | baseURL: 'https://api.fireworks.ai/inference/v1',
4 | headers: ({ llmApiKey }: { llmApiKey: string }) => {
5 | return {
6 | Authorization: `Bearer ${llmApiKey}`,
7 | Accept: 'application/json'
8 | };
9 | },
10 | getEndpoint: ({ endpoint }: { endpoint: string }) => {
11 | switch (endpoint) {
12 | case 'chatComplete':
13 | return '/chat/completions';
14 | default:
15 | return '';
16 | }
17 | }
18 | };
19 |
20 | export default FireworksAIAPIConfig;
21 |
--------------------------------------------------------------------------------
/tools/tsconfig/base.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://json.schemastore.org/tsconfig",
3 | "display": "Default",
4 | "compilerOptions": {
5 | "strict": true,
6 | "composite": false,
7 | "declaration": true,
8 | "skipLibCheck": true,
9 | "inlineSources": false,
10 | "declarationMap": true,
11 | "esModuleInterop": true,
12 | "noUnusedLocals": false,
13 | "isolatedModules": true,
14 | "moduleResolution": "node",
15 | "noUnusedParameters": false,
16 | "preserveWatchOutput": true,
17 | "forceConsistentCasingInFileNames": true,
18 | "types": ["@types/node", "vitest/globals"]
19 | },
20 | "exclude": ["node_modules"]
21 | }
22 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/IconDoc.tsx:
--------------------------------------------------------------------------------
1 | export function IconDoc(props: JSX.IntrinsicElements['svg']) {
2 | return (
3 |
11 |
18 |
19 | );
20 | }
21 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/ShapesIcon.tsx:
--------------------------------------------------------------------------------
1 | export function ShapesIcon(props: React.ComponentPropsWithoutRef<'svg'>) {
2 | return (
3 |
4 |
10 |
15 |
16 | );
17 | }
18 |
--------------------------------------------------------------------------------
/examples/nodejs/baseai/memory/ai-agent-memory/index.ts:
--------------------------------------------------------------------------------
1 | import {MemoryI} from '@baseai/core';
2 |
3 | const memoryAiAgentMemory = (): MemoryI => ({
4 | name: 'ai-agent-memory',
5 | description: 'My list of docs as memory for an AI agent pipe',
6 | git: {
7 | enabled: true,
8 | include: ['**/*'],
9 | gitignore: true,
10 | deployedAt: '',
11 | embeddedAt: '',
12 | },
13 | documents: {
14 | meta: doc => {
15 | // generate a URL for each document
16 | const url = `https://example.com/${doc.path}`;
17 | return {
18 | url,
19 | name: doc.name,
20 | };
21 | },
22 | },
23 | });
24 |
25 | export default memoryAiAgentMemory;
26 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/IconJsonMode.tsx:
--------------------------------------------------------------------------------
1 | export function IconJsonMode(
2 | props: JSX.IntrinsicElements['svg']
3 | ) {
4 | return (
5 |
13 |
19 |
20 | );
21 | }
22 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/IconUsage.tsx:
--------------------------------------------------------------------------------
1 | export function IconUsage(props: JSX.IntrinsicElements['svg']) {
2 | return (
3 |
11 |
18 |
19 | );
20 | }
21 |
--------------------------------------------------------------------------------
/packages/baseai/src/utils/memory/get-available-memories.ts:
--------------------------------------------------------------------------------
1 | import path from 'path';
2 | import fs from 'fs';
3 |
4 | export async function getAvailableMemories() {
5 | try {
6 | // Construct the path containing all memories folders.
7 | const memoryPath = path.join(process.cwd(), 'baseai', 'memory');
8 |
9 | // Check if the baseai directory exists.
10 | if (!fs.existsSync(memoryPath)) return [];
11 |
12 | // Get all directories names in the memory path.
13 | const memoryNames = await fs.promises.readdir(memoryPath);
14 |
15 | // Return the memory names.
16 | return memoryNames;
17 | } catch (error: any) {
18 | return [];
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/ui/iconists/icon-usage.tsx:
--------------------------------------------------------------------------------
1 | export function IconUsage(props: JSX.IntrinsicElements['svg']) {
2 | return (
3 |
11 |
18 |
19 | );
20 | }
21 |
--------------------------------------------------------------------------------
/examples/nextjs/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "lib": ["dom", "dom.iterable", "esnext"],
4 | "allowJs": true,
5 | "skipLibCheck": true,
6 | "strict": true,
7 | "noEmit": true,
8 | "esModuleInterop": true,
9 | "module": "esnext",
10 | "moduleResolution": "bundler",
11 | "resolveJsonModule": true,
12 | "isolatedModules": true,
13 | "jsx": "preserve",
14 | "incremental": true,
15 | "plugins": [
16 | {
17 | "name": "next"
18 | }
19 | ],
20 | "paths": {
21 | "@/*": ["./*"]
22 | }
23 | },
24 | "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
25 | "exclude": ["node_modules"]
26 | }
27 |
--------------------------------------------------------------------------------
/examples/remix/app/entry.client.tsx:
--------------------------------------------------------------------------------
1 | /**
2 | * By default, Remix will handle hydrating your app on the client for you.
3 | * You are free to delete this file if you'd like to, but if you ever want it revealed again, you can run `npx remix reveal` ✨
4 | * For more information, see https://remix.run/file-conventions/entry.client
5 | */
6 |
7 | import { RemixBrowser } from "@remix-run/react";
8 | import { startTransition, StrictMode } from "react";
9 | import { hydrateRoot } from "react-dom/client";
10 |
11 | startTransition(() => {
12 | hydrateRoot(
13 | document,
14 |
15 |
16 |
17 | );
18 | });
19 |
--------------------------------------------------------------------------------
/packages/baseai/src/dev/providers/anthropic/index.ts:
--------------------------------------------------------------------------------
1 | import type { ProviderConfigs } from 'types/providers';
2 | import AnthropicAPIConfig from './api';
3 | import {
4 | AnthropicChatCompleteConfig,
5 | AnthropicChatCompleteResponseTransform,
6 | AnthropicChatCompleteStreamChunkTransform
7 | } from './chatComplete';
8 |
9 | const AnthropicConfig: ProviderConfigs = {
10 | chatComplete: AnthropicChatCompleteConfig,
11 | api: AnthropicAPIConfig,
12 | responseTransforms: {
13 | chatComplete: AnthropicChatCompleteResponseTransform,
14 | 'stream-chatComplete': AnthropicChatCompleteStreamChunkTransform
15 | }
16 | };
17 |
18 | export default AnthropicConfig;
19 |
--------------------------------------------------------------------------------
/packages/baseai/src/dev/providers/mistral-ai/index.ts:
--------------------------------------------------------------------------------
1 | import type { ProviderConfigs } from 'types/providers';
2 | import MistralAIAPIConfig from './api';
3 | import {
4 | MistralAIChatCompleteConfig,
5 | MistralAIChatCompleteResponseTransform,
6 | MistralAIChatCompleteStreamChunkTransform
7 | } from './chatComplete';
8 |
9 | const MistralAIConfig: ProviderConfigs = {
10 | chatComplete: MistralAIChatCompleteConfig,
11 | api: MistralAIAPIConfig,
12 | responseTransforms: {
13 | chatComplete: MistralAIChatCompleteResponseTransform,
14 | 'stream-chatComplete': MistralAIChatCompleteStreamChunkTransform
15 | }
16 | };
17 |
18 | export default MistralAIConfig;
19 |
--------------------------------------------------------------------------------
/examples/nextjs/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 | .yarn/install-state.gz
8 |
9 | # testing
10 | /coverage
11 |
12 | # next.js
13 | /.next/
14 | /out/
15 |
16 | # production
17 | /build
18 |
19 | # misc
20 | .DS_Store
21 | *.pem
22 |
23 | # debug
24 | npm-debug.log*
25 | yarn-debug.log*
26 | yarn-error.log*
27 |
28 | # local env files
29 | .env*.local
30 |
31 | # vercel
32 | .vercel
33 |
34 | # typescript
35 | *.tsbuildinfo
36 | next-env.d.ts
37 |
38 | # baseai
39 | /.baseai/
40 | # baseai
41 | **/.baseai/
42 | # env file
43 | .env
44 |
--------------------------------------------------------------------------------
/examples/astro/src/pages/api/langbase/pipes/run-pipes-as-tools.ts:
--------------------------------------------------------------------------------
1 | import {mapMetaEnvToProcessEnv} from './../../../../lib/utils';
2 | import getPipeWithPipesAsTools from '../../../../../baseai/pipes/pipe-with-pipes-as-tools';
3 | import {Pipe} from '@baseai/core';
4 | import type {APIRoute} from 'astro';
5 |
6 | export const POST: APIRoute = async ({request}) => {
7 | const runOptions = await request.json();
8 |
9 | // 1. Initiate the Pipe.
10 | const pipe = new Pipe(getPipeWithPipesAsTools());
11 |
12 | // 2. Run the pipe
13 | const result = await pipe.run(runOptions);
14 |
15 | // 3. Return the response stringified.
16 | return new Response(JSON.stringify(result));
17 | };
18 |
--------------------------------------------------------------------------------
/examples/remix/app/routes/api.langbase.pipes.run-pipes-as-tools.ts:
--------------------------------------------------------------------------------
1 | import type {ActionFunction} from '@remix-run/node';
2 | import getPipeWithPipesAsTools from '~/../baseai/pipes/pipe-with-pipes-as-tools';
3 | import {Pipe} from '@baseai/core';
4 |
5 | export const action: ActionFunction = async ({request}) => {
6 | const runOptions = await request.json();
7 |
8 | // 1. Initiate the Pipe.
9 | // const pipe = new Pipe(getPipeTinyLlama());
10 | const pipe = new Pipe(getPipeWithPipesAsTools());
11 |
12 | // 2. Run the pipe
13 | const result = await pipe.run(runOptions);
14 |
15 | // 3. Return the response stringified.
16 | return new Response(JSON.stringify(result));
17 | };
18 |
--------------------------------------------------------------------------------
/examples/remix/app/routes/api.langbase.pipes.run-stream.ts:
--------------------------------------------------------------------------------
1 | import getPipeSummary from '~/../baseai/pipes/summary';
2 | import {Pipe} from '@baseai/core';
3 | import {ActionFunction} from '@remix-run/node';
4 |
5 | export const action: ActionFunction = async ({request}) => {
6 | const runOptions = await request.json();
7 |
8 | // 1. Initiate the Pipe.
9 | const pipe = new Pipe(getPipeSummary());
10 |
11 | // 2. Run the Pipe.
12 | const {stream, threadId} = await pipe.run(runOptions);
13 |
14 | // 3. Return the ReadableStream directly.
15 | return new Response(stream, {
16 | status: 200,
17 | headers: {
18 | 'lb-thread-id': threadId ?? '',
19 | },
20 | });
21 | };
22 |
--------------------------------------------------------------------------------
/packages/baseai/src/dev/providers/fireworks-ai/index.ts:
--------------------------------------------------------------------------------
1 | import type { ProviderConfigs } from 'types/providers';
2 | import FireworksAIAPIConfig from './api';
3 | import {
4 | FireworksAIChatCompleteConfig,
5 | FireworksAIChatCompleteResponseTransform,
6 | FireworksAIChatCompleteStreamChunkTransform
7 | } from './chatComplete';
8 |
9 | const FireworksAIConfig: ProviderConfigs = {
10 | chatComplete: FireworksAIChatCompleteConfig,
11 | api: FireworksAIAPIConfig,
12 | responseTransforms: {
13 | chatComplete: FireworksAIChatCompleteResponseTransform,
14 | 'stream-chatComplete': FireworksAIChatCompleteStreamChunkTransform
15 | }
16 | };
17 |
18 | export default FireworksAIConfig;
19 |
--------------------------------------------------------------------------------
/packages/baseai/src/utils/get-available-tools.ts:
--------------------------------------------------------------------------------
1 | import fs from 'fs';
2 | import path from 'path';
3 |
4 | export async function getAvailableTools() {
5 | // Construct the path containing all tools folders.
6 | const toolsPath = path.join(process.cwd(), 'baseai', 'tools');
7 |
8 | // Check if the baseai directory exists.
9 | if (!fs.existsSync(toolsPath)) return [];
10 |
11 | 	// Get all directory names in the tools path.
12 | const toolsNames = await fs.promises.readdir(toolsPath);
13 |
14 | 	// Strip the .ts extension from each tool file name.
15 | const toolsPaths = toolsNames.map(toolName => toolName.replace('.ts', ''));
16 |
17 | // Return the tool names.
18 | return toolsPaths;
19 | }
20 |
--------------------------------------------------------------------------------
/packages/core/src/utils/is-prod.ts:
--------------------------------------------------------------------------------
1 | const FORCE_PROD = false;
2 | const TEST_PROD_LOCALLY = FORCE_PROD;
3 |
4 | export function isProd() {
5 | if (TEST_PROD_LOCALLY) return true;
6 | return process.env.NODE_ENV === 'production';
7 | }
8 |
9 | export function isLocal() {
10 | return process.env.NODE_ENV !== 'production';
11 | }
12 |
13 | export function getApiUrl(prod?: boolean) {
14 | if (prod) return 'https://api.langbase.com';
15 | else return 'http://localhost:9000';
16 |
17 | // TODO: Make local port configurable.
18 | // return isProd() ? 'https://api.langbase.com' : 'http://localhost:9000';
19 | // return isProd() ? 'http://localhost:8787' : 'http://localhost:9000';
20 | }
21 |
--------------------------------------------------------------------------------
/packages/baseai/src/dev/providers/anthropic/api.ts:
--------------------------------------------------------------------------------
1 | import type { ProviderAPIConfig } from 'types/providers';
2 |
3 | const AnthropicAPIConfig: ProviderAPIConfig = {
4 | baseURL: 'https://api.anthropic.com/v1',
5 | headers: ({
6 | llmApiKey,
7 | endpoint
8 | }: {
9 | llmApiKey: string;
10 | endpoint: string;
11 | }) => {
12 | const headers: Record = {
13 | 'X-API-Key': `${llmApiKey}`,
14 | 'anthropic-version': '2023-06-01'
15 | };
16 | if (endpoint === 'chatComplete') {
17 | headers['anthropic-beta'] = 'messages-2023-12-15';
18 | }
19 | return headers;
20 | },
21 | chatComplete: '/messages'
22 | };
23 |
24 | export default AnthropicAPIConfig;
25 |
--------------------------------------------------------------------------------
/packages/baseai/src/dev/providers/perplexity/index.ts:
--------------------------------------------------------------------------------
1 | import type { ProviderConfigs } from 'types/providers';
2 | import PerplexityAIApiConfig from './api';
3 | import {
4 | PerplexityAIChatCompleteConfig,
5 | PerplexityAIChatCompleteResponseTransform,
6 | PerplexityAIChatCompleteStreamChunkTransform
7 | } from './chatComplete';
8 |
9 | const PerplexityAIConfig: ProviderConfigs = {
10 | chatComplete: PerplexityAIChatCompleteConfig,
11 | api: PerplexityAIApiConfig,
12 | responseTransforms: {
13 | chatComplete: PerplexityAIChatCompleteResponseTransform,
14 | 'stream-chatComplete': PerplexityAIChatCompleteStreamChunkTransform
15 | }
16 | };
17 |
18 | export default PerplexityAIConfig;
19 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/SquaresPlusIcon.tsx:
--------------------------------------------------------------------------------
1 | export function SquaresPlusIcon(props: React.ComponentPropsWithoutRef<'svg'>) {
2 | return (
3 |
4 |
9 |
15 |
16 | );
17 | }
18 |
--------------------------------------------------------------------------------
/examples/agents/readme-writer-agent/utils/questions.ts:
--------------------------------------------------------------------------------
1 | import * as p from '@clack/prompts';
2 |
3 | export async function questions() {
4 | const readme = await p.group(
5 | {
6 | level: () =>
7 | p.select({
8 | message:
9 | 'Choose the level of detail you want in the README.',
10 | options: [
11 | {label: 'Simple', value: 'simple' as unknown as any},
12 | {
13 | label: 'Detailed',
14 | value: 'detailed' as unknown as any,
15 | },
16 | ],
17 | }),
18 | },
19 | {
20 | onCancel: () => {
21 | p.cancel('Operation cancelled.');
22 | process.exit(0);
23 | },
24 | },
25 | );
26 |
27 | return {level: readme.level};
28 | }
29 |
--------------------------------------------------------------------------------
/packages/baseai/src/dev/hono/validate.ts:
--------------------------------------------------------------------------------
1 | import {ApiErrorZod, ErrorCodesType} from './errors';
2 |
3 | async function validate({
4 | schema,
5 | input,
6 | code = `BAD_REQUEST`,
7 | message,
8 | }: {
9 | schema: any;
10 | input: any;
11 | code?: ErrorCodesType;
12 | message?: string;
13 | }) {
14 | const result = await schema.safeParseAsync(input);
15 | ApiErrorZod.handle({code, result, message});
16 |
17 | // Since ApiError.handle throws, TypeScript may not infer that execution ends above
18 | 	// and result.data is now available.
19 | // @ts-ignore - TS doesn't know that the function ends above.
20 | return result.data; // Return the validated data
21 | }
22 |
23 | export default validate;
24 |
--------------------------------------------------------------------------------
/packages/baseai/src/dev/middleware/debug-base.ts:
--------------------------------------------------------------------------------
1 | import type { MiddlewareHandler } from 'hono';
2 | import { dlog } from '../utils/dlog';
3 |
4 | export const debugBase = (): MiddlewareHandler => {
5 | return async function debugUrl(c, next) {
6 | const url = new URL(c.req.url);
7 | const path = url.pathname;
8 |
9 | dlog('API HIT', {
10 | url: c.req.url,
11 | path: path,
12 | method: c.req.method
13 | });
14 |
15 | if (c.req.method.toUpperCase() === 'POST') {
16 | try {
17 | const req = await c.req.json();
18 | dlog('REQUEST BODY', req);
19 | } catch (error) {
20 | dlog('ERROR PARSING REQUEST BODY', error);
21 | }
22 | }
23 |
24 | await next();
25 | };
26 | };
27 |
--------------------------------------------------------------------------------
/packages/baseai/src/utils/get-available-pipes.ts:
--------------------------------------------------------------------------------
1 | import fs from 'fs';
2 | import path from 'path';
3 |
4 | export async function getAvailablePipes() {
5 | // Construct the path containing all pipes folders.
6 | const pipesPath = path.join(process.cwd(), 'baseai', 'pipes');
7 |
8 | // Check if the baseai directory exists.
9 | if (!fs.existsSync(pipesPath)) return [];
10 |
11 | 	// Get all directory names in the pipes path.
12 | const pipeNames = await fs.promises.readdir(pipesPath);
13 |
14 | 	// Strip the .ts extension from each pipe file name.
15 | const slugifiedPipes = pipeNames.map(pipeName =>
16 | pipeName.replace('.ts', '')
17 | );
18 |
19 | // Return the pipe names.
20 | return slugifiedPipes;
21 | }
22 |
--------------------------------------------------------------------------------
/packages/baseai/src/utils/is-tool-present.ts:
--------------------------------------------------------------------------------
1 | import slugify from '@sindresorhus/slugify';
2 |
3 | /**
4 | * Checks if a tool with the given name is present in the list of all tools.
5 | *
6 | * @param {Object} params - The parameters for the function.
7 | * @param {string} params.name - The name of the tool to check for.
8 | * @param {string[]} params.allTools - The list of all tools.
9 | * @returns {boolean} - Returns `true` if the tool is present, otherwise `false`.
10 | */
11 | export function isToolPresent({
12 | name,
13 | allTools
14 | }: {
15 | name: string;
16 | allTools: string[];
17 | }) {
18 | return allTools.some(tool => {
19 | return slugify(tool) === slugify(name);
20 | });
21 | }
22 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/MapPinIcon.tsx:
--------------------------------------------------------------------------------
1 | export function MapPinIcon(props: React.ComponentPropsWithoutRef<'svg'>) {
2 | return (
3 |
4 |
10 |
16 |
17 |
18 | );
19 | }
20 |
--------------------------------------------------------------------------------
/examples/remix/app/routes/api.langbase.pipes.run-memory.ts:
--------------------------------------------------------------------------------
1 | import type {ActionFunction} from '@remix-run/node';
2 | import getPipeWithMemory from '~/../baseai/pipes/pipe-with-memory';
3 | import {Pipe} from '@baseai/core';
4 |
5 | export const action: ActionFunction = async ({request}) => {
6 | const runOptions = await request.json();
7 | console.log('runOptions:', runOptions);
8 |
9 | // 1. Initiate the Pipe.
10 | const pipe = new Pipe(getPipeWithMemory());
11 |
12 | // 2. Run the pipe with user messages and other run options.
13 | const {stream} = await pipe.run(runOptions);
14 |
15 | // 3. Return the ReadableStream directly.
16 | return new Response(stream, {
17 | status: 200,
18 | });
19 | };
20 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/CalendarIcon.tsx:
--------------------------------------------------------------------------------
1 | export function CalendarIcon(props: React.ComponentPropsWithoutRef<'svg'>) {
2 | return (
3 |
4 |
10 |
15 |
21 |
22 | );
23 | }
24 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/ClipboardIcon.tsx:
--------------------------------------------------------------------------------
1 | export function ClipboardIcon(props: React.ComponentPropsWithoutRef<'svg'>) {
2 | return (
3 |
4 |
10 |
15 |
16 | );
17 | }
18 |
--------------------------------------------------------------------------------
/examples/agents/readme-writer-agent/utils/exit-server.ts:
--------------------------------------------------------------------------------
1 | import * as p from '@clack/prompts';
2 | import {spawn} from 'child_process';
3 |
4 | export async function exitServer() {
5 | const spinner = p.spinner();
6 | spinner.start('Stopping AI server...');
7 | // Spawn the server process detached from the parent
8 | const serverProcess = spawn('npx', ['kill-port', '9000'], {
9 | // Detach the process so it runs independently
10 | detached: true,
11 | // Pipe stdout/stderr to files or ignore them
12 | stdio: 'ignore',
13 | shell: process.platform === 'win32',
14 | });
15 |
16 | // Unref the process so it won't keep the parent alive
17 | serverProcess.unref();
18 | spinner.stop('AI server stopped.');
19 | }
20 |
--------------------------------------------------------------------------------
/examples/astro/src/layouts/Layout.astro:
--------------------------------------------------------------------------------
1 | ---
2 | import './globals.css';
3 |
4 | interface Props {
5 | title: string;
6 | }
7 |
8 | const {title} = Astro.props;
9 | ---
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 | {title}
19 |
20 |
21 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/tools/eslint-config/library.js:
--------------------------------------------------------------------------------
1 | const {resolve} = require('node:path');
2 |
3 | const project = resolve(process.cwd(), 'tsconfig.json');
4 |
5 | /** @type {import("eslint").Linter.Config} */
6 | module.exports = {
7 | extends: ['eslint:recommended', 'prettier', 'turbo'],
8 | plugins: ['only-warn'],
9 | globals: {
10 | React: true,
11 | JSX: true,
12 | },
13 | env: {
14 | node: true,
15 | },
16 | settings: {
17 | 'import/resolver': {
18 | typescript: {
19 | project,
20 | },
21 | },
22 | },
23 | ignorePatterns: [
24 | // Ignore dotfiles
25 | '.*.js',
26 | 'node_modules/',
27 | 'dist/',
28 | ],
29 | overrides: [
30 | {
31 | files: ['*.js?(x)', '*.ts?(x)'],
32 | },
33 | ],
34 | };
35 |
--------------------------------------------------------------------------------
/packages/baseai/src/memory/list.ts:
--------------------------------------------------------------------------------
1 | import { heading } from '@/utils/heading';
2 | import icons from '@/utils/icons';
3 | import { getAvailableMemories } from '@/utils/memory/get-available-memories';
4 | import * as p from '@clack/prompts';
5 |
6 | export async function listMemory() {
7 | const availableMemories = await getAvailableMemories();
8 | if (availableMemories.length === 0) {
9 | p.log.message('No memory available.');
10 | return;
11 | }
12 | p.intro(
13 | heading({
14 | text: 'MEMORY',
15 | sub: 'List of all available memory sets'
16 | })
17 | );
18 | console.log('');
19 | availableMemories.forEach(item => {
20 | console.log(`${icons.memory} ${item}`);
21 | });
22 |
23 | process.exit(0);
24 | }
25 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/IconApi.tsx:
--------------------------------------------------------------------------------
1 | export function IconApi(props: JSX.IntrinsicElements['svg']) {
2 | return (
3 |
11 |
18 |
19 | );
20 | }
21 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/ui/iconists/icon-api.tsx:
--------------------------------------------------------------------------------
1 | export function IconApi(props: JSX.IntrinsicElements['svg']) {
2 | return (
3 |
11 |
18 |
19 | );
20 | }
21 |
--------------------------------------------------------------------------------
/examples/astro/src/pages/api/langbase/pipes/run-stream.ts:
--------------------------------------------------------------------------------
1 | import {mapMetaEnvToProcessEnv} from './../../../../lib/utils';
2 | import getPipeSummary from '../../../../../baseai/pipes/summary';
3 | import {Pipe} from '@baseai/core';
4 | import type {APIRoute} from 'astro';
5 |
6 | export const POST: APIRoute = async ({request}) => {
7 | const runOptions = await request.json();
8 |
9 | // 1. Initiate the Pipe.
10 | const pipe = new Pipe(getPipeSummary());
11 |
12 | // 2. Run the Pipe.
13 | const {stream, threadId} = await pipe.run(runOptions);
14 |
15 | // 3. Return the ReadableStream directly.
16 | return new Response(stream, {
17 | status: 200,
18 | headers: {
19 | 'lb-thread-id': threadId ?? '',
20 | },
21 | });
22 | };
23 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/mdx/spoiler.tsx:
--------------------------------------------------------------------------------
1 | import {
2 | Accordion,
3 | AccordionContent,
4 | AccordionItem,
5 | AccordionTrigger
6 | } from '@/components/ui/accordion';
7 |
8 | export function Spoiler({
9 | title,
10 | children
11 | }: {
12 | title: string;
13 | children: React.ReactNode;
14 | }) {
15 | return (
16 |
21 |
22 |
23 | {title}
24 |
25 | {children}
26 |
27 |
28 | );
29 | }
30 |
--------------------------------------------------------------------------------
/examples/astro/src/pages/api/langbase/pipes/run-memory.ts:
--------------------------------------------------------------------------------
1 | import {mapMetaEnvToProcessEnv} from './../../../../lib/utils';
2 | import getPipeWithMemory from '../../../../../baseai/pipes/pipe-with-memory';
3 | import {Pipe} from '@baseai/core';
4 | import type {APIRoute} from 'astro';
5 |
6 | export const POST: APIRoute = async ({request}) => {
7 | const runOptions = await request.json();
8 |
9 | // 1. Initiate the Pipe.
10 | const pipe = new Pipe(getPipeWithMemory());
11 |
12 | // 2. Run the Pipe.
13 | const {stream, threadId} = await pipe.run(runOptions);
14 |
15 | // 3. Return the ReadableStream directly.
16 | return new Response(stream, {
17 | status: 200,
18 | headers: {
19 | 'lb-thread-id': threadId ?? '',
20 | },
21 | });
22 | };
23 |
--------------------------------------------------------------------------------
/tools/eslint-config/next.js:
--------------------------------------------------------------------------------
1 | const {resolve} = require('node:path');
2 |
3 | const project = resolve(process.cwd(), 'tsconfig.json');
4 |
5 | /** @type {import("eslint").Linter.Config} */
6 | module.exports = {
7 | extends: [
8 | 'eslint:recommended',
9 | 'prettier',
10 | require.resolve('@vercel/style-guide/eslint/next'),
11 | 'turbo',
12 | ],
13 | globals: {
14 | React: true,
15 | JSX: true,
16 | },
17 | env: {
18 | node: true,
19 | },
20 | plugins: ['only-warn'],
21 | settings: {
22 | 'import/resolver': {
23 | typescript: {
24 | project,
25 | },
26 | },
27 | },
28 | ignorePatterns: [
29 | // Ignore dotfiles
30 | '.*.js',
31 | 'node_modules/',
32 | ],
33 | overrides: [{files: ['*.js?(x)', '*.ts?(x)']}],
34 | };
35 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/content.tsx:
--------------------------------------------------------------------------------
1 | 'use client';
2 |
3 | import { MarkdownComponents } from '@/components/mdx';
4 | import { MDXRemoteSerializeResult } from 'next-mdx-remote';
5 | import { Wrapper } from './mdx/Wrapper';
6 | import dynamic from 'next/dynamic';
7 |
8 | const MDXRemote = dynamic(
9 | () => import('next-mdx-remote').then(mod => mod.MDXRemote),
10 | { ssr: false }
11 | );
12 |
13 | export default function Content({
14 | content
15 | }: {
16 | content?: MDXRemoteSerializeResult<
17 | Record,
18 | Record
19 | >;
20 | }) {
21 |
22 |
23 | return (
24 |
25 | {content && (
26 |
27 | )}
28 |
29 | );
30 | }
31 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/IconKeyset.tsx:
--------------------------------------------------------------------------------
1 | export function IconKeyset(props: JSX.IntrinsicElements['svg']) {
2 | return (
3 |
11 |
17 |
18 | );
19 | }
20 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/scripts/remove-runtime.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 |
3 | function getPath(type) {
4 | return [
5 | `${process.cwd()}/src/app/${type}/[section]/page.tsx`,
6 | `${process.cwd()}/src/app/${type}/[section]/[slug]/page.tsx`
7 | ];
8 | }
9 |
10 | async function main() {
11 | const runTime = `export const runtime = 'edge';\n\n`;
12 |
13 | getPath('docs').forEach(async path => {
14 | const docContent = fs.readFileSync(path, 'utf-8');
15 |
16 | await fs.promises.writeFile(path, docContent.replace(runTime, ''));
17 | });
18 |
19 | getPath('learn').forEach(async path => {
20 | const learnContent = fs.readFileSync(path, 'utf-8');
21 | await fs.promises.writeFile(path, learnContent.replace(runTime, ''));
22 | });
23 | }
24 |
25 | main();
26 |
--------------------------------------------------------------------------------
/packages/baseai/src/utils/memory/check-memory-exist.ts:
--------------------------------------------------------------------------------
1 | import * as p from '@clack/prompts';
2 | import fs from 'fs/promises';
3 | import path from 'path';
4 |
5 | export const checkMemoryExists = async (memoryName: string) => {
6 | const memoryDir = path.join(process.cwd(), 'baseai', 'memory', memoryName);
7 | const indexFilePath = path.join(memoryDir, 'index.ts');
8 |
9 | try {
10 | await fs.access(memoryDir);
11 | } catch (error) {
12 | p.cancel(`Memory '${memoryName}' does not exist.`);
13 | process.exit(1);
14 | }
15 |
16 | try {
17 | await fs.access(indexFilePath);
18 | } catch (error) {
19 | p.cancel(
20 | `Index file for memory '${memoryName}/index.ts' does not exist.`
21 | );
22 | process.exit(1);
23 | }
24 |
25 | return true;
26 | };
27 |
--------------------------------------------------------------------------------
/examples/nodejs/baseai/pipes/summary.ts:
--------------------------------------------------------------------------------
1 | import {PipeI} from '@baseai/core';
2 |
3 | const buildPipe = (): PipeI => ({
4 | apiKey: process.env.LANGBASE_API_KEY!, // Replace with your API key https://langbase.com/docs/api-reference/api-keys
5 | name: 'summary',
6 | description: '',
7 | status: 'private',
8 | model: 'openai:gpt-4o-mini',
9 | stream: true,
10 | json: false,
11 | store: true,
12 | moderate: true,
13 | top_p: 1,
14 | max_tokens: 1000,
15 | temperature: 0.7,
16 | presence_penalty: 1,
17 | frequency_penalty: 1,
18 | stop: [],
19 | tool_choice: 'auto',
20 | parallel_tool_calls: false,
21 | messages: [{role: 'system', content: `You are a helpful AI assistant.`}],
22 | variables: [],
23 | memory: [],
24 | tools: [],
25 | });
26 |
27 | export default buildPipe;
28 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/FolderIcon.tsx:
--------------------------------------------------------------------------------
1 | export function FolderIcon(props: React.ComponentPropsWithoutRef<'svg'>) {
2 | return (
3 |
4 |
10 |
14 |
20 |
21 | );
22 | }
23 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/Prose.tsx:
--------------------------------------------------------------------------------
1 | import clsx from 'clsx';
2 |
3 | export function Prose({
4 | as,
5 | className,
6 | ...props
7 | }: Omit, 'as' | 'className'> & {
8 | as?: T;
9 | className?: string;
10 | }) {
11 | let Component = as ?? 'div';
12 |
13 | return (
14 | *)` is used to select all direct children without an increase in specificity like you'd get from just `& > *`
19 | '[html_:where(&>*)]:mx-auto [html_:where(&>*)]:max-w-2xl [html_:where(&>*)]:lg:mx-[calc(50%-min(50%,theme(maxWidth.lg)))] [html_:where(&>*)]:lg:max-w-3xl'
20 | )}
21 | {...props}
22 | />
23 | );
24 | }
25 |
--------------------------------------------------------------------------------
/examples/astro/src/lib/utils.ts:
--------------------------------------------------------------------------------
1 | import {type ClassValue, clsx} from 'clsx';
2 | import {twMerge} from 'tailwind-merge';
3 |
4 | export function cn(...inputs: ClassValue[]) {
5 | return twMerge(clsx(inputs));
6 | }
7 |
8 | /**
9 | * Maps environment variables from `import.meta.env` to `process.env`.
10 | *
11 | * This function iterates over all keys in `import.meta.env` and assigns
12 | * each value to the corresponding key in `process.env`. This is useful
13 | * for ensuring that environment variables are accessible in a Node.js
14 | * environment when using tools like Vite or Astro that provide `import.meta.env`.
15 | */
16 | export function mapMetaEnvToProcessEnv() {
17 | Object.keys(import.meta.env).forEach(key => {
18 | process.env[key] = import.meta.env[key];
19 | });
20 | }
21 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/styles/tailwind.css:
--------------------------------------------------------------------------------
1 | @layer base {
2 | :root {
3 | /* --shiki-color-text: theme('colors.white');
4 | --shiki-token-constant: theme('colors.emerald.300');
5 | --shiki-token-string: theme('colors.emerald.300');
6 | --shiki-token-comment: theme('colors.zinc.500');
7 | --shiki-token-keyword: theme('colors.sky.300');
8 | --shiki-token-parameter: theme('colors.pink.300');
9 | --shiki-token-function: theme('colors.violet.300');
10 | --shiki-token-string-expression: theme('colors.emerald.300');
11 | --shiki-token-punctuation: theme('colors.zinc.200'); */
12 | }
13 |
14 | [inert] ::-webkit-scrollbar {
15 | display: none;
16 | }
17 | }
18 |
19 | @tailwind base;
20 | @tailwind components;
21 | @tailwind utilities;
22 | @import './global.css';
23 |
--------------------------------------------------------------------------------
/examples/astro/public/favicon.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
9 |
10 |
--------------------------------------------------------------------------------
/examples/nextjs/app/api/langbase/pipes/run-tool-stream/route.ts:
--------------------------------------------------------------------------------
1 | import pipeWithToolsStream from '@/baseai/pipes/pipe-with-tool-stream';
2 | import {Pipe, RunResponseStream} from '@baseai/core';
3 | import {NextRequest} from 'next/server';
4 |
5 | export async function POST(req: NextRequest) {
6 | const runOptions = await req.json();
7 |
8 | // 1. Initiate the Pipe.
9 | const pipe = new Pipe(pipeWithToolsStream());
10 |
11 | // 2. Run the pipe with user messages and other run options.
12 | let {stream, threadId} = (await pipe.run({
13 | ...runOptions,
14 | stream: true,
15 | })) as unknown as RunResponseStream;
16 |
17 | // 3. Stream the response.
18 | return new Response(stream, {
19 | status: 200,
20 | headers: {
21 | 'lb-thread-id': threadId ?? '',
22 | },
23 | });
24 | }
25 |
--------------------------------------------------------------------------------
/examples/nextjs/baseai/pipes/summary.ts:
--------------------------------------------------------------------------------
1 | import {PipeI} from '@baseai/core';
2 |
3 | const pipeSummary = (): PipeI => ({
4 | apiKey: process.env.LANGBASE_API_KEY!,
5 | name: 'summary',
6 | description: 'AI Summary agent',
7 | status: 'private',
8 | model: 'openai:gpt-4o-mini',
9 | stream: true,
10 | json: false,
11 | store: true,
12 | moderate: true,
13 | top_p: 1,
14 | max_tokens: 100,
15 | temperature: 0.7,
16 | presence_penalty: 1,
17 | frequency_penalty: 1,
18 | stop: [],
19 | tool_choice: 'auto',
20 | parallel_tool_calls: false,
21 | messages: [
22 | {
23 | role: 'system',
24 | content: `You are a helpful AI assistant. Make everything Less wordy.`,
25 | },
26 | ],
27 | variables: [],
28 | tools: [],
29 | memory: [],
30 | });
31 |
32 | export default pipeSummary;
33 |
--------------------------------------------------------------------------------
/examples/remix/app/routes/demo.chat-simple.tsx:
--------------------------------------------------------------------------------
1 | import { MetaFunction } from '@remix-run/node';
2 | import ChatSimple from '~/components/chat-simple';
3 | import GoHome from '~/components/ui/go-home';
4 |
5 | export const meta: MetaFunction = () => {
6 | return [
7 | { title: 'Simple Chat Pipe ⌘' },
8 | { name: "description", content: "Chat with the AI agent" },
9 | ];
10 | };
11 |
12 | export default function Page() {
13 | return (
14 |
15 |
16 |
17 |
18 | `usePipe()`: Chat
19 |
20 |
21 | Chat with the AI agent
22 |
23 |
24 |
25 | );
26 | }
27 |
--------------------------------------------------------------------------------
/packages/baseai/src/utils/debug-mode.ts:
--------------------------------------------------------------------------------
1 | import * as p from '@clack/prompts';
2 | import { heading } from './heading';
3 |
4 | export default function debugMode(cli: any) {
5 | p.intro(
6 | heading({
7 | text: 'DEBUG MODE',
8 | sub: 'logs will be verbose...',
9 | dim: true
10 | })
11 | );
12 | console.log();
13 |
14 | p.intro(
15 | heading({
16 | text: 'cwd',
17 | dim: true
18 | })
19 | );
20 | console.log(process.cwd());
21 | console.log();
22 |
23 | p.intro(
24 | heading({
25 | text: 'cli.flags',
26 | dim: true
27 | })
28 | );
29 | console.log(cli.flags);
30 | console.log();
31 |
32 | p.intro(
33 | heading({
34 | text: 'cli.input',
35 | sub: 'commands',
36 | dim: true
37 | })
38 | );
39 | console.log(cli.input);
40 | console.log();
41 | }
42 |
--------------------------------------------------------------------------------
/.env.baseai.example:
--------------------------------------------------------------------------------
1 | # !! SERVER SIDE ONLY !!
2 | # Keep all your API keys secret — use only on the server side.
3 |
4 | # TODO: ADD: Both in your production and local env files.
5 | # Langbase API key for your User or Org account.
6 | # How to get this API key https://langbase.com/docs/api-reference/api-keys
7 | LANGBASE_API_KEY=
8 |
9 | # TODO: ADD: LOCAL ONLY. Add only to local env files.
10 | # Following keys are needed for local pipe runs. For providers you are using.
11 | # For Langbase, please add the key to your LLM keysets.
12 | # Read more: Langbase LLM Keysets https://langbase.com/docs/features/keysets
13 | OPENAI_API_KEY=
14 | ANTHROPIC_API_KEY=
15 | COHERE_API_KEY=
16 | FIREWORKS_API_KEY=
17 | GOOGLE_API_KEY=
18 | GROQ_API_KEY=
19 | MISTRAL_API_KEY=
20 | PERPLEXITY_API_KEY=
21 | TOGETHER_API_KEY=
22 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/hooks/use-copy-to-clipboard.tsx:
--------------------------------------------------------------------------------
1 | 'use client';
2 |
3 | import * as React from 'react';
4 |
5 | export interface useCopyToClipboardProps {
6 | timeout?: number;
7 | }
8 |
9 | export function useCopyToClipboard({
10 | timeout = 2000
11 | }: useCopyToClipboardProps) {
12 | const [isCopied, setIsCopied] = React.useState(false);
13 |
14 | const copyToClipboard = (value: string) => {
15 | if (typeof window === 'undefined' || !navigator.clipboard?.writeText) {
16 | return;
17 | }
18 |
19 | if (!value) {
20 | return;
21 | }
22 |
23 | navigator.clipboard.writeText(value).then(() => {
24 | setIsCopied(true);
25 |
26 | setTimeout(() => {
27 | setIsCopied(false);
28 | }, timeout);
29 | });
30 | };
31 |
32 | return { isCopied, copyToClipboard };
33 | }
34 |
--------------------------------------------------------------------------------
/examples/remix/app/routes/demo.memory.tsx:
--------------------------------------------------------------------------------
1 | import { MetaFunction } from '@remix-run/node';
2 | import PipeRunMemory from '~/components/pipe-run-with-memory';
3 | import GoHome from '~/components/ui/go-home';
4 |
5 | export const meta: MetaFunction = () => {
6 | return [
7 | { title: 'AI Agent with Memory ⌘' },
8 | { name: "description", content: "Run an AI agent with memory" },
9 | ];
10 | };
11 | export default function Page() {
12 | return (
13 |
14 |
15 |
16 |
17 | AI Agent Pipes: Memory
18 |
19 |
20 | Run a pipe with memory.
21 |
22 |
23 |
24 | );
25 | }
26 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/app/not-found.tsx:
--------------------------------------------------------------------------------
1 | 'use client';
2 |
3 | import { Button } from '@/components/Button';
4 | import { HeroPattern } from '@/components/HeroPattern';
5 | import { useRouter } from 'next/navigation';
6 |
7 | export default function NotFound() {
8 | const router = useRouter();
9 |
10 | return (
11 |
12 |
13 |
14 |
404
15 |
Page not found
16 |
Sorry, we couldn’t find the page you’re looking for.
17 |
router.push('/docs')}
19 | arrow="right"
20 | className="mt-8"
21 | >
22 | Back to docs
23 |
24 |
25 |
26 | );
27 | }
28 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/IconExperiments.tsx:
--------------------------------------------------------------------------------
1 | export function IconExperiments(props: JSX.IntrinsicElements['svg']) {
2 | return (
3 |
11 |
18 |
19 | );
20 | }
21 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/ui/iconists/icon-chat.tsx:
--------------------------------------------------------------------------------
1 | export function IconChat(props: JSX.IntrinsicElements['svg']) {
2 | return (
3 |
11 |
18 |
19 | );
20 | }
21 |
--------------------------------------------------------------------------------
/packages/baseai/types/config.ts:
--------------------------------------------------------------------------------
1 | // Define the specific log categories as a type
2 | export type LogCategories =
3 | | 'pipe'
4 | | 'pipe.completion'
5 | | 'pipe.request'
6 | | 'pipe.response'
7 | | 'tool'
8 | | 'tool.calls'
9 | | 'memory'
10 | | 'memory.similarChunks'
11 | | 'memory.augmentedContext';
12 |
13 | // Define a recursive type for nested categories
14 | type NestedCategories = {
15 | [key in LogCategories]?: boolean | NestedCategories;
16 | };
17 |
18 | // Logger config
19 | export type LoggerConfig = {
20 | isEnabled: boolean;
21 | logSensitiveData: boolean;
22 | } & NestedCategories;
23 |
24 | export interface MemoryConfig {
25 | useLocalEmbeddings: boolean;
26 | }
27 |
28 | export interface BaseAIConfig {
29 | log: LoggerConfig;
30 | memory: MemoryConfig;
31 | envFilePath: string;
32 | }
33 |
--------------------------------------------------------------------------------
/packages/core/types/config.ts:
--------------------------------------------------------------------------------
1 | // Define the specific log categories as a type
2 | export type LogCategories =
3 | | 'pipe'
4 | | 'pipe.completion'
5 | | 'pipe.request'
6 | | 'pipe.response'
7 | | 'tool'
8 | | 'tool.calls'
9 | | 'memory'
10 | | 'memory.similarChunks'
11 | | 'memory.augmentedContext';
12 |
13 | // Define a recursive type for nested categories
14 | type NestedCategories = {
15 | [key in LogCategories]?: boolean | NestedCategories;
16 | };
17 |
18 | // Logger config
19 | export type LoggerConfig = {
20 | isEnabled: boolean;
21 | logSensitiveData: boolean;
22 | } & NestedCategories;
23 |
24 | export interface BaseAIConfig {
25 | log: LoggerConfig;
26 | memory: {
27 | useLocalEmbeddings: boolean;
28 | };
29 | envFilePath: string;
30 | // Other configuration options can be added here
31 | }
32 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/scripts/set-runtime.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 |
3 | function getPath(type) {
4 | return [
5 | `${process.cwd()}/src/app/${type}/[section]/page.tsx`,
6 | `${process.cwd()}/src/app/${type}/[section]/[slug]/page.tsx`
7 | ];
8 | }
9 |
10 | async function main() {
11 | const runTime = `export const runtime = 'edge';\n\n`;
12 |
13 | getPath('docs').forEach(async path => {
14 | const file = fs.readFileSync(path, 'utf-8');
15 |
16 | if (!file.includes(runTime)) {
17 | await fs.promises.writeFile(path, runTime.concat(file));
18 | }
19 | });
20 |
21 | getPath('learn').forEach(async path => {
22 | const file = fs.readFileSync(path, 'utf-8');
23 | if (!file.includes(runTime)) {
24 | await fs.promises.writeFile(path, runTime.concat(file));
25 | }
26 | });
27 | }
28 |
29 | main();
30 |
--------------------------------------------------------------------------------
/examples/astro/.env.baseai.example:
--------------------------------------------------------------------------------
1 | # !! SERVER SIDE ONLY !!
2 | # Keep all your API keys secret — use only on the server side.
3 |
4 | # TODO: ADD: Both in your production and local env files.
5 | # Langbase API key for your User or Org account.
6 | # How to get this API key https://langbase.com/docs/api-reference/api-keys
7 | LANGBASE_API_KEY=
8 |
9 | # TODO: ADD: LOCAL ONLY. Add only to local env files.
10 | # Following keys are needed for local pipe runs. For providers you are using.
11 | # For Langbase, please add the key to your LLM keysets.
12 | # Read more: Langbase LLM Keysets https://langbase.com/docs/features/keysets
13 | OPENAI_API_KEY=
14 | ANTHROPIC_API_KEY=
15 | COHERE_API_KEY=
16 | FIREWORKS_API_KEY=
17 | GOOGLE_API_KEY=
18 | GROQ_API_KEY=
19 | MISTRAL_API_KEY=
20 | PERPLEXITY_API_KEY=
21 | TOGETHER_API_KEY=
22 |
--------------------------------------------------------------------------------
/examples/astro/baseai/tools/calculator.ts:
--------------------------------------------------------------------------------
1 | import * as math from 'mathjs';
2 |
3 | export async function calculator({expression}: {expression: string}) {
4 | return math.evaluate(expression);
5 | }
6 |
7 | const toolCalculator = () => ({
8 | run: calculator,
9 | type: 'function' as const,
10 | function: {
11 | name: 'calculator',
12 | description:
13 | `A tool that can evaluate mathematical expressions. ` +
14 | `Example expressions: ` +
15 | `'5.6 * (5 + 10.5)', '7.86 cm to inch', 'cos(80 deg) ^ 4'.`,
16 | parameters: {
17 | type: 'object',
18 | required: ['expression'],
19 | properties: {
20 | expression: {
21 | type: 'string',
22 | description: 'The mathematical expression to evaluate.',
23 | },
24 | },
25 | },
26 | },
27 | });
28 |
29 | export default toolCalculator;
30 |
--------------------------------------------------------------------------------
/examples/nextjs/baseai/tools/calculator.ts:
--------------------------------------------------------------------------------
1 | import * as math from 'mathjs';
2 |
3 | export async function calculator({expression}: {expression: string}) {
4 | return math.evaluate(expression);
5 | }
6 |
7 | const toolCalculator = () => ({
8 | run: calculator,
9 | type: 'function' as const,
10 | function: {
11 | name: 'calculator',
12 | description:
13 | `A tool that can evaluate mathematical expressions. ` +
14 | `Example expressions: ` +
15 | `'5.6 * (5 + 10.5)', '7.86 cm to inch', 'cos(80 deg) ^ 4'.`,
16 | parameters: {
17 | type: 'object',
18 | required: ['expression'],
19 | properties: {
20 | expression: {
21 | type: 'string',
22 | description: 'The mathematical expression to evaluate.',
23 | },
24 | },
25 | },
26 | },
27 | });
28 |
29 | export default toolCalculator;
30 |
--------------------------------------------------------------------------------
/examples/nodejs/.env.baseai.example:
--------------------------------------------------------------------------------
1 | # !! SERVER SIDE ONLY !!
2 | # Keep all your API keys secret — use only on the server side.
3 |
4 | # TODO: ADD: Both in your production and local env files.
5 | # Langbase API key for your User or Org account.
6 | # How to get this API key https://langbase.com/docs/api-reference/api-keys
7 | LANGBASE_API_KEY=
8 |
9 | # TODO: ADD: LOCAL ONLY. Add only to local env files.
10 | # Following keys are needed for local pipe runs. For providers you are using.
11 | # For Langbase, please add the key to your LLM keysets.
12 | # Read more: Langbase LLM Keysets https://langbase.com/docs/features/keysets
13 | OPENAI_API_KEY=
14 | ANTHROPIC_API_KEY=
15 | COHERE_API_KEY=
16 | FIREWORKS_API_KEY=
17 | GOOGLE_API_KEY=
18 | GROQ_API_KEY=
19 | MISTRAL_API_KEY=
20 | PERPLEXITY_API_KEY=
21 | TOGETHER_API_KEY=
22 |
--------------------------------------------------------------------------------
/examples/remix/.env.baseai.example:
--------------------------------------------------------------------------------
1 | # !! SERVER SIDE ONLY !!
2 | # Keep all your API keys secret — use only on the server side.
3 |
4 | # TODO: ADD: Both in your production and local env files.
5 | # Langbase API key for your User or Org account.
6 | # How to get this API key https://langbase.com/docs/api-reference/api-keys
7 | LANGBASE_API_KEY=
8 |
9 | # TODO: ADD: LOCAL ONLY. Add only to local env files.
10 | # Following keys are needed for local pipe runs. For providers you are using.
11 | # For Langbase, please add the key to your LLM keysets.
12 | # Read more: Langbase LLM Keysets https://langbase.com/docs/features/keysets
13 | OPENAI_API_KEY=
14 | ANTHROPIC_API_KEY=
15 | COHERE_API_KEY=
16 | FIREWORKS_API_KEY=
17 | GOOGLE_API_KEY=
18 | GROQ_API_KEY=
19 | MISTRAL_API_KEY=
20 | PERPLEXITY_API_KEY=
21 | TOGETHER_API_KEY=
22 |
--------------------------------------------------------------------------------
/examples/remix/app/routes/demo.pipe-run.tsx:
--------------------------------------------------------------------------------
1 | import { MetaFunction } from '@remix-run/node';
2 | import PipeRunExample from '~/components/pipe-run';
3 | import GoHome from '~/components/ui/go-home';
4 |
5 |
6 | export const meta: MetaFunction = () => {
7 | return [
8 | { title: 'Pipe ⌘ ' },
9 | { name: "description", content: "Run a pipe" },
10 | ];
11 | };
12 |
13 | export default function Page() {
14 | return (
15 |
16 |
17 |
18 |
19 | ⌘ Langbase AI Agent Pipe: Run
20 |
21 |
22 |
23 | Run a pipe to generate a text completion
24 |
25 |
26 |
27 |
28 | );
29 | }
30 |
--------------------------------------------------------------------------------
/examples/remix/baseai/tools/calculator.ts:
--------------------------------------------------------------------------------
1 | import * as math from 'mathjs';
2 |
3 | export async function calculator({expression}: {expression: string}) {
4 | return math.evaluate(expression);
5 | }
6 |
7 | const toolCalculator = () => ({
8 | run: calculator,
9 | type: 'function' as const,
10 | function: {
11 | name: 'calculator',
12 | description:
13 | `A tool that can evaluate mathematical expressions. ` +
14 | `Example expressions: ` +
15 | `'5.6 * (5 + 10.5)', '7.86 cm to inch', 'cos(80 deg) ^ 4'.`,
16 | parameters: {
17 | type: 'object',
18 | required: ['expression'],
19 | properties: {
20 | expression: {
21 | type: 'string',
22 | description: 'The mathematical expression to evaluate.',
23 | },
24 | },
25 | },
26 | },
27 | });
28 |
29 | export default toolCalculator;
30 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/app/docs/page.tsx:
--------------------------------------------------------------------------------
1 | import Content from '@/components/content';
2 | import { getDocsBySlug } from '@/lib/get-docs-by-slug';
3 | import { ContentT } from '@/types/markdown';
4 |
5 | export default async function DocPage() {
6 | let content: ContentT;
7 |
8 | if (process.env.NODE_ENV === 'production') {
9 | const data = await getDocsBySlug({
10 | section: 'docs',
11 | slug: 'index'
12 | });
13 |
14 | content = data.content;
15 | } else {
16 | // dynamically import the file
17 | const { getContentBySlugOnDev } = await import('@/lib/get-content-by-slug-on-dev');
18 |
19 | const data = await getContentBySlugOnDev({
20 | type: 'docs',
21 | slug: 'index',
22 | section: 'docs',
23 | });
24 |
25 | content = data.content;
26 | }
27 |
28 | return ;
29 | }
30 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/OpenLink.tsx:
--------------------------------------------------------------------------------
1 | export function OpenLink(props: React.ComponentPropsWithoutRef<'svg'>) {
2 | return (
3 |
10 |
17 |
18 | );
19 | }
20 |
--------------------------------------------------------------------------------
/examples/remix/baseai/pipes/summary.ts:
--------------------------------------------------------------------------------
1 | import {Pipe} from '../../../../packages/core/types/pipes';
2 |
3 | const getPipeSummary = (): Pipe => ({
4 | apiKey: process.env.LANGBASE_USER_API_KEY!,
5 | name: 'summary',
6 | description: 'AI Summary agent',
7 | status: 'private',
8 | model: 'openai:gpt-4o-mini',
9 | stream: true,
10 | json: false,
11 | store: true,
12 | moderate: true,
13 | top_p: 1,
14 | max_tokens: 100,
15 | temperature: 0.7,
16 | presence_penalty: 1,
17 | frequency_penalty: 1,
18 | stop: [],
19 | tool_choice: 'auto',
20 | parallel_tool_calls: false,
21 | messages: [
22 | {
23 | role: 'system',
24 | content: `You are a helpful AI assistant. Make everything Less wordy.`,
25 | },
26 | ],
27 | variables: [],
28 | tools: [],
29 | memory: [],
30 | });
31 |
32 | export default getPipeSummary;
33 |
--------------------------------------------------------------------------------
/packages/baseai/src/dev/providers/groq/chatComplete.ts:
--------------------------------------------------------------------------------
1 | import type { ProviderConfig } from 'types/providers';
2 | export const GroqChatCompleteConfig: ProviderConfig = {
3 | model: {
4 | param: 'model',
5 | required: true,
6 | default: 'mixtral-8x7b-32768'
7 | },
8 | messages: {
9 | param: 'messages',
10 | default: ''
11 | },
12 | max_tokens: {
13 | param: 'max_tokens',
14 | default: 100,
15 | min: 0
16 | },
17 | temperature: {
18 | param: 'temperature',
19 | default: 1,
20 | min: 0,
21 | max: 2
22 | },
23 | top_p: {
24 | param: 'top_p',
25 | default: 1,
26 | min: 0,
27 | max: 1
28 | },
29 | stream: {
30 | param: 'stream',
31 | default: false
32 | },
33 | stop: {
34 | param: 'stop'
35 | },
36 | n: {
37 | param: 'n',
38 | default: 1,
39 | max: 1,
40 | min: 1
41 | }
42 | };
43 |
--------------------------------------------------------------------------------
/.github/.roadmap/playwright.yml:
--------------------------------------------------------------------------------
1 | name: Playwright Tests
2 | on:
3 | push:
4 | branches: [ main, master ]
5 | pull_request:
6 | branches: [ main, master ]
7 | jobs:
8 | test:
9 | timeout-minutes: 60
10 | runs-on: ubuntu-latest
11 | steps:
12 | - uses: actions/checkout@v4
13 | - uses: actions/setup-node@v4
14 | with:
15 | node-version: lts/*
16 | - name: Install dependencies
17 | run: npm install -g pnpm && pnpm install
18 | - name: Install Playwright Browsers
19 | run: pnpm exec playwright install --with-deps
20 | - name: Run Playwright tests
21 | run: pnpm exec playwright test
22 | - uses: actions/upload-artifact@v4
23 | if: always()
24 | with:
25 | name: playwright-report
26 | path: playwright-report/
27 | retention-days: 30
28 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/app/learn/page.tsx:
--------------------------------------------------------------------------------
1 | import Content from '@/components/content';
2 | import { getLearnBySlug } from '@/lib/get-learn-by-slug';
3 | import { ContentT } from '@/types/markdown';
4 |
5 | export default async function LearnPage() {
6 | let content: ContentT;
7 |
8 | if (process.env.NODE_ENV === 'production') {
9 | const data = await getLearnBySlug({
10 | slug: 'index',
11 | section: 'learn',
12 | });
13 |
14 | content = data.content;
15 | } else {
16 | // dynamically import the file
17 | const { getContentBySlugOnDev } = await import('@/lib/get-content-by-slug-on-dev');
18 |
19 | const data = await getContentBySlugOnDev({
20 | section: 'learn',
21 | slug: 'index',
22 | type: 'learn'
23 | });
24 |
25 | content = data.content;
26 | }
27 |
28 | return ;
29 | }
30 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/icon-up.tsx:
--------------------------------------------------------------------------------
1 | export function IconUp(props: JSX.IntrinsicElements['svg']) {
2 | return (
3 |
11 |
17 |
18 | );
19 | }
20 |
21 | export { IconUp as ChevronUpIcon };
22 |
--------------------------------------------------------------------------------
/examples/nextjs/.env.baseai.example:
--------------------------------------------------------------------------------
1 | # !! SERVER SIDE ONLY !!
2 | # Keep all your API keys secret — use only on the server side.
3 |
4 | # TODO: ADD: Both in your production and local env files.
5 | # Langbase API key for your User or Org account.
6 | # How to get this API key https://langbase.com/docs/api-reference/api-keys
7 | LANGBASE_API_KEY=
8 |
9 | # TODO: ADD: LOCAL ONLY. Add only to local env files.
10 | # Following keys are needed for local pipe runs. For providers you are using.
11 | # For Langbase, please add the key to your LLM keysets.
12 | # Read more: Langbase LLM Keysets https://langbase.com/docs/features/keysets
13 | OPENAI_API_KEY=
14 | ANTHROPIC_API_KEY=
15 | COHERE_API_KEY=
16 | FIREWORKS_API_KEY=
17 | GOOGLE_API_KEY=
18 | GROQ_API_KEY=
19 | MISTRAL_API_KEY=
20 | PERPLEXITY_API_KEY=
21 | TOGETHER_API_KEY=
22 | XAI_API_KEY=
23 |
--------------------------------------------------------------------------------
/examples/agents/it-systems-triage-agent/.env.baseai.example:
--------------------------------------------------------------------------------
1 | # !! SERVER SIDE ONLY !!
2 | # Keep all your API keys secret — use only on the server side.
3 |
4 | # TODO: ADD: Both in your production and local env files.
5 | # Langbase API key for your User or Org account.
6 | # How to get this API key https://langbase.com/docs/api-reference/api-keys
7 | LANGBASE_API_KEY=
8 |
9 | # TODO: ADD: LOCAL ONLY. Add only to local env files.
10 | # Following keys are needed for local pipe runs. For providers you are using.
11 | # For Langbase, please add the key to your LLM keysets.
12 | # Read more: Langbase LLM Keysets https://langbase.com/docs/features/keysets
13 | OPENAI_API_KEY=
14 | ANTHROPIC_API_KEY=
15 | COHERE_API_KEY=
16 | FIREWORKS_API_KEY=
17 | GOOGLE_API_KEY=
18 | GROQ_API_KEY=
19 | MISTRAL_API_KEY=
20 | PERPLEXITY_API_KEY=
21 | TOGETHER_API_KEY=
22 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/icon-down.tsx:
--------------------------------------------------------------------------------
1 | export function IconDown(props: JSX.IntrinsicElements['svg']) {
2 | return (
3 |
11 |
17 |
18 | );
19 | }
20 |
21 | export { IconDown as ChevronDownIcon };
22 |
--------------------------------------------------------------------------------
/examples/remix/app/routes/demo.tool-calling.tsx:
--------------------------------------------------------------------------------
1 | import { MetaFunction } from '@remix-run/node';
2 | import PipeRunToolExample from '~/components/pipe-run-with-tool';
3 | import GoHome from '~/components/ui/go-home';
4 |
5 | export const meta: MetaFunction = () => {
6 | return [
7 | { title: 'AI Agent Pipe Tool ⌘' },
8 | { name: "description", content: "Run an AI agent with a tool" },
9 | ];
10 | };
11 |
12 |
13 | export default function Page() {
14 | return (
15 |
16 |
17 |
18 |
19 | AI Agent Pipes: Tool Calling
20 |
21 |
22 | Run a pipe with tool calling.
23 |
24 |
25 |
26 | );
27 | }
28 |
--------------------------------------------------------------------------------
/examples/agents/readme-writer-agent/.env.baseai.example:
--------------------------------------------------------------------------------
1 | # !! SERVER SIDE ONLY !!
2 | # Keep all your API keys secret — use only on the server side.
3 |
4 | # TODO: ADD: Both in your production and local env files.
5 | # Langbase API key for your User or Org account.
6 | # How to get this API key https://langbase.com/docs/api-reference/api-keys
7 | LANGBASE_API_KEY=
8 |
9 | # TODO: ADD: LOCAL ONLY. Add only to local env files.
10 | # Following keys are needed for local pipe runs. For providers you are using.
11 | # For Langbase, please add the key to your LLM keysets.
12 | # Read more: Langbase LLM Keysets https://langbase.com/docs/features/keysets
13 | OPENAI_API_KEY=
14 | ANTHROPIC_API_KEY=
15 | COHERE_API_KEY=
16 | FIREWORKS_API_KEY=
17 | GOOGLE_API_KEY=
18 | GROQ_API_KEY=
19 | MISTRAL_API_KEY=
20 | PERPLEXITY_API_KEY=
21 | TOGETHER_API_KEY=
22 | XAI_API_KEY=
23 |
--------------------------------------------------------------------------------
/examples/agents/readme-writer-agent/utils/copy-project-files.ts:
--------------------------------------------------------------------------------
1 | import * as p from '@clack/prompts';
2 | import path from 'path';
3 | import {execAsync} from './exec-sync';
4 | import {handleError} from './handle-error';
5 |
6 | export async function copyProjectFiles({dirName}: {dirName: string}) {
7 | const spinner = p.spinner();
8 | spinner.start('Copying project files...');
9 |
10 | const source = process.cwd();
11 | const destination = path.join(
12 | dirName,
13 | 'baseai',
14 | 'memory',
15 | 'code-files',
16 | 'documents',
17 | );
18 |
19 | try {
20 | await execAsync(`rm -rf ${destination}`);
21 | await execAsync(`mkdir -p ${destination}`);
22 | await execAsync(`cp -rp ${source}/* ${destination}`);
23 | spinner.stop('Project files copied successfully.');
24 | } catch (error) {
25 | handleError({spinner, error});
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/ui/icons/info-circle.tsx:
--------------------------------------------------------------------------------
1 | export function IconInfoCircle(props: JSX.IntrinsicElements['svg']) {
2 | return (
3 |
11 |
17 |
18 | )
19 | }
20 |
--------------------------------------------------------------------------------
/examples/astro/baseai/pipes/summary.ts:
--------------------------------------------------------------------------------
1 | import type {PipeI} from '@baseai/core';
2 |
3 | const pipeSummary = (): PipeI => ({
4 | // Replace with your API key https://langbase.com/docs/api-reference/api-keys
5 | apiKey: process.env.LANGBASE_API_KEY!,
6 | name: 'summary',
7 | description: '',
8 | status: 'private',
9 | model: 'openai:gpt-4o-mini',
10 | stream: true,
11 | json: false,
12 | store: true,
13 | moderate: true,
14 | top_p: 1,
15 | max_tokens: 1000,
16 | temperature: 0.7,
17 | presence_penalty: 1,
18 | frequency_penalty: 1,
19 | stop: [],
20 | tool_choice: 'auto',
21 | parallel_tool_calls: false,
22 | messages: [
23 | {
24 | role: 'system',
25 | content: `You are a helpful AI assistant. Make everything Less wordy.`,
26 | },
27 | ],
28 | variables: [],
29 | memory: [],
30 | tools: [],
31 | });
32 |
33 | export default pipeSummary;
34 |
--------------------------------------------------------------------------------
/examples/nextjs/app/api/langbase/pipes/run/route.ts:
--------------------------------------------------------------------------------
1 | import {Pipe} from '@baseai/core';
2 | import {NextRequest} from 'next/server';
3 | import pipeSummary from '../../../../../baseai/pipes/summary';
4 |
5 | export async function POST(req: NextRequest) {
6 | const runOptions = await req.json();
7 |
8 | // 1. Initiate the Pipe.
9 | const pipe = new Pipe(pipeSummary());
10 |
11 | // 2. Run the pipe
12 | try {
13 | const result = await pipe.run(runOptions);
14 |
15 | // 3. Return the response stringified.
16 | return new Response(JSON.stringify(result));
17 | } catch (error: any) {
18 | // 4. Return the error response
19 |
20 | return new Response(
21 | JSON.stringify({
22 | error,
23 | }),
24 | {
25 | status: error.status || 500,
26 | headers: {
27 | 'Content-Type': 'application/json',
28 | },
29 | },
30 | );
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/mdx/Property.tsx:
--------------------------------------------------------------------------------
1 | export function Property({
2 | name,
3 | children,
4 | type
5 | }: {
6 | name: string;
7 | children: React.ReactNode;
8 | type?: string;
9 | }) {
10 | return (
11 |
12 |
13 | Name
14 |
15 | {name}
16 |
17 | {type && (
18 | <>
19 | Type
20 |
21 | {type}
22 |
23 | >
24 | )}
25 | Description
26 |
27 | {children}
28 |
29 |
30 |
31 | );
32 | }
33 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/mdx/sub-property.tsx:
--------------------------------------------------------------------------------
1 | import {
2 | Accordion,
3 | AccordionContent,
4 | AccordionItem,
5 | AccordionTrigger
6 | } from '@/components/ui/accordion';
7 | import { Property } from './Property';
8 |
9 | export function Sub({
10 | name,
11 | type,
12 | children
13 | }: {
14 | name: string;
15 | type: string;
16 | children: React.ReactNode;
17 | }) {
18 | return (
19 |
24 |
25 | {name}
26 |
27 |
28 | {children}
29 |
30 |
31 |
32 |
33 | );
34 | }
35 |
--------------------------------------------------------------------------------
/examples/nextjs/baseai/tools/pipe-call.ts:
--------------------------------------------------------------------------------
1 | import {Pipe} from '@baseai/core';
2 | import pipeSummary from '../pipes/summary';
3 |
4 | const runSummaryPipe = async ({prompt}: {prompt: string}) => {
5 | const pipe = new Pipe(pipeSummary());
6 | const result = await pipe.run({
7 | messages: [{role: 'user', content: `${prompt} — please max one line`}],
8 | });
9 |
10 | return result.completion;
11 | };
12 |
13 | const toolPipeCall = () => ({
14 | run: runSummaryPipe,
15 | type: 'function',
16 | function: {
17 | name: 'runSummaryPipe',
18 | description: `Call a pipe that can summarize text.`,
19 | parameters: {
20 | type: 'object',
21 | required: ['prompt'],
22 | properties: {
23 | prompt: {
24 | type: 'string',
25 | description: 'User input to summarize',
26 | },
27 | },
28 | },
29 | },
30 | });
31 |
32 | export default toolPipeCall;
33 |
--------------------------------------------------------------------------------
/examples/remix/app/routes/demo.pipe-run-stream.tsx:
--------------------------------------------------------------------------------
1 | import { MetaFunction } from '@remix-run/node';
2 | import PipeStreamExample from '~/components/pipe-stream';
3 | import GoHome from '~/components/ui/go-home';
4 |
5 | export const meta: MetaFunction = () => {
6 | return [
7 | { title: 'AI Agent Pipe Stream ⌘' },
8 | { name: "description", content: "AI Agent pipe to stream a text completion" },
9 | ];
10 | };
11 |
12 |
13 | export default function Page() {
14 | return (
15 |
16 |
17 |
18 |
19 | ⌘ Langbase AI Agent Pipe: Stream
20 |
21 |
22 | Stream a pipe to stream a text completion
23 |
24 |
25 |
26 | );
27 | }
28 |
--------------------------------------------------------------------------------
/examples/astro/baseai/tools/pipe-call.ts:
--------------------------------------------------------------------------------
1 | import {Pipe} from '@baseai/core';
2 | import getPipeSummary from '../pipes/summary';
3 |
4 | const runSummaryPipe = async ({prompt}: {prompt: string}) => {
5 | const pipe = new Pipe(getPipeSummary());
6 | const result = await pipe.run({
7 | messages: [{role: 'user', content: `${prompt} — please max one line`}],
8 | });
9 |
10 | return result.completion;
11 | };
12 |
13 | const toolPipeCall = () => ({
14 | run: runSummaryPipe,
15 | type: 'function',
16 | function: {
17 | name: 'runSummaryPipe',
18 | description: `Call a pipe that can summarize text.`,
19 | parameters: {
20 | type: 'object',
21 | required: ['prompt'],
22 | properties: {
23 | prompt: {
24 | type: 'string',
25 | description: 'User input to summarize',
26 | },
27 | },
28 | },
29 | },
30 | });
31 |
32 | export default toolPipeCall;
33 |
--------------------------------------------------------------------------------
/examples/remix/app/routes/demo.chat-advanced.tsx:
--------------------------------------------------------------------------------
1 | import { MetaFunction } from '@remix-run/node';
2 | import ChatAdvanced from '~/components/chat-advanced';
3 | import GoHome from '~/components/ui/go-home';
4 |
// Remix `meta` export: sets the document title and meta description
// shown for this advanced-chat demo route.
export const meta: MetaFunction = () => {
	return [
		{ title: 'Advanced Chat Pipe ⌘' },
		{ name: "description", content: "A kitchen sink example with all `usePipe()` chat features" },
	];
};
11 |
12 | export default function Page() {
13 | return (
14 |
15 |
16 |
17 |
18 | `usePipe()`: Chat Advanced
19 |
20 |
21 | A kitchen sink example with all `usePipe()` chat features
22 |
23 |
24 |
25 | );
26 | }
27 |
--------------------------------------------------------------------------------
/examples/remix/baseai/tools/pipe-call.ts:
--------------------------------------------------------------------------------
1 | import {Pipe} from '@baseai/core';
2 | import getPipeSummary from '../pipes/summary';
3 |
// Runs the summary pipe with the user's prompt (asking for at most one
// line) and returns only the completion text.
const runSummaryPipe = async ({prompt}: {prompt: string}) => {
	const pipe = new Pipe(getPipeSummary());
	const result = await pipe.run({
		messages: [{role: 'user', content: `${prompt} — please max one line`}],
	});

	return result.completion;
};
12 |
// OpenAI-style function-tool definition that exposes `runSummaryPipe`
// to the model as a callable tool. `parameters` is a JSON Schema
// describing the single required `prompt` argument.
const toolPipeCall = () => ({
	run: runSummaryPipe,
	type: 'function',
	function: {
		name: 'runSummaryPipe',
		description: `Call a pipe that can summarize text.`,
		parameters: {
			type: 'object',
			required: ['prompt'],
			properties: {
				prompt: {
					type: 'string',
					description: 'User input to summarize',
				},
			},
		},
	},
});

export default toolPipeCall;
33 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/HeroPattern.tsx:
--------------------------------------------------------------------------------
1 | export function HeroPattern() {
2 | return (
3 |
15 | );
16 | }
17 |
--------------------------------------------------------------------------------
/examples/nextjs/components/ui/input.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 |
3 | import {cn} from '@/lib/utils';
4 |
5 | export interface InputProps extends React.InputHTMLAttributes {}
6 |
7 | const Input = React.forwardRef(({className, type, ...props}, ref) => {
8 | return (
9 |
18 | );
19 | });
20 | Input.displayName = 'Input';
21 |
22 | export {Input};
23 |
--------------------------------------------------------------------------------
/examples/remix/app/routes/demo.pipe-run-pipes-as-tools.tsx:
--------------------------------------------------------------------------------
1 | import { MetaFunction } from '@remix-run/node';
2 | import PipeRunPipesAsTools from '~/components/pipe-run-pipes-as-tools';
3 | import GoHome from '~/components/ui/go-home';
4 |
// Remix `meta` export: sets the document title and meta description
// shown for this pipes-as-tools demo route.
export const meta: MetaFunction = () => {
	return [
		{ title: 'AI Agent with Pipes as Tools ⌘' },
		{ name: "description", content: "Run an AI agent with pipes as tools" },
	];
};
11 |
12 | export default function Page() {
13 | return (
14 |
15 |
16 |
17 |
18 | ⌘ Langbase: Composable Pipe Run
19 |
20 |
21 | Run a pipe that can call another pipe.
22 |
23 |
24 |
25 | );
26 | }
27 |
--------------------------------------------------------------------------------
/examples/remix/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "include": [
3 | "**/*.ts",
4 | "**/*.tsx",
5 | "**/.server/**/*.ts",
6 | "**/.server/**/*.tsx",
7 | "**/.client/**/*.ts",
8 | "**/.client/**/*.tsx"
9 | ],
10 | "compilerOptions": {
11 | "lib": ["DOM", "DOM.Iterable", "ES2022"],
12 | "types": ["@remix-run/node", "vite/client"],
13 | "isolatedModules": true,
14 | "esModuleInterop": true,
15 | "jsx": "react-jsx",
16 | "module": "ESNext",
17 | "moduleResolution": "Bundler",
18 | "resolveJsonModule": true,
19 | "target": "ES2022",
20 | "strict": true,
21 | "allowJs": true,
22 | "skipLibCheck": true,
23 | "forceConsistentCasingInFileNames": true,
24 | "baseUrl": ".",
25 | "paths": {
26 | "~/*": ["./app/*"]
27 | },
28 |
29 | // Vite takes care of building everything, not tsc.
30 | "noEmit": true
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/packages/baseai/src/utils/logger-utils.ts:
--------------------------------------------------------------------------------
1 | import * as p from '@clack/prompts';
2 | import type { LogCategories } from 'types/config';
3 | import Logger from './logger';
4 |
5 | let loggerInstance: Logger | null = null;
6 | let initializationPromise: Promise | null = null;
7 |
8 | export const initLogger = async (): Promise => {
9 | if (!initializationPromise) {
10 | initializationPromise = Logger.initialize();
11 | }
12 | loggerInstance = await initializationPromise;
13 | };
14 |
// Returns the initialized logger, or cancels the prompt session and
// exits the process when initLogger() has not been awaited yet.
const getLogger = (): Logger => {
	if (!loggerInstance) {
		p.cancel('Logger has not been initialized. Call initLogger() first.');
		process.exit(1);
	}
	return loggerInstance;
};
22 |
// Thin convenience wrapper: logs `value` under `category` with an
// optional header line, via the shared logger instance.
export const logger = (
	category: LogCategories,
	value?: unknown,
	logHeader?: string
) => {
	getLogger().log(category, value, logHeader);
};
30 |
--------------------------------------------------------------------------------
/examples/nextjs/baseai/pipes/pipe-with-tool.ts:
--------------------------------------------------------------------------------
1 | import {PipeI} from '@baseai/core';
2 | import toolCalculator from '../tools/calculator';
3 | import toolGetWeather from '../tools/weather';
4 |
// Pipe config factory for a non-streaming (stream: false) agent that can
// call the weather and calculator tools.
const pipeWithTools = (): PipeI => ({
	apiKey: process.env.LANGBASE_API_KEY!,
	name: 'pipe-with-tool',
	description: 'An AI agent pipe that can call tools',
	status: 'public',
	model: 'openai:gpt-4o-mini',
	stream: false,
	json: false,
	store: true,
	moderate: true,
	top_p: 1,
	max_tokens: 1000,
	temperature: 0.7,
	presence_penalty: 1,
	frequency_penalty: 1,
	stop: [],
	// Let the model decide when to call tools; tools may run in parallel.
	tool_choice: 'auto',
	parallel_tool_calls: true,
	messages: [{role: 'system', content: `You are a helpful AI assistant.`}],
	variables: [],
	memory: [],
	tools: [toolGetWeather(), toolCalculator()],
});
export default pipeWithTools;
29 |
--------------------------------------------------------------------------------
/examples/remix/app/components/ui/input.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 |
3 | import { cn } from '~/lib/utils';
4 |
5 | export interface InputProps extends React.InputHTMLAttributes { }
6 |
7 | const Input = React.forwardRef(({ className, type, ...props }, ref) => {
8 | return (
9 |
18 | );
19 | });
20 | Input.displayName = 'Input';
21 |
22 | export { Input };
23 |
--------------------------------------------------------------------------------
/examples/agents/readme-writer-agent/utils/start-baseai-server.ts:
--------------------------------------------------------------------------------
1 | import {exec, spawn} from 'child_process';
2 | import * as p from '@clack/prompts';
3 |
// Starts `npx baseai dev` as a detached background process so the AI
// server keeps running after this script exits. Resolves to `true`
// after a fixed 2s delay — the server is assumed ready by then (no
// health check is performed).
export async function startBaseAIDevServer() {
	const spinner = p.spinner();
	spinner.start('Starting AI server...');
	// Spawn the server process detached from the parent
	const serverProcess = spawn('npx', ['baseai', 'dev'], {
		// Detach the process so it runs independently
		detached: true,
		// Pipe stdout/stderr to files or ignore them
		stdio: 'ignore',
		// Windows needs a shell to resolve the `npx` shim.
		shell: process.platform === 'win32',
	});

	// Unref the process so it won't keep the parent alive
	serverProcess.unref();

	// Wait a bit for the server to start
	return new Promise(resolve => {
		setTimeout(() => {
			spinner.stop('AI server started.');
			resolve(true);
		}, 2000);
	});
}
27 |
--------------------------------------------------------------------------------
/examples/astro/baseai/pipes/pipe-with-tool.ts:
--------------------------------------------------------------------------------
1 | import type {PipeI} from '@baseai/core';
2 | import toolCalculator from '../tools/calculator';
3 | import toolGetWeather from '../tools/weather';
4 |
// Pipe config factory for a streaming agent that can call the weather
// and calculator tools.
const getPipeWithTool = (): PipeI => ({
	apiKey: process.env.LANGBASE_API_KEY!,
	name: 'pipe-with-tool',
	description: 'An AI agent pipe that can call tools',
	status: 'public',
	model: 'openai:gpt-4o-mini',
	stream: true,
	json: false,
	store: true,
	moderate: true,
	top_p: 1,
	max_tokens: 1000,
	temperature: 0.7,
	presence_penalty: 1,
	frequency_penalty: 1,
	stop: [],
	// Let the model decide when to call tools; tools may run in parallel.
	tool_choice: 'auto',
	parallel_tool_calls: true,
	messages: [{role: 'system', content: `You are a helpful AI assistant.`}],
	variables: [],
	memory: [],
	tools: [toolGetWeather(), toolCalculator()],
});
export default getPipeWithTool;
29 |
--------------------------------------------------------------------------------
/examples/nodejs/examples/pipe.stream.text.ts:
--------------------------------------------------------------------------------
1 | import 'dotenv/config';
2 | import {getRunner, Pipe} from '@baseai/core';
3 | import pipeSummary from '../baseai/pipes/summary';
4 |
// Pipe instance built from the local summary pipe config.
const pipe = new Pipe(pipeSummary());

// Streams a completion for a fixed "Hello" prompt and prints chunks to
// stdout via runner event listeners.
async function main() {
	// threadId and rawResponse are destructured to illustrate the full
	// run() result shape; only `stream` is used below.
	const {stream, threadId, rawResponse} = await pipe.run({
		messages: [{role: 'user', content: 'Hello'}],
		stream: true,
	});

	// Convert the stream to a stream runner.
	const runner = getRunner(stream);

	// Method 1: Using event listeners
	runner.on('connect', () => {
		console.log('Stream started.\n');
	});

	runner.on('content', content => {
		process.stdout.write(content);
	});

	runner.on('end', () => {
		console.log('\nStream ended.');
	});

	runner.on('error', error => {
		console.error('Error:', error);
	});
}

main();
35 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/UsersIcon.tsx:
--------------------------------------------------------------------------------
1 | export function UsersIcon(props: React.ComponentPropsWithoutRef<'svg'>) {
2 | return (
3 |
4 |
10 |
15 |
21 |
26 |
27 | );
28 | }
29 |
--------------------------------------------------------------------------------
/examples/nextjs/baseai/pipes/pipe-with-tool-stream.ts:
--------------------------------------------------------------------------------
1 | import {PipeI} from '@baseai/core';
2 | import toolCalculator from '../tools/calculator';
3 | import toolGetWeather from '../tools/weather';
4 |
// Pipe config factory for a streaming (stream: true) agent that can call
// the weather and calculator tools.
// NOTE(review): `name` is 'pipe-with-tool', identical to the name used by
// the non-streaming pipe-with-tool.ts config — confirm whether this
// stream variant should use a distinct pipe name.
const pipeWithToolsStream = (): PipeI => ({
	apiKey: process.env.LANGBASE_API_KEY!,
	name: 'pipe-with-tool',
	description: 'An AI agent pipe that can call tools',
	status: 'public',
	model: 'openai:gpt-4o-mini',
	stream: true,
	json: false,
	store: true,
	moderate: true,
	top_p: 1,
	max_tokens: 1000,
	temperature: 0.7,
	presence_penalty: 1,
	frequency_penalty: 1,
	stop: [],
	tool_choice: 'auto',
	parallel_tool_calls: true,
	messages: [{role: 'system', content: `You are a helpful AI assistant.`}],
	variables: [],
	memory: [],
	tools: [toolGetWeather(), toolCalculator()],
});
export default pipeWithToolsStream;
29 |
--------------------------------------------------------------------------------
/examples/nodejs/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "example-nodejs",
3 | "private": true,
4 | "version": "0.0.0",
5 | "description": "Nodejs example on how to use BaseAI",
6 | "type": "module",
7 | "main": "index.js",
8 | "scripts": {
9 | "baseai": "baseai",
10 | "pipe.run": "npx tsx ./examples/pipe.run.ts",
11 | "pipe.run.stream": "npx tsx ./examples/pipe.run.stream.ts",
12 | "pipe.run.stream.loop": "npx tsx ./examples/pipe.run.stream.loop.ts",
13 | "pipe.generate.text": "npx tsx ./examples/pipe.generate.text.ts",
14 | "pipe.stream.text": "npx tsx ./examples/pipe.stream.text.ts"
15 | },
16 | "keywords": [],
17 | "author": "Ahmad Awais (https://twitter.com/MrAhmadAwais)",
18 | "license": "UNLICENSED",
19 | "dependencies": {
20 | "@baseai/core": "^0.9.43",
21 | "dotenv": "^16.4.5"
22 | },
23 | "devDependencies": {
24 | "baseai": "^0.9.44",
25 | "tsx": "^4.19.0"
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/tools/eslint-config/react-internal.js:
--------------------------------------------------------------------------------
const {resolve} = require('node:path');

// Resolve the consumer project's tsconfig so the TS import resolver
// can map path aliases correctly.
const project = resolve(process.cwd(), 'tsconfig.json');

/*
 * This is a custom ESLint configuration for use with
 * internal (bundled by their consumer) libraries
 * that utilize React.
 */

/** @type {import("eslint").Linter.Config} */
module.exports = {
	extends: ['eslint:recommended', 'prettier', 'turbo'],
	// Downgrade all lint errors to warnings.
	plugins: ['only-warn'],
	globals: {
		React: true,
		JSX: true,
	},
	env: {
		browser: true,
	},
	settings: {
		'import/resolver': {
			typescript: {
				project,
			},
		},
	},
	ignorePatterns: [
		// Ignore dotfiles
		'.*.js',
		'node_modules/',
		'dist/',
	],
	overrides: [
		// Force ESLint to detect .tsx files
		{files: ['*.js?(x)', '*.ts?(x)']},
	],
};
40 |
--------------------------------------------------------------------------------
/examples/astro/src/components/ui/input.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 | import {cn} from '../../lib/utils';
3 |
4 | export interface InputProps
5 | extends React.InputHTMLAttributes {}
6 |
7 | const Input = React.forwardRef(
8 | ({className, type, ...props}, ref) => {
9 | return (
10 |
19 | );
20 | },
21 | );
22 | Input.displayName = 'Input';
23 |
24 | export {Input};
25 |
--------------------------------------------------------------------------------
/examples/remix/baseai/pipes/pipe-with-tool.ts:
--------------------------------------------------------------------------------
1 | import {Pipe} from '../../../../packages/core/types/pipes';
2 | import toolCalculator from '../tools/calculator';
3 | import toolGetWeather from '../tools/weather';
4 |
5 | const getPipeWithTool = (): Pipe => ({
6 | apiKey: process.env.LANGBASE_USER_API_KEY!,
7 | name: 'pipe-with-tool',
8 | description: 'An AI agent pipe that can call tools',
9 | status: 'public',
10 | model: 'openai:gpt-4o-mini',
11 | stream: true,
12 | json: false,
13 | store: true,
14 | moderate: true,
15 | top_p: 1,
16 | max_tokens: 1000,
17 | temperature: 0.7,
18 | presence_penalty: 1,
19 | frequency_penalty: 1,
20 | stop: [],
21 | tool_choice: 'auto',
22 | parallel_tool_calls: true,
23 | messages: [{role: 'system', content: `You are a helpful AI assistant.`}],
24 | variables: [],
25 | memory: [],
26 | tools: [toolGetWeather(), toolCalculator()],
27 | });
28 | export default getPipeWithTool;
29 |
--------------------------------------------------------------------------------
/apps/baseai.dev/src/components/icons/IconFork.tsx:
--------------------------------------------------------------------------------
1 | export function IconFork(props: JSX.IntrinsicElements['svg']) {
2 | return (
3 |
11 |
15 |
16 | );
17 | }
18 |
--------------------------------------------------------------------------------
/examples/astro/baseai/tools/pipe-call-maths.ts:
--------------------------------------------------------------------------------
1 | import {Pipe} from '@baseai/core';
2 | import getPipeWithTool from '../pipes/pipe-with-tool';
3 |
4 | const runPipeWithMaths = async ({prompt}: {prompt: string}) => {
5 | const pipe = new Pipe(getPipeWithTool());
6 | const result = await pipe.run({
7 | messages: [{role: 'user', content: prompt}],
8 | });
9 |
10 | return result.completion;
11 | };
12 |
// OpenAI-style function-tool definition that exposes `runPipeWithMaths`
// to the model as a callable tool. `parameters` is a JSON Schema
// describing the single required `prompt` argument.
const toolPipeCallMaths = () => ({
	run: runPipeWithMaths,
	type: 'function',
	function: {
		name: 'runPipeWithMaths',
		description: `Call a pipe that can do maths and tell weather from different tools.`,
		parameters: {
			type: 'object',
			required: ['prompt'],
			properties: {
				prompt: {
					type: 'string',
					description: 'User input to do maths or to get weather.',
				},
			},
		},
	},
});

export default toolPipeCallMaths;
33 |
--------------------------------------------------------------------------------
/examples/nextjs/baseai/tools/pipe-call-maths.ts:
--------------------------------------------------------------------------------
1 | import {Pipe} from '@baseai/core';
2 | import getPipeWithTool from '../pipes/pipe-with-tool';
3 |
// Forwards the user's prompt to the tool-enabled pipe and returns only
// its completion text.
const runPipeWithMaths = async ({prompt}: {prompt: string}) => {
	const pipe = new Pipe(getPipeWithTool());
	const result = await pipe.run({
		messages: [{role: 'user', content: prompt}],
	});

	return result.completion;
};
12 |
// OpenAI-style function-tool definition that exposes `runPipeWithMaths`
// to the model as a callable tool. `parameters` is a JSON Schema
// describing the single required `prompt` argument.
const toolPipeCallMaths = () => ({
	run: runPipeWithMaths,
	type: 'function',
	function: {
		name: 'runPipeWithMaths',
		description: `Call a pipe that can do maths and tell weather from different tools.`,
		parameters: {
			type: 'object',
			required: ['prompt'],
			properties: {
				prompt: {
					type: 'string',
					description: 'User input to do maths or to get weather.',
				},
			},
		},
	},
});

export default toolPipeCallMaths;
33 |
--------------------------------------------------------------------------------