├── .eslintrc.json
├── .env
├── app
│   ├── favicon.ico
│   ├── api
│   │   └── chat
│   │       ├── engine
│   │       │   ├── constants.mjs
│   │       │   ├── index.ts
│   │       │   └── generate.mjs
│   │       ├── llamaindex-stream.ts
│   │       └── route.ts
│   ├── components
│   │   ├── ui
│   │   │   └── chat
│   │   │       ├── index.ts
│   │   │       ├── chat-item.tsx
│   │   │       ├── chat-avatar.tsx
│   │   │       ├── chat-messages.tsx
│   │   │       └── chat-input.tsx
│   │   ├── chat-section.tsx
│   │   └── header.tsx
│   ├── page.tsx
│   ├── layout.tsx
│   └── globals.css
├── public
│   └── llama.png
├── data
│   └── VAR Handbook v8_FINAL.pdf
├── postcss.config.js
├── test
│   └── data
│       └── VAR Handbook v8_FINAL.pdf
├── .gitignore
├── next.config.js
├── package.json
├── tsconfig.json
├── README.md
└── tailwind.config.ts
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "next/core-web-vitals"
3 | }
4 |
--------------------------------------------------------------------------------
/.env:
--------------------------------------------------------------------------------
1 | # Use your own key; never commit a real one
2 | OPENAI_API_KEY=<your-openai-api-key>
3 |
--------------------------------------------------------------------------------
/app/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AIAnytime/Create-Llama-RAG-App/master/app/favicon.ico
--------------------------------------------------------------------------------
/public/llama.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AIAnytime/Create-Llama-RAG-App/master/public/llama.png
--------------------------------------------------------------------------------
/data/VAR Handbook v8_FINAL.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AIAnytime/Create-Llama-RAG-App/master/data/VAR%20Handbook%20v8_FINAL.pdf
--------------------------------------------------------------------------------
/postcss.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 |   plugins: {
3 |     tailwindcss: {},
4 |     autoprefixer: {},
5 |   },
6 | };
7 |
--------------------------------------------------------------------------------
/test/data/VAR Handbook v8_FINAL.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AIAnytime/Create-Llama-RAG-App/master/test/data/VAR%20Handbook%20v8_FINAL.pdf
--------------------------------------------------------------------------------
/app/api/chat/engine/constants.mjs:
--------------------------------------------------------------------------------
1 | export const STORAGE_DIR = "./data";
2 | export const STORAGE_CACHE_DIR = "./cache";
3 | export const CHUNK_SIZE = 512;
4 | export const CHUNK_OVERLAP = 20;
5 |
--------------------------------------------------------------------------------
/app/components/ui/chat/index.ts:
--------------------------------------------------------------------------------
1 | import ChatInput from "./chat-input";
2 | import ChatMessages from "./chat-messages";
3 |
4 | export type { ChatInputProps } from "./chat-input";
5 | export type { Message } from "./chat-messages";
6 | export { ChatInput, ChatMessages };
7 |
--------------------------------------------------------------------------------
/app/page.tsx:
--------------------------------------------------------------------------------
1 | import Header from "@/app/components/header";
2 | import ChatSection from "./components/chat-section";
3 |
4 | export default function Home() {
5 |   return (
6 |     <main className="flex min-h-screen flex-col items-center gap-10 p-24 background-gradient">
7 |       <Header />
8 |       <ChatSection />
9 |     </main>
10 |   );
11 | }
12 |
--------------------------------------------------------------------------------
/app/components/ui/chat/chat-item.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import ChatAvatar from "./chat-avatar";
4 | import { Message } from "./chat-messages";
5 |
6 | export default function ChatItem(message: Message) {
7 |   return (
8 |     <div className="flex items-start gap-4 pt-5">
9 |       <ChatAvatar {...message} />
10 |       <p className="break-words">{message.content}</p>
11 |     </div>
12 |   );
13 | }
14 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 |
8 | # testing
9 | /coverage
10 |
11 | # next.js
12 | /.next/
13 | /out/
14 |
15 | # production
16 | /build
17 |
18 | # misc
19 | .DS_Store
20 | *.pem
21 |
22 | # debug
23 | npm-debug.log*
24 | yarn-debug.log*
25 | yarn-error.log*
26 |
27 | # local env files
28 | .env
29 | .env*.local
30 |
31 | # vercel
32 | .vercel
33 |
34 | # typescript
35 | *.tsbuildinfo
36 | next-env.d.ts
37 |
--------------------------------------------------------------------------------
/app/layout.tsx:
--------------------------------------------------------------------------------
1 | import type { Metadata } from "next";
2 | import { Inter } from "next/font/google";
3 | import "./globals.css";
4 |
5 | const inter = Inter({ subsets: ["latin"] });
6 |
7 | export const metadata: Metadata = {
8 |   title: "Create Llama App",
9 |   description: "Generated by create-llama",
10 | };
11 |
12 | export default function RootLayout({
13 |   children,
14 | }: {
15 |   children: React.ReactNode;
16 | }) {
17 |   return (
18 |     <html lang="en">
19 |       <body className={inter.className}>{children}</body>
20 |     </html>
21 |   );
22 | }
23 |
--------------------------------------------------------------------------------
/next.config.js:
--------------------------------------------------------------------------------
1 | /** @type {import('next').NextConfig} */
2 | const nextConfig = {
3 |   webpack: (config) => {
4 |     // See https://webpack.js.org/configuration/resolve/#resolvealias
5 |     config.resolve.alias = {
6 |       ...config.resolve.alias,
7 |       sharp$: false,
8 |       "onnxruntime-node$": false,
9 |       mongodb$: false,
10 |     };
11 |     return config;
12 |   },
13 |   experimental: {
14 |     serverComponentsExternalPackages: ["llamaindex"],
15 |     outputFileTracingIncludes: {
16 |       "/*": ["./cache/**/*"],
17 |     },
18 |   },
19 | };
20 |
21 | module.exports = nextConfig;
22 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "rag-app",
3 | "version": "0.1.0",
4 | "scripts": {
5 | "dev": "next dev",
6 | "build": "next build",
7 | "start": "next start",
8 | "lint": "next lint",
9 | "generate": "node app\\api\\chat\\engine\\generate.mjs"
10 | },
11 | "dependencies": {
12 | "ai": "^2.2.5",
13 | "llamaindex": "0.0.37",
14 | "dotenv": "^16.3.1",
15 | "next": "^13",
16 | "react": "^18",
17 | "react-dom": "^18"
18 | },
19 | "devDependencies": {
20 | "@types/node": "^20",
21 | "@types/react": "^18",
22 | "@types/react-dom": "^18",
23 | "autoprefixer": "^10.1",
24 | "eslint": "^8",
25 | "eslint-config-next": "^13",
26 | "postcss": "^8",
27 | "tailwindcss": "^3.3",
28 | "typescript": "^5"
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/app/components/chat-section.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { useChat } from "ai/react";
4 | import { ChatInput, ChatMessages } from "./ui/chat";
5 |
6 | export default function ChatSection() {
7 |   const {
8 |     messages,
9 |     input,
10 |     isLoading,
11 |     handleSubmit,
12 |     handleInputChange,
13 |     reload,
14 |     stop,
15 |   } = useChat({ api: process.env.NEXT_PUBLIC_CHAT_API });
16 |
17 |   return (
18 |     <div className="space-y-4 max-w-5xl w-full">
19 |       <ChatMessages
20 |         messages={messages}
21 |         isLoading={isLoading}
22 |         reload={reload}
23 |         stop={stop}
24 |       />
25 |       <ChatInput
26 |         input={input}
27 |         handleSubmit={handleSubmit}
28 |         handleInputChange={handleInputChange}
29 |         isLoading={isLoading}
30 |       />
31 |     </div>
32 |   );
33 | }
34 |
--------------------------------------------------------------------------------
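Note on the `useChat` call above: in the `ai` 2.x SDK the `api` option falls back to `/api/chat` when it is `undefined`, so the component works against `app/api/chat/route.ts` even without `NEXT_PUBLIC_CHAT_API` being set. To point the UI at a different endpoint, the variable could be set in `.env.local` (hypothetical value):

```
NEXT_PUBLIC_CHAT_API=/api/my-chat
```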
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "es5",
4 | "lib": [
5 | "dom",
6 | "dom.iterable",
7 | "esnext"
8 | ],
9 | "allowJs": true,
10 | "skipLibCheck": true,
11 | "strict": true,
12 | "noEmit": true,
13 | "esModuleInterop": true,
14 | "module": "esnext",
15 | "moduleResolution": "bundler",
16 | "resolveJsonModule": true,
17 | "isolatedModules": true,
18 | "jsx": "preserve",
19 | "incremental": true,
20 | "plugins": [
21 | {
22 | "name": "next"
23 | }
24 | ],
25 | "paths": {
26 | "@/*": [
27 | "./*"
28 | ]
29 | },
30 | "forceConsistentCasingInFileNames": true,
31 | },
32 | "include": [
33 | "next-env.d.ts",
34 | "**/*.ts",
35 | "**/*.tsx",
36 | ".next/types/**/*.ts"
37 | ],
38 | "exclude": [
39 | "node_modules"
40 | ]
41 | }
--------------------------------------------------------------------------------
/app/api/chat/llamaindex-stream.ts:
--------------------------------------------------------------------------------
1 | import {
2 |   createCallbacksTransformer,
3 |   createStreamDataTransformer,
4 |   trimStartOfStreamHelper,
5 |   type AIStreamCallbacksAndOptions,
6 | } from "ai";
7 |
8 | function createParser(res: AsyncGenerator<any>) {
9 |   const trimStartOfStream = trimStartOfStreamHelper();
10 |   return new ReadableStream<string>({
11 |     async pull(controller): Promise<void> {
12 |       const { value, done } = await res.next();
13 |       if (done) {
14 |         controller.close();
15 |         return;
16 |       }
17 |
18 |       const text = trimStartOfStream(value ?? "");
19 |       if (text) {
20 |         controller.enqueue(text);
21 |       }
22 |     },
23 |   });
24 | }
25 |
26 | export function LlamaIndexStream(
27 |   res: AsyncGenerator<any>,
28 |   callbacks?: AIStreamCallbacksAndOptions,
29 | ): ReadableStream {
30 |   return createParser(res)
31 |     .pipeThrough(createCallbacksTransformer(callbacks))
32 |     .pipeThrough(
33 |       createStreamDataTransformer(callbacks?.experimental_streamData),
34 |     );
35 | }
36 |
--------------------------------------------------------------------------------
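The stream above is pull-based: the `ReadableStream` only advances the async generator when the consumer requests another chunk, which gives backpressure for free. A minimal self-contained sketch of the same pattern, independent of the `ai` helpers (the generator and its strings are made up for illustration):

```ts
// Toy version of the createParser pattern: wrap an async generator
// in a pull-based ReadableStream and drain it with a reader.
async function* tokens(): AsyncGenerator<string> {
  for (const t of ["Hello", ", ", "world"]) yield t;
}

function toStream(gen: AsyncGenerator<string>): ReadableStream<string> {
  return new ReadableStream<string>({
    // pull() runs once per chunk the consumer requests, so the
    // generator is only advanced on demand.
    async pull(controller) {
      const { value, done } = await gen.next();
      if (done) controller.close();
      else controller.enqueue(value);
    },
  });
}

async function main() {
  const reader = toStream(tokens()).getReader();
  for (;;) {
    const { value, done } = await reader.read();
    if (done) break;
    console.log(value); // logs "Hello", ", ", "world" in order
  }
}

main();
```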
/README.md:
--------------------------------------------------------------------------------
1 | This is a [LlamaIndex](https://www.llamaindex.ai/) project using [Next.js](https://nextjs.org/) bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama).
2 |
3 | ## Getting Started
4 |
5 | First, install the dependencies:
6 |
7 | ```
8 | npm install
9 | ```
10 |
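11 | Second, set your OpenAI API key in a `.env` file at the project root (the engine reads `process.env.OPENAI_API_KEY`; use your own key and keep it out of version control):
12 |
13 | ```
14 | OPENAI_API_KEY=<your-openai-api-key>
15 | ```
16 |
17 | Third, generate the embeddings of the documents in the `./data` directory. The chat engine throws a "StorageContext is empty" error until this cache exists, so run it before the first chat request:
18 |
19 | ```
20 | npm run generate
21 | ```
22 |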
23 | Finally, run the development server:
24 |
25 | ```
26 | npm run dev
27 | ```
28 |
29 | Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
30 |
31 | You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file.
32 |
33 | This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font.
34 |
35 | ## Learn More
36 |
37 | To learn more about LlamaIndex, take a look at the following resources:
38 |
39 | - [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex (Python features).
40 | - [LlamaIndexTS Documentation](https://ts.llamaindex.ai) - learn about LlamaIndex (TypeScript features).
41 |
42 | You can check out [the LlamaIndexTS GitHub repository](https://github.com/run-llama/LlamaIndexTS) - your feedback and contributions are welcome!
43 |
--------------------------------------------------------------------------------
/app/components/ui/chat/chat-avatar.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import Image from "next/image";
4 | import { Message } from "./chat-messages";
5 |
6 | export default function ChatAvatar(message: Message) {
7 | if (message.role === "user") {
8 | return (
9 |
19 | );
20 | }
21 |
22 | return (
23 |
24 |
32 |
33 | );
34 | }
35 |
--------------------------------------------------------------------------------
/app/components/ui/chat/chat-messages.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { useEffect, useRef } from "react";
4 | import ChatItem from "./chat-item";
5 |
6 | export interface Message {
7 |   id: string;
8 |   content: string;
9 |   role: string;
10 | }
11 |
12 | export default function ChatMessages({
13 |   messages,
14 |   isLoading,
15 |   reload,
16 |   stop,
17 | }: {
18 |   messages: Message[];
19 |   isLoading?: boolean;
20 |   stop?: () => void;
21 |   reload?: () => void;
22 | }) {
23 |   const scrollableChatContainerRef = useRef<HTMLDivElement>(null);
24 |
25 |   const scrollToBottom = () => {
26 |     if (scrollableChatContainerRef.current) {
27 |       scrollableChatContainerRef.current.scrollTop =
28 |         scrollableChatContainerRef.current.scrollHeight;
29 |     }
30 |   };
31 |
32 |   useEffect(() => {
33 |     scrollToBottom();
34 |   }, [messages.length]);
35 |
36 |   return (
37 |     <div className="w-full max-w-5xl p-4 bg-white rounded-xl shadow-xl">
38 |       <div
39 |         className="flex flex-col gap-5 divide-y h-[50vh] overflow-auto"
40 |         ref={scrollableChatContainerRef}
41 |       >
42 |         {messages.map((m: Message) => (
43 |           <ChatItem key={m.id} {...m} />
44 |         ))}
45 |       </div>
46 |     </div>
47 |   );
48 | }
49 |
--------------------------------------------------------------------------------
/app/api/chat/engine/index.ts:
--------------------------------------------------------------------------------
1 | import {
2 |   ContextChatEngine,
3 |   LLM,
4 |   serviceContextFromDefaults,
5 |   SimpleDocumentStore,
6 |   storageContextFromDefaults,
7 |   VectorStoreIndex,
8 | } from "llamaindex";
9 | import { CHUNK_OVERLAP, CHUNK_SIZE, STORAGE_CACHE_DIR } from "./constants.mjs";
10 |
11 | async function getDataSource(llm: LLM) {
12 |   const serviceContext = serviceContextFromDefaults({
13 |     llm,
14 |     chunkSize: CHUNK_SIZE,
15 |     chunkOverlap: CHUNK_OVERLAP,
16 |   });
17 |   const storageContext = await storageContextFromDefaults({
18 |     persistDir: STORAGE_CACHE_DIR,
19 |   });
20 |
21 |   const numberOfDocs = Object.keys(
22 |     (storageContext.docStore as SimpleDocumentStore).toDict(),
23 |   ).length;
24 |   if (numberOfDocs === 0) {
25 |     throw new Error(
26 |       `StorageContext is empty - call 'npm run generate' to generate the storage first`,
27 |     );
28 |   }
29 |   return await VectorStoreIndex.init({
30 |     storageContext,
31 |     serviceContext,
32 |   });
33 | }
34 |
35 | export async function createChatEngine(llm: LLM) {
36 |   const index = await getDataSource(llm);
37 |   const retriever = index.asRetriever();
38 |   retriever.similarityTopK = 5;
39 |
40 |   return new ContextChatEngine({
41 |     chatModel: llm,
42 |     retriever,
43 |   });
44 | }
45 |
--------------------------------------------------------------------------------
/app/components/header.tsx:
--------------------------------------------------------------------------------
1 | import Image from "next/image";
2 |
3 | export default function Header() {
4 |   return (
5 |     <div className="z-10 max-w-5xl w-full items-center justify-between font-mono text-sm lg:flex">
6 |       <p className="fixed left-0 top-0 flex w-full justify-center border-b border-gray-300 bg-gradient-to-b from-zinc-200 pb-6 pt-8 backdrop-blur-2xl lg:static lg:w-auto lg:rounded-xl lg:border lg:bg-gray-200 lg:p-4">
7 |         Get started by editing&nbsp;
8 |         <code className="font-mono font-bold">app/page.tsx</code>
9 |       </p>
10 |       {/* the original logo/link block was lost in extraction; a minimal reconstruction follows */}
11 |       <div className="fixed bottom-0 left-0 flex w-full items-end justify-center bg-gradient-to-t from-white via-white lg:static lg:w-auto lg:bg-none">
12 |         <a
13 |           href="https://www.llamaindex.ai/"
14 |           className="flex items-center justify-center gap-2 text-lg font-bold"
15 |         >
16 |           <span>Built by LlamaIndex</span>
17 |           <Image
18 |             className="rounded-xl"
19 |             src="/llama.png"
20 |             alt="Llama Logo"
21 |             width={40}
22 |             height={40}
23 |             priority
24 |           />
25 |         </a>
26 |       </div>
27 |     </div>
28 |   );
29 | }
30 |
--------------------------------------------------------------------------------
/app/components/ui/chat/chat-input.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | export interface ChatInputProps {
4 |   /** The current value of the input */
5 |   input?: string;
6 |   /** An input/textarea-ready onChange handler to control the value of the input */
7 |   handleInputChange?: (
8 |     e:
9 |       | React.ChangeEvent<HTMLInputElement>
10 |       | React.ChangeEvent<HTMLTextAreaElement>,
11 |   ) => void;
12 |   /** Form submission handler to automatically reset input and append a user message */
13 |   handleSubmit: (e: React.FormEvent<HTMLFormElement>) => void;
14 |   isLoading: boolean;
15 | }
16 |
17 | export default function ChatInput(props: ChatInputProps) {
18 |   return (
19 |     <>
20 |       <form
21 |         onSubmit={props.handleSubmit}
22 |         className="flex items-start justify-between w-full max-w-5xl p-4 bg-white rounded-xl shadow-xl gap-4"
23 |       >
24 |         <input
25 |           autoFocus
26 |           name="message"
27 |           placeholder="Type a message"
28 |           className="w-full p-4 rounded-xl shadow-inner flex-1"
29 |           value={props.input}
30 |           onChange={props.handleInputChange}
31 |         />
32 |         <button
33 |           disabled={props.isLoading}
34 |           type="submit"
35 |           className="p-4 text-white rounded-xl shadow-xl bg-gradient-to-r from-cyan-500 to-sky-500 disabled:opacity-50 disabled:cursor-not-allowed"
36 |         >
37 |           Send message
38 |         </button>
39 |       </form>
40 |     </>
41 |   );
42 | }
43 |
--------------------------------------------------------------------------------
/app/api/chat/engine/generate.mjs:
--------------------------------------------------------------------------------
1 | import {
2 |   serviceContextFromDefaults,
3 |   SimpleDirectoryReader,
4 |   storageContextFromDefaults,
5 |   VectorStoreIndex,
6 | } from "llamaindex";
7 |
8 | import * as dotenv from "dotenv";
9 |
10 | import {
11 |   CHUNK_OVERLAP,
12 |   CHUNK_SIZE,
13 |   STORAGE_CACHE_DIR,
14 |   STORAGE_DIR,
15 | } from "./constants.mjs";
16 |
17 | // Load environment variables from local .env file
18 | dotenv.config();
19 |
20 | async function getRuntime(func) {
21 |   const start = Date.now();
22 |   await func();
23 |   const end = Date.now();
24 |   return end - start;
25 | }
26 |
27 | async function generateDatasource(serviceContext) {
28 |   console.log(`Generating storage context...`);
29 |   // Split documents, create embeddings and store them in the storage context
30 |   const ms = await getRuntime(async () => {
31 |     const storageContext = await storageContextFromDefaults({
32 |       persistDir: STORAGE_CACHE_DIR,
33 |     });
34 |     const documents = await new SimpleDirectoryReader().loadData({
35 |       directoryPath: STORAGE_DIR,
36 |     });
37 |     await VectorStoreIndex.fromDocuments(documents, {
38 |       storageContext,
39 |       serviceContext,
40 |     });
41 |   });
42 |   console.log(`Storage context successfully generated in ${ms / 1000}s.`);
43 | }
44 |
45 | (async () => {
46 |   const serviceContext = serviceContextFromDefaults({
47 |     chunkSize: CHUNK_SIZE,
48 |     chunkOverlap: CHUNK_OVERLAP,
49 |   });
50 |
51 |   await generateDatasource(serviceContext);
52 |   console.log("Finished generating storage.");
53 | })();
54 |
--------------------------------------------------------------------------------
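Once this script has persisted the index to `./cache`, other scripts can rebuild it the same way `engine/index.ts` does. A sketch of a one-off query script against the persisted storage, assuming the same llamaindex 0.0.x APIs used elsewhere in this repo (the question string is only an example):

```ts
import {
  serviceContextFromDefaults,
  storageContextFromDefaults,
  VectorStoreIndex,
} from "llamaindex";
import { CHUNK_OVERLAP, CHUNK_SIZE, STORAGE_CACHE_DIR } from "./constants.mjs";

async function main() {
  // Rebuild the index from the cache written by `npm run generate`.
  const serviceContext = serviceContextFromDefaults({
    chunkSize: CHUNK_SIZE,
    chunkOverlap: CHUNK_OVERLAP,
  });
  const storageContext = await storageContextFromDefaults({
    persistDir: STORAGE_CACHE_DIR,
  });
  const index = await VectorStoreIndex.init({ storageContext, serviceContext });

  // One-shot retrieval + synthesis instead of a stateful chat session.
  const queryEngine = index.asQueryEngine();
  const result = await queryEngine.query("What does the VAR handbook cover?");
  console.log(result.response);
}

main().catch(console.error);
```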
/app/api/chat/route.ts:
--------------------------------------------------------------------------------
1 | import { Message, StreamingTextResponse } from "ai";
2 | import { OpenAI } from "llamaindex";
3 | import { NextRequest, NextResponse } from "next/server";
4 | import { createChatEngine } from "./engine";
5 | import { LlamaIndexStream } from "./llamaindex-stream";
6 |
7 | export const runtime = "nodejs";
8 | export const dynamic = "force-dynamic";
9 |
10 | export async function POST(request: NextRequest) {
11 | try {
12 | const body = await request.json();
13 | const { messages }: { messages: Message[] } = body;
14 | const lastMessage = messages.pop();
15 | if (!messages || !lastMessage || lastMessage.role !== "user") {
16 | return NextResponse.json(
17 | {
18 | error:
19 | "messages are required in the request body and the last message must be from the user",
20 | },
21 | { status: 400 },
22 | );
23 | }
24 |
25 | const llm = new OpenAI({
26 | model: "gpt-3.5-turbo",
27 | });
28 |
29 | const chatEngine = await createChatEngine(llm);
30 |
31 | const response = await chatEngine.chat(lastMessage.content, messages, true);
32 |
33 | // Transform the response into a readable stream
34 | const stream = LlamaIndexStream(response);
35 |
36 | // Return a StreamingTextResponse, which can be consumed by the client
37 | return new StreamingTextResponse(stream);
38 | } catch (error) {
39 | console.error("[LlamaIndex]", error);
40 | return NextResponse.json(
41 | {
42 | error: (error as Error).message,
43 | },
44 | {
45 | status: 500,
46 | },
47 | );
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
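For completeness, one way to exercise this route without the React UI, e.g. from a Node 18+ script (it assumes the dev server is on localhost:3000; the id and question are placeholders):

```ts
// Minimal client for POST /api/chat: send a message list whose last
// entry has role "user", then drain the streamed text response.
async function ask(question: string) {
  const res = await fetch("http://localhost:3000/api/chat", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      messages: [{ id: "1", role: "user", content: question }],
    }),
  });
  if (!res.ok || !res.body) throw new Error(`request failed: ${res.status}`);

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  for (;;) {
    const { value, done } = await reader.read();
    if (done) break;
    process.stdout.write(decoder.decode(value)); // print chunks as they arrive
  }
}

ask("What is the VAR handbook about?").catch(console.error);
```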
/app/globals.css:
--------------------------------------------------------------------------------
1 | @tailwind base;
2 | @tailwind components;
3 | @tailwind utilities;
4 |
5 | @layer base {
6 |   :root {
7 |     --background: 0 0% 100%;
8 |     --foreground: 222.2 47.4% 11.2%;
9 |
10 |     --muted: 210 40% 96.1%;
11 |     --muted-foreground: 215.4 16.3% 46.9%;
12 |
13 |     --popover: 0 0% 100%;
14 |     --popover-foreground: 222.2 47.4% 11.2%;
15 |
16 |     --border: 214.3 31.8% 91.4%;
17 |     --input: 214.3 31.8% 91.4%;
18 |
19 |     --card: 0 0% 100%;
20 |     --card-foreground: 222.2 47.4% 11.2%;
21 |
22 |     --primary: 222.2 47.4% 11.2%;
23 |     --primary-foreground: 210 40% 98%;
24 |
25 |     --secondary: 210 40% 96.1%;
26 |     --secondary-foreground: 222.2 47.4% 11.2%;
27 |
28 |     --accent: 210 40% 96.1%;
29 |     --accent-foreground: 222.2 47.4% 11.2%;
30 |
31 |     --destructive: 0 100% 50%;
32 |     --destructive-foreground: 210 40% 98%;
33 |
34 |     --ring: 215 20.2% 65.1%;
35 |
36 |     --radius: 0.5rem;
37 |   }
38 |
39 |   .dark {
40 |     --background: 224 71% 4%;
41 |     --foreground: 213 31% 91%;
42 |
43 |     --muted: 223 47% 11%;
44 |     --muted-foreground: 215.4 16.3% 56.9%;
45 |
46 |     --accent: 216 34% 17%;
47 |     --accent-foreground: 210 40% 98%;
48 |
49 |     --popover: 224 71% 4%;
50 |     --popover-foreground: 215 20.2% 65.1%;
51 |
52 |     --border: 216 34% 17%;
53 |     --input: 216 34% 17%;
54 |
55 |     --card: 224 71% 4%;
56 |     --card-foreground: 213 31% 91%;
57 |
58 |     --primary: 210 40% 98%;
59 |     --primary-foreground: 222.2 47.4% 1.2%;
60 |
61 |     --secondary: 222.2 47.4% 11.2%;
62 |     --secondary-foreground: 210 40% 98%;
63 |
64 |     --destructive: 0 63% 31%;
65 |     --destructive-foreground: 210 40% 98%;
66 |
67 |     --ring: 216 34% 17%;
68 |
69 |     --radius: 0.5rem;
70 |   }
71 | }
72 |
73 | @layer base {
74 |   * {
75 |     @apply border-border;
76 |   }
77 |   body {
78 |     @apply bg-background text-foreground;
79 |     font-feature-settings:
80 |       "rlig" 1,
81 |       "calt" 1;
82 |   }
83 |   .background-gradient {
84 |     background-color: #fff;
85 |     background-image: radial-gradient(
86 |         at 21% 11%,
87 |         rgba(186, 186, 233, 0.53) 0,
88 |         transparent 50%
89 |       ),
90 |       radial-gradient(at 85% 0, hsla(46, 57%, 78%, 0.52) 0, transparent 50%),
91 |       radial-gradient(at 91% 36%, rgba(194, 213, 255, 0.68) 0, transparent 50%),
92 |       radial-gradient(at 8% 40%, rgba(251, 218, 239, 0.46) 0, transparent 50%);
93 |   }
94 | }
95 |
--------------------------------------------------------------------------------
/tailwind.config.ts:
--------------------------------------------------------------------------------
1 | import type { Config } from "tailwindcss";
2 | import { fontFamily } from "tailwindcss/defaultTheme";
3 |
4 | const config: Config = {
5 | darkMode: ["class"],
6 | content: ["app/**/*.{ts,tsx}", "components/**/*.{ts,tsx}"],
7 | theme: {
8 | container: {
9 | center: true,
10 | padding: "2rem",
11 | screens: {
12 | "2xl": "1400px",
13 | },
14 | },
15 | extend: {
16 | colors: {
17 | border: "hsl(var(--border))",
18 | input: "hsl(var(--input))",
19 | ring: "hsl(var(--ring))",
20 | background: "hsl(var(--background))",
21 | foreground: "hsl(var(--foreground))",
22 | primary: {
23 | DEFAULT: "hsl(var(--primary))",
24 | foreground: "hsl(var(--primary-foreground))",
25 | },
26 | secondary: {
27 | DEFAULT: "hsl(var(--secondary))",
28 | foreground: "hsl(var(--secondary-foreground))",
29 | },
30 | destructive: {
31 | DEFAULT: "hsl(var(--destructive) / )",
32 | foreground: "hsl(var(--destructive-foreground) / )",
33 | },
34 | muted: {
35 | DEFAULT: "hsl(var(--muted))",
36 | foreground: "hsl(var(--muted-foreground))",
37 | },
38 | accent: {
39 | DEFAULT: "hsl(var(--accent))",
40 | foreground: "hsl(var(--accent-foreground))",
41 | },
42 | popover: {
43 | DEFAULT: "hsl(var(--popover))",
44 | foreground: "hsl(var(--popover-foreground))",
45 | },
46 | card: {
47 | DEFAULT: "hsl(var(--card))",
48 | foreground: "hsl(var(--card-foreground))",
49 | },
50 | },
51 | borderRadius: {
52 | xl: `calc(var(--radius) + 4px)`,
53 | lg: `var(--radius)`,
54 | md: `calc(var(--radius) - 2px)`,
55 | sm: "calc(var(--radius) - 4px)",
56 | },
57 | fontFamily: {
58 | sans: ["var(--font-sans)", ...fontFamily.sans],
59 | },
60 | keyframes: {
61 | "accordion-down": {
62 | from: { height: "0" },
63 | to: { height: "var(--radix-accordion-content-height)" },
64 | },
65 | "accordion-up": {
66 | from: { height: "var(--radix-accordion-content-height)" },
67 | to: { height: "0" },
68 | },
69 | },
70 | animation: {
71 | "accordion-down": "accordion-down 0.2s ease-out",
72 | "accordion-up": "accordion-up 0.2s ease-out",
73 | },
74 | },
75 | },
76 | plugins: [],
77 | };
78 | export default config;
79 |
--------------------------------------------------------------------------------
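The `colors` entries resolve through the CSS variables declared in `app/globals.css`, and `<alpha-value>` is what lets Tailwind's opacity modifiers (e.g. `bg-destructive/50`) work for the tokens that include it. A hypothetical component snippet showing the generated utility names:

```tsx
// Hypothetical usage: these classes come from the `colors` and
// `borderRadius` entries in tailwind.config.ts.
export function Badge({ label }: { label: string }) {
  return (
    <span className="bg-primary text-primary-foreground border border-border rounded-lg px-2 py-1">
      {label}
    </span>
  );
}
```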