├── .env.example ├── .github └── FUNDING.yml ├── .gitignore ├── Dockerfile ├── LICENSE ├── README.md ├── app ├── action.tsx ├── config.tsx ├── function-calling.tsx ├── globals.css ├── layout.tsx ├── page.tsx └── tools │ ├── contentProcessing.tsx │ ├── generateRelevantQuestions.tsx │ ├── mentionFunctions │ ├── falAiStableDiffusion3Medium.ts │ ├── portKeyAIGateway.ts │ ├── portKeyAIGatewayTogetherAI.ts │ ├── streamChatCompletion.ts │ └── structuredUnlockSummarize.ts │ ├── mentionToolConfig.tsx │ ├── mentionTools.tsx │ ├── rateLimiting.tsx │ ├── searchProviders.tsx │ ├── semanticCache.tsx │ └── streamingChatCompletion.tsx ├── bun.lockb ├── components.json ├── components ├── answer │ ├── FinancialChart.tsx │ ├── FollowUpComponent.tsx │ ├── ImageGenerationComponent.tsx │ ├── ImagesComponent.tsx │ ├── InitialQueries.tsx │ ├── LLMResponseComponent.tsx │ ├── Map.tsx │ ├── MapDetails.tsx │ ├── RateLimit.tsx │ ├── SearchResultsComponent.tsx │ ├── ShoppingComponent.tsx │ ├── Spotify.tsx │ ├── UserMessageComponent.tsx │ └── VideosComponent.tsx ├── external-link.tsx ├── header.tsx ├── providers.tsx └── ui │ ├── button.tsx │ ├── card.tsx │ ├── carousel.tsx │ ├── dialog.tsx │ ├── icons.tsx │ ├── input.tsx │ ├── label.tsx │ ├── separator.tsx │ ├── textarea.tsx │ ├── toast.tsx │ ├── toaster.tsx │ ├── tooltip.tsx │ └── use-toast.ts ├── docker-compose.yml ├── express-api ├── .env.example ├── .gitignore ├── README.md ├── index.js ├── package-lock.json └── package.json ├── lib ├── hooks │ ├── chat-scroll-anchor.tsx │ ├── use-at-bottom.tsx │ └── use-enter-submit.tsx └── utils │ ├── index.tsx │ └── tool-definition.ts ├── next-env.d.ts ├── package-lock.json ├── package.json ├── postcss.config.js ├── public ├── apple-touch-icon.png ├── brave.png ├── bright-data-logo.png ├── fal.svg ├── favicon-16x16.png ├── favicon.ico ├── groq.png ├── mistral.png ├── powered-by-groq.svg └── serper.png ├── style.md ├── tailwind.config.ts ├── tsconfig.json └── tsconfig.tsbuildinfo /.env.example: -------------------------------------------------------------------------------- 1 | # https://console.groq.com/keys 2 | GROQ_API_KEY=APIKEYGOESHERE 3 | # https://platform.openai.com/account/api-keys 4 | OPENAI_API_KEY=APIKEYGOESHERE 5 | # https://serper.dev/ 6 | SERPER_API=APIKEYGOESHERE 7 | # Brave Search API Key (Serper is the default, brave is an alternative option for search) 8 | BRAVE_SEARCH_API_KEY=APIKEYGOESHERE 9 | 10 | 11 | # OPTIONAL - Set LAN GPU server, examples: 12 | # PC | http://localhost:11434/v1 13 | # LAN GPU server | http://192.168.1.100:11434/v1 14 | OLLAMA_BASE_URL=http://localhost:11434/v1 15 | 16 | # OPTIONAL - Rate Limiting: https://console.upstash.com/redis 17 | UPSTASH_REDIS_REST_URL=https://EXAMPLE.upstash.io 18 | UPSTASH_REDIS_REST_TOKEN=APIKEYGOESHERE 19 | 20 | # OPTIONAL - Google Search API Key (Serper is the default, brave is an alternative) 21 | GOOGLE_SEARCH_API_KEY=APIKEYGOESHERE 22 | GOOGLE_CX=VALUEGOESHERE 23 | 24 | # OPTIONAL - Portkey API Key & Bedrock Virtual Key/Provder API Keys 25 | PORTKEY_API_KEY=APIKEYGOESHERE 26 | PORTKEY_BEDROCK_VIRTUAL_KEY=APIKEYGOESHERE 27 | 28 | # OPTIONAL - Spotify 29 | SPOTIFY_CLIENT_ID=APIKEYGOESHERE 30 | SPOTIFY_CLIENT_SECRET=APIKEYGOESHERE 31 | 32 | # OPTIONAL - AWS Bedrock 33 | AWS_ACCESS_KEY_ID=APIKEYGOESHERE 34 | AWS_SECRET_ACCESS_KEY=APIKEYGOESHERE 35 | 36 | # OPTIONAL - FAL.AI (Stable Diffusion 3) 37 | FAL_KEY=APIKEYGOESHERE -------------------------------------------------------------------------------- /.github/FUNDING.yml: 
-------------------------------------------------------------------------------- 1 | github: [developersdigest] 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules 3 | .turbo 4 | *.log 5 | .next 6 | *.local 7 | .env 8 | .cache 9 | .turbo 10 | .vercel 11 | .vscode 12 | .idea -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node 2 | 3 | EXPOSE 3000/tcp 4 | 5 | RUN apt-get update && \ 6 | apt-get upgrade -y && \ 7 | rm -rf -- /var/lib/apt && \ 8 | npm install -g bun 9 | 10 | USER node 11 | 12 | WORKDIR /home/node 13 | 14 | RUN git clone https://github.com/developersdigest/llm-answer-engine.git app && \ 15 | cd app && \ 16 | bun install 17 | 18 | WORKDIR /home/node/app 19 | 20 | ENTRYPOINT ["/usr/local/bin/bun", "run", "dev"] 21 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Developers Digest 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

Perplexity-Inspired LLM Answer Engine

2 |
3 |
4 | 5 | 6 | 7 | 8 | 9 | 10 |
11 |
12 | developersdigest%2Fllm-answer-engine | Trendshift 13 |
14 |
15 |
16 |
17 |
18 | 19 | This repository contains the code and instructions needed to build a sophisticated answer engine that leverages the capabilities of [Groq](https://www.groq.com/), [Mistral AI's Mixtral](https://mistral.ai/news/mixtral-of-experts/), [Langchain.JS](https://js.langchain.com/docs/), [Brave Search](https://search.brave.com/), [Serper API](https://serper.dev/), and [OpenAI](https://openai.com/). Designed to efficiently return sources, answers, images, videos, and follow-up questions based on user queries, this project is an ideal starting point for developers interested in natural language processing and search technologies. 20 | 21 | ## YouTube Tutorials 22 | 23 |
24 | 25 | Tutorial 2 26 | 27 | 28 | Tutorial 1 29 | 30 | 31 | Tutorial 3 32 | 33 | 34 | Tutorial 4 35 | 36 |
37 | 38 | 39 | ## Technologies Used 40 | 41 | - **Next.js**: A React framework for building server-side rendered and static web applications. 42 | - **Tailwind CSS**: A utility-first CSS framework for rapidly building custom user interfaces. 43 | - **Vercel AI SDK**: The Vercel AI SDK is a library for building AI-powered streaming text and chat UIs. 44 | - **Groq & Mixtral**: Technologies for processing and understanding user queries. 45 | - **Langchain.JS**: A JavaScript library focused on text operations, such as text splitting and embeddings. 46 | - **Brave Search**: A privacy-focused search engine used for sourcing relevant content and images. 47 | - **Serper API**: Used for fetching relevant video and image results based on the user's query. 48 | - **OpenAI Embeddings**: Used for creating vector representations of text chunks. 49 | - **Cheerio**: Utilized for HTML parsing, allowing the extraction of content from web pages. 50 | - **Ollama (Optional)**: Used for streaming inference and embeddings. 51 | - **Upstash Redis Rate Limiting (Optional)**: Used for setting up rate limiting for the application. 52 | - **Upstash Semantic Cache (Optional)**: Used for caching data for faster response times. 53 | 54 | ## Getting Started 55 | 56 | ### Prerequisites 57 | 58 | - Obtain API keys from OpenAI, Groq, Brave Search, and Serper. 59 | 60 | #### Prerequisites for Non-Docker Installation 61 | 62 | - Ensure Node.js and npm are installed on your machine. 63 | 64 | #### Prerequisites for Docker Installation 65 | 66 | - Ensure Docker and docker compose are installed on your machine. 67 | 68 | ### Obtaining API Keys 69 | 70 | - **OpenAI API Key**: [Generate your OpenAI API key here](https://platform.openai.com/account/api-keys). 71 | - **Groq API Key**: [Get your Groq API key here](https://console.groq.com/keys). 72 | - **Brave Search API Key**: [Obtain your Brave Search API key here](https://brave.com/search/api/). 73 | - **Serper API Key**: [Get your Serper API key here](https://serper.dev/). 74 | 75 | ### Quick Clone and Deploy 76 | 77 | Simple, Easy, Fast and Free - deploy to vercel 78 | 79 | > Make Sure to fill all the API Keys required for the Installation. 80 | 81 | [![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fdevelopersdigest%2Fllm-answer-engine&env=OPENAI_API_KEY,GROQ_API_KEY,BRAVE_SEARCH_API_KEY,SERPER_API&envDescription=ALL%20API%20KEYS%20are%20needed%20for%20this%20application.%20If%20you%20are%20not%20using%20OpenAI%20KEY%20and%20Using%20Groq%20Instead%2C%20then%20just%20enter%20a%20random%20string%20in%20the%20OpenAI%20Key%20section%20so%20it%20wont%20generate%20any%20error%20while%20building%20the%20project.&project-name=llm-answer-engine&repository-name=llm-answer-engine&skippable-integrations=1) 82 | 83 | 84 | 85 | ### Installation 86 | 87 | 1. Clone the repository: 88 | ``` 89 | git clone https://github.com/developersdigest/llm-answer-engine.git 90 | ``` 91 | 2. Move in the directory 92 | ``` 93 | cd llm-answer-engine 94 | ``` 95 | 96 | #### Docker Installation 97 | 98 | 3. Edit the `docker-compose.yml` file and add your API keys 99 | 100 | 4. Running the Server 101 | 102 | To start the server, execute: 103 | ``` 104 | docker compose up -d #for v2 105 | ``` 106 | or 107 | ``` 108 | docker-compose up -d #for v1 109 | ``` 110 | the server will be listening on the specified port. 111 | 112 | #### Non-Docker Installation 113 | 114 | 3. 
Install the required dependencies: 115 | ``` 116 | npm install 117 | ``` 118 | or 119 | ``` 120 | bun install 121 | ``` 122 | 4. Create a `.env` file in the root of your project and add your API keys: 123 | ``` 124 | OPENAI_API_KEY=your_openai_api_key 125 | GROQ_API_KEY=your_groq_api_key 126 | BRAVE_SEARCH_API_KEY=your_brave_search_api_key 127 | SERPER_API=your_serper_api_key 128 | ``` 129 | 5. Running the Server 130 | 131 | To start the server, execute: 132 | ``` 133 | npm run dev 134 | ``` 135 | or 136 | ``` 137 | bun run dev 138 | ``` 139 | 140 | the server will be listening on the specified port. 141 | 142 | ## Editing the Configuration 143 | 144 | The configuration file is located in the `app/config.tsx` file. You can modify the following values 145 | 146 | - useOllamaInference: false, 147 | - useOllamaEmbeddings: false, 148 | - inferenceModel: 'mixtral-8x7b-32768', 149 | - inferenceAPIKey: process.env.GROQ_API_KEY, 150 | - embeddingsModel: 'text-embedding-3-small', 151 | - textChunkSize: 800, 152 | - textChunkOverlap: 200, 153 | - numberOfSimilarityResults: 2, 154 | - numberOfPagesToScan: 10, 155 | - nonOllamaBaseURL: 'https://api.groq.com/openai/v1' 156 | - useFunctionCalling: true 157 | - useRateLimiting: false 158 | - useSemanticCache: false 159 | - usePortkey: false 160 | 161 | ### Function Calling Support (Beta) 162 | Currently, function calling is supported with the following capabilities: 163 | 164 | - Maps and Locations (Serper Locations API) 165 | - Shopping (Serper Shopping API) 166 | - TradingView Stock Data (Free Widget) 167 | - Spotify (Free API) 168 | - Any functionality that you would like to see here, please open an issue or submit a PR. 169 | - To enable function calling and conditional streaming UI (currently in beta), ensure useFunctionCalling is set to true in the config file. 170 | 171 | ### Ollama Support (Partially supported) 172 | Currently, streaming text responses are supported for Ollama, but follow-up questions are not yet supported. 173 | 174 | Embeddings are supported, however, time-to-first-token can be quite long when using both a local embedding model as well as a local model for the streaming inference. I recommended decreasing a number of the RAG values specified in the `app/config.tsx` file to decrease the time-to-first-token when using Ollama. 175 | 176 | To get started, make sure you have the Ollama running model on your local machine and set within the config the model you would like to use and set use OllamaInference and/or useOllamaEmbeddings to true. 177 | 178 | Note: When 'useOllamaInference' is set to true, the model will be used for both text generation, but it will skip the follow-up questions inference step when using Ollama. 179 | 180 | More info: https://ollama.com/blog/openai-compatibility 181 | 182 | ### Roadmap 183 | 184 | - [] Add document upload + RAG for document search/retrieval 185 | - [] Add a settings component to allow users to select the model, embeddings model, and other parameters from the UI 186 | - [] Add support for follow-up questions when using Ollama 187 | - [Complete] Add support diffusion models (Fal.AI SD3 to start), accessible via '@ mention' 188 | - [Complete] Add AI Gateway to support multiple models and embeddings. 
(OpenAI, Azure OpenAI, Anyscale, Google Gemini & Palm, Anthropic, Cohere, Together AI, Perplexity, Mistral, Nomic, AI21, Stability AI, DeepInfra, Ollama, etc) 189 | ```https://github.com/Portkey-AI/gateway``` 190 | - [Complete] Add support for semantic caching to improve response times 191 | - [Complete] Add support for dynamic and conditionally rendered UI components based on the user's query 192 | 193 | ![Example](https://media.giphy.com/media/v1.Y2lkPTc5MGI3NjExN284d3p5azAyNHpubm9mb2F0cnB6MWdtcTdnd2Nkb2d1ZnRtMG0yYiZlcD12MV9pbnRlcm5hbF9naWZfYnlfaWQmY3Q9Zw/OMpt8ZbBsjphZz6mue/giphy.gif) 194 | 195 | - [Completed] Add dark mode support based on the user's system preference 196 | 197 | ### Backend + Node Only Express API 198 | 199 | [Watch the express tutorial here](https://youtu.be/43ZCeBTcsS8) for a detailed guide on setting up and running this project. 200 | In addition to the Next.JS version of the project, there is a backend only version that uses Node.js and Express. Which is located in the 'express-api' directory. This is a standalone version of the project that can be used as a reference for building a similar API. There is also a readme file in the 'express-api' directory that explains how to run the backend version. 201 | 202 | ### Upstash Redis Rate Limiting 203 | [Watch the Upstash Redis Rate Limiting tutorial here](https://youtu.be/3_aNVu6EU3Y) for a detailed guide on setting up and running this project. 204 | Upstash Redis Rate Limiting is a free tier service that allows you to set up rate limiting for your application. It provides a simple and easy-to-use interface for configuring and managing rate limits. With Upstash, you can easily set limits on the number of requests per user, IP address, or other criteria. This can help prevent abuse and ensure that your application is not overwhelmed with requests. 205 | 206 | ## Contributing 207 | 208 | Contributions to the project are welcome. Feel free to fork the repository, make your changes, and submit a pull request. You can also open issues to suggest improvements or report bugs. 209 | 210 | 211 | ## License 212 | 213 | This project is licensed under the MIT License. 214 | 215 | [![Star History Chart](https://api.star-history.com/svg?repos=developersdigest/llm-answer-engine&type=Date)](https://star-history.com/#developersdigest/llm-answer-engine&Date) 216 | 217 | I'm the developer behind Developers Digest. If you find my work helpful or enjoy what I do, consider supporting me. 
Here are a few ways you can do that: 218 | 219 | - **Patreon**: Support me on Patreon at [patreon.com/DevelopersDigest](https://www.patreon.com/DevelopersDigest) 220 | - **Buy Me A Coffee**: You can buy me a coffee at [buymeacoffee.com/developersdigest](https://www.buymeacoffee.com/developersdigest) 221 | - **Website**: Check out my website at [developersdigest.tech](https://developersdigest.tech) 222 | - **Github**: Follow me on GitHub at [github.com/developersdigest](https://github.com/developersdigest) 223 | - **Twitter**: Follow me on Twitter at [twitter.com/dev__digest](https://twitter.com/dev__digest) 224 | -------------------------------------------------------------------------------- /app/action.tsx: -------------------------------------------------------------------------------- 1 | "use server"; 2 | 3 | import { createAI, createStreamableValue } from 'ai/rsc'; 4 | import { config } from './config'; 5 | import { functionCalling } from './function-calling'; 6 | import { getSearchResults, getImages, getVideos } from './tools/searchProviders'; 7 | import { get10BlueLinksContents, processAndVectorizeContent } from './tools/contentProcessing'; 8 | import { setInSemanticCache, clearSemanticCache, initializeSemanticCache, getFromSemanticCache } from './tools/semanticCache'; 9 | import { relevantQuestions } from './tools/generateRelevantQuestions'; 10 | import { streamingChatCompletion } from './tools/streamingChatCompletion'; 11 | import { checkRateLimit } from './tools/rateLimiting'; 12 | import { lookupTool } from './tools/mentionTools'; 13 | 14 | async function myAction(userMessage: string, mentionTool: string | null, logo: string | null, file: string): Promise { 15 | "use server"; 16 | const streamable = createStreamableValue({}); 17 | 18 | (async () => { 19 | await checkRateLimit(streamable); 20 | 21 | await initializeSemanticCache(); 22 | 23 | const cachedData = await getFromSemanticCache(userMessage); 24 | if (cachedData) { 25 | streamable.update({ cachedData }); 26 | return; 27 | } 28 | 29 | if (mentionTool) { 30 | await lookupTool(mentionTool, userMessage, streamable, file); 31 | } 32 | 33 | const [images, sources, videos, conditionalFunctionCallUI] = await Promise.all([ 34 | getImages(userMessage), 35 | getSearchResults(userMessage), 36 | getVideos(userMessage), 37 | functionCalling(userMessage), 38 | ]); 39 | 40 | streamable.update({ searchResults: sources, images, videos }); 41 | 42 | if (config.useFunctionCalling) { 43 | streamable.update({ conditionalFunctionCallUI }); 44 | } 45 | 46 | const html = await get10BlueLinksContents(sources); 47 | const vectorResults = await processAndVectorizeContent(html, userMessage); 48 | const accumulatedLLMResponse = await streamingChatCompletion(userMessage, vectorResults, streamable); 49 | const followUp = await relevantQuestions(sources, userMessage); 50 | 51 | streamable.update({ followUp }); 52 | 53 | setInSemanticCache(userMessage, { 54 | searchResults: sources, 55 | images, 56 | videos, 57 | conditionalFunctionCallUI: config.useFunctionCalling ? 
conditionalFunctionCallUI : undefined, 58 | llmResponse: accumulatedLLMResponse, 59 | followUp, 60 | semanticCacheKey: userMessage 61 | }); 62 | 63 | streamable.done({ status: 'done' }); 64 | })(); 65 | 66 | return streamable.value; 67 | } 68 | 69 | const initialAIState: { 70 | role: 'user' | 'assistant' | 'system' | 'function'; 71 | content: string; 72 | id?: string; 73 | name?: string; 74 | }[] = []; 75 | 76 | const initialUIState: { 77 | id: number; 78 | display: React.ReactNode; 79 | }[] = []; 80 | 81 | export const AI = createAI({ 82 | actions: { 83 | myAction, 84 | clearSemanticCache 85 | }, 86 | initialUIState, 87 | initialAIState, 88 | }); -------------------------------------------------------------------------------- /app/config.tsx: -------------------------------------------------------------------------------- 1 | // - The below are going to be the default values, eventually this will move to a UI component so it can be easily changed by the user 2 | // - To enable + use Ollama models, ensure inference and/or embeddings model are downloaded and ollama is running https://ollama.com/library 3 | // - Icons within UI are not yet dynamic, to change currently, you must change the img src path in the UI component 4 | // - IMPORTANT: when Ollama Embeddings + Ollama inference enabled at the same time, this can cause time-to-first-token to be quite long 5 | // - IMPORTANT: Follow-up questions are not yet implrmented with Ollama models, only OpenAI compatible models that use {type: "json_object"} 6 | 7 | export const config = { 8 | useOllamaInference: false, 9 | useOllamaEmbeddings: false, 10 | searchProvider: 'serper', // 'serper', 'google' // 'serper' is the default 11 | inferenceModel: 'llama-3.1-70b-versatile', // Groq: 'mixtral-8x7b-32768', 'gemma-7b-it' // OpenAI: 'gpt-3.5-turbo', 'gpt-4' // Ollama 'mistral', 'llama3' etc 12 | inferenceAPIKey: process.env.GROQ_API_KEY, // Groq: process.env.GROQ_API_KEY // OpenAI: process.env.OPENAI_API_KEY // Ollama: 'ollama' is the default 13 | embeddingsModel: 'text-embedding-3-small', // Ollama: 'llama2', 'nomic-embed-text' // OpenAI 'text-embedding-3-small', 'text-embedding-3-large' 14 | textChunkSize: 800, // Recommended to decrease for Ollama 15 | textChunkOverlap: 200, // Recommended to decrease for Ollama 16 | numberOfSimilarityResults: 4, // Number of similarity results to return per page 17 | numberOfPagesToScan: 10, // Recommended to decrease for Ollama 18 | nonOllamaBaseURL: 'https://api.groq.com/openai/v1', //Groq: https://api.groq.com/openai/v1 // OpenAI: https://api.openai.com/v1 19 | useFunctionCalling: true, // Set to true to enable function calling and conditional streaming UI (currently in beta) 20 | useRateLimiting: false, // Uses Upstash rate limiting to limit the number of requests per user 21 | useSemanticCache: false, // Uses Upstash semantic cache to store and retrieve data for faster response times 22 | usePortkey: false, // Uses Portkey for AI Gateway in @mentions (currently in beta) see config-tools.tsx to configure + mentionTools.tsx for source code 23 | } 24 | -------------------------------------------------------------------------------- /app/function-calling.tsx: -------------------------------------------------------------------------------- 1 | // @ts-nocheck 2 | import { OpenAI } from 'openai'; 3 | import { config } from './config'; 4 | import { SpotifyApi } from "@spotify/web-api-ts-sdk"; 5 | 6 | const client = new OpenAI({ 7 | baseURL: config.nonOllamaBaseURL, 8 | apiKey: config.inferenceAPIKey 9 | }); 10 | 
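// Overview of this module: functionCalling() (defined below) sends the user query to the configured model together with a `tools` array and `tool_choice: "auto"`, so the model can pick getTickers (stock ticker for the TradingView widget), searchPlaces / goShopping (Serper APIs), or searchSong (Spotify). Any returned tool_calls are dispatched through the matching local function, and the first result is parsed from JSON and handed back to the caller in app/action.tsx.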
const MODEL = config.inferenceModel; 11 | 12 | const api = SpotifyApi.withClientCredentials( 13 | process.env.SPOTIFY_CLIENT_ID as string, 14 | process.env.SPOTIFY_CLIENT_SECRET as string 15 | ); 16 | 17 | export async function searchPlaces(query: string, location: string) { 18 | try { 19 | const response = await fetch('https://google.serper.dev/places', { 20 | method: 'POST', 21 | headers: { 22 | 'X-API-KEY': process.env.SERPER_API, 23 | 'Content-Type': 'application/json', 24 | }, 25 | body: JSON.stringify({ q: query, location: location }), 26 | }); 27 | const data = await response.json(); 28 | const normalizedData = { 29 | type: 'places', 30 | places: data.places.map(place => ({ 31 | position: place.position, 32 | title: place.title, 33 | address: place.address, 34 | latitude: place.latitude, 35 | longitude: place.longitude, 36 | rating: place.rating, 37 | ratingCount: place.ratingCount, 38 | category: place.category, 39 | phoneNumber: place.phoneNumber, 40 | website: place.website, 41 | cid: place.cid 42 | })) 43 | }; 44 | return JSON.stringify(normalizedData); 45 | } catch (error) { 46 | console.error('Error searching for places:', error); 47 | return JSON.stringify({ error: 'Failed to search for places' }); 48 | } 49 | } 50 | export async function goShopping(message: string) { 51 | const url = 'https://google.serper.dev/shopping'; 52 | const requestOptions: RequestInit = { 53 | method: 'POST', 54 | headers: { 55 | 'X-API-KEY': process.env.SERPER_API as string, 56 | 'Content-Type': 'application/json' 57 | }, 58 | body: JSON.stringify({ "q": message }) 59 | }; 60 | try { 61 | const response = await fetch(url, requestOptions); 62 | if (!response.ok) { 63 | console.error(`Failed to fetch ${url}. Status: ${response.status}`); 64 | } 65 | const responseData = await response.json(); 66 | const shoppingData = { 67 | type: 'shopping', 68 | shopping: responseData.shopping 69 | }; 70 | return JSON.stringify(shoppingData); 71 | } catch (error) { 72 | console.error('Error fetching shopping data:', error); 73 | } 74 | } 75 | export async function getTickers(ticker: string) { 76 | return JSON.stringify({ type: 'ticker', data: ticker }); 77 | } 78 | export async function searchSong(query: string): Promise { 79 | const items = await api.search(query, ["track"]); 80 | const track = items.tracks.items[0]; 81 | if (track) { 82 | const trackId = track.uri.replace('spotify:track:', ''); 83 | return JSON.stringify({ trackId: trackId }); 84 | } else { 85 | return JSON.stringify({ error: "No matching song found." }); 86 | } 87 | } 88 | export async function functionCalling(query: string) { 89 | try { 90 | const messages = [ 91 | { role: "system", content: "You are a function calling agent. You will be given a query and a list of functions. Your task is to call the appropriate function based on the query and return the result in JSON format. 
ONLY CALL A FUNCTION IF YOU ARE HIGHLY CONFIDENT IT WILL BE USED" }, 92 | { role: "user", content: query }, 93 | ]; 94 | const tools = [ 95 | { 96 | type: "function", 97 | function: { 98 | name: "getTickers", 99 | description: "Get a single market name and stock ticker if the user mentions a public company", 100 | parameters: { 101 | type: "object", 102 | properties: { 103 | ticker: { 104 | type: "string", 105 | description: "The stock ticker symbol and market name, example NYSE:K or NASDAQ:AAPL", 106 | }, 107 | }, 108 | required: ["ticker"], 109 | }, 110 | }, 111 | }, 112 | { 113 | type: "function", 114 | function: { 115 | name: "searchPlaces", 116 | description: "ONLY SEARCH for places using the given query and location", 117 | parameters: { 118 | type: "object", 119 | properties: { 120 | query: { 121 | type: "string", 122 | description: "The search query for places", 123 | }, 124 | location: { 125 | type: "string", 126 | description: "The location to search for places", 127 | }, 128 | }, 129 | required: ["query", "location"], 130 | }, 131 | }, 132 | }, 133 | { 134 | type: "function", 135 | function: { 136 | name: "goShopping", 137 | description: "Search for shopping items using the given query", 138 | parameters: { 139 | type: "object", 140 | properties: { 141 | query: { 142 | type: "string", 143 | description: "The search query for shopping items", 144 | }, 145 | }, 146 | required: ["query"], 147 | }, 148 | } 149 | }, 150 | { 151 | type: "function", 152 | function: { 153 | name: "searchSong", 154 | description: "Searches for a song on Spotify based on the provided search query and returns the track ID.", 155 | parameters: { 156 | type: "object", 157 | properties: { 158 | query: { 159 | type: "string", 160 | description: "The search query to find a song on Spotify, such as the song title or artist name.", 161 | }, 162 | }, 163 | required: ["query"], 164 | }, 165 | }, 166 | }, 167 | ]; 168 | const response = await client.chat.completions.create({ 169 | model: MODEL, 170 | messages: messages, 171 | tools: tools, 172 | tool_choice: "auto", 173 | max_tokens: 4096, 174 | }); 175 | const responseMessage = response.choices[0].message; 176 | const toolCalls = responseMessage.tool_calls; 177 | if (toolCalls) { 178 | const availableFunctions = { 179 | getTickers: getTickers, 180 | searchPlaces: searchPlaces, 181 | goShopping: goShopping, 182 | searchSong: searchSong, 183 | }; 184 | messages.push(responseMessage); 185 | for (const toolCall of toolCalls) { 186 | const functionName = toolCall.function.name; 187 | const functionToCall = availableFunctions[functionName]; 188 | const functionArgs = JSON.parse(toolCall.function.arguments); 189 | let functionResponse; 190 | try { 191 | if (functionName === 'getTickers') { 192 | functionResponse = await functionToCall(functionArgs.ticker); 193 | } else if (functionName === 'searchPlaces') { 194 | functionResponse = await functionToCall(functionArgs.query, functionArgs.location); 195 | } else if (functionName === 'goShopping') { 196 | functionResponse = await functionToCall(functionArgs.query); 197 | } else if (functionName === 'searchSong') { 198 | functionResponse = await functionToCall(functionArgs.query); 199 | } 200 | return JSON.parse(functionResponse); 201 | } catch (error) { 202 | console.error(`Error calling function ${functionName}:`, error); 203 | return JSON.stringify({ error: `Failed to call function ${functionName}` }); 204 | } 205 | } 206 | } 207 | } catch (error) { 208 | console.error('Error in functionCalling:', error); 209 | return 
JSON.stringify({ error: 'An error occurred during function calling' }); 210 | } 211 | } -------------------------------------------------------------------------------- /app/globals.css: -------------------------------------------------------------------------------- 1 | @tailwind base; 2 | @tailwind components; 3 | @tailwind utilities; 4 | 5 | @layer base { 6 | :root { 7 | --background: 0 0% 100%; 8 | --foreground: 240 10% 3.9%; 9 | --muted: 240 4.8% 95.9%; 10 | --muted-foreground: 240 3.8% 46.1%; 11 | --popover: 0 0% 100%; 12 | --popover-foreground: 240 10% 3.9%; 13 | --card: 0 0% 100%; 14 | --card-foreground: 240 10% 3.9%; 15 | --border: 240 5.9% 90%; 16 | --input: 240 5.9% 90%; 17 | --primary: 240 5.9% 10%; 18 | --primary-foreground: 0 0% 98%; 19 | --secondary: 240 4.8% 95.9%; 20 | --secondary-foreground: 240 5.9% 10%; 21 | --accent: 240 4.8% 95.9%; 22 | --accent-foreground: ; 23 | --destructive: 0 84.2% 60.2%; 24 | --destructive-foreground: 0 0% 98%; 25 | --ring: 240 5% 64.9%; 26 | --radius: 0.5rem; 27 | } 28 | 29 | .dark { 30 | --background: 240 10% 3.9%; 31 | --foreground: 0 0% 98%; 32 | --muted: 240 3.7% 15.9%; 33 | --muted-foreground: 240 5% 64.9%; 34 | --popover: 240 10% 3.9%; 35 | --popover-foreground: 0 0% 98%; 36 | --card: 240 10% 3.9%; 37 | --card-foreground: 0 0% 98%; 38 | --border: 240 3.7% 15.9%; 39 | --input: 240 3.7% 15.9%; 40 | --primary: 0 0% 98%; 41 | --primary-foreground: 240 5.9% 10%; 42 | --secondary: 240 3.7% 15.9%; 43 | --secondary-foreground: 0 0% 98%; 44 | --accent: 240 3.7% 15.9%; 45 | --accent-foreground: ; 46 | --destructive: 0 62.8% 30.6%; 47 | --destructive-foreground: 0 85.7% 97.3%; 48 | --ring: 240 3.7% 15.9%; 49 | } 50 | } 51 | 52 | @layer base { 53 | * { 54 | @apply border-border; 55 | } 56 | 57 | body { 58 | @apply bg-background text-foreground; 59 | } 60 | } 61 | 62 | .keyboard-visible { 63 | transform: translateY(-100%); 64 | transition: transform 0.3s ease-in-out; 65 | } 66 | 67 | .leaflet-popup-content p { 68 | margin: 0 !important; 69 | } 70 | 71 | /* leaflet made me do it.. 
*/ 72 | .bring-to-front { 73 | z-index: 99999 !important; 74 | } 75 | 76 | .bring-to-front-modal { 77 | z-index: 99999999 !important; 78 | } 79 | 80 | .rate-limit-modal { 81 | z-index: 999999999999 !important; 82 | } 83 | 84 | /* tradingview mobile */ 85 | .my-5.tradingview-widget-container { 86 | height: 150px !important; 87 | } 88 | 89 | /* Markdown styles */ 90 | /* Markdown styles */ 91 | .markdown-container { 92 | font-family: Arial, sans-serif; 93 | line-height: 1.6; 94 | color: #333; 95 | font-size: 16px; 96 | } 97 | 98 | .markdown-container h1, 99 | .markdown-container h2, 100 | .markdown-container h3, 101 | .markdown-container h4, 102 | .markdown-container h5, 103 | .markdown-container h6 { 104 | font-weight: bold; 105 | margin-top: 1em; 106 | margin-bottom: 0.5em; 107 | border-bottom: 1px solid #ddd; 108 | padding-bottom: 0.3em; 109 | } 110 | 111 | .markdown-container h1 { 112 | font-size: 1.5em; 113 | } 114 | 115 | .markdown-container h2 { 116 | font-size: 1.4em; 117 | } 118 | 119 | .markdown-container h3 { 120 | font-size: 1.3em; 121 | } 122 | 123 | .markdown-container h4 { 124 | font-size: 1.2em; 125 | } 126 | 127 | .markdown-container h5 { 128 | font-size: 1.1em; 129 | } 130 | 131 | .markdown-container h6 { 132 | font-size: 1em; 133 | } 134 | 135 | .markdown-container p { 136 | margin-bottom: 1em; 137 | } 138 | 139 | .markdown-container strong { 140 | font-weight: bold; 141 | } 142 | 143 | .markdown-container em { 144 | font-style: italic; 145 | } 146 | 147 | .markdown-container a { 148 | color: #000; 149 | text-decoration: underline; 150 | } 151 | 152 | .markdown-container code { 153 | font-family: monospace; 154 | font-size: 0.9em; 155 | padding: 0.2em 0.4em; 156 | background-color: #f6f8fa; 157 | border-radius: 3px; 158 | } 159 | 160 | .markdown-container pre { 161 | font-family: monospace; 162 | font-size: 0.9em; 163 | padding: 1em; 164 | overflow: auto; 165 | background-color: #f6f8fa; 166 | border-radius: 3px; 167 | } 168 | 169 | .markdown-container pre code { 170 | padding: 0; 171 | background-color: transparent; 172 | } 173 | 174 | .markdown-container ul, 175 | .markdown-container ol { 176 | margin-bottom: 1em; 177 | padding-left: 2em; 178 | } 179 | 180 | .markdown-container ul li, 181 | .markdown-container ol li { 182 | margin-bottom: 0.5em; 183 | } 184 | 185 | .markdown-container ul li { 186 | list-style-type: disc; 187 | } 188 | 189 | .markdown-container ol li { 190 | list-style-type: decimal; 191 | } 192 | 193 | .markdown-container blockquote { 194 | margin: 1em 0; 195 | padding: 0.5em 1em; 196 | border-left: 4px solid #ddd; 197 | color: #666; 198 | } 199 | 200 | .markdown-container hr { 201 | border: none; 202 | border-top: 1px solid #ddd; 203 | margin: 1.5em 0; 204 | } 205 | 206 | .fixed.inset-0.z-50.bg-black\/80.data-\[state\=open\]\:animate-in.data-\[state\=closed\]\:animate-out.data-\[state\=closed\]\:fade-out-0.data-\[state\=open\]\:fade-in-0 { 207 | z-index: 99999; 208 | } 209 | 210 | .clip-yt-img { 211 | margin-top: -24px; 212 | clip-path: inset(30px 0 30px 0); 213 | } -------------------------------------------------------------------------------- /app/layout.tsx: -------------------------------------------------------------------------------- 1 | import type { Metadata } from 'next'; 2 | import { GeistMono } from 'geist/font/mono'; 3 | import { GeistSans } from 'geist/font/sans'; 4 | import { Analytics } from '@vercel/analytics/react'; 5 | import { Toaster } from '@/components/ui/toaster'; 6 | import './globals.css'; 7 | 8 | import { AI } from 
'./action'; 9 | import { Header } from '@/components/header'; 10 | import { Providers } from '@/components/providers'; 11 | 12 | const meta = { 13 | title: 'answers, how they should be displayed.', 14 | description: 15 | 'answer engine built by developers digest', 16 | }; 17 | export const metadata: Metadata = { 18 | ...meta, 19 | title: { 20 | default: 'answer website', 21 | template: `%s - answer website`, 22 | }, 23 | icons: { 24 | icon: '/favicon.ico', 25 | shortcut: '/favicon-16x16.png', 26 | apple: '/apple-touch-icon.png', 27 | }, 28 | twitter: { 29 | ...meta, 30 | card: 'summary_large_image', 31 | site: '@vercel', 32 | }, 33 | openGraph: { 34 | ...meta, 35 | locale: 'en-US', 36 | type: 'website', 37 | }, 38 | }; 39 | 40 | export const viewport = { 41 | themeColor: [ 42 | { media: '(prefers-color-scheme: light)', color: 'white' }, 43 | { media: '(prefers-color-scheme: dark)', color: 'black' }, 44 | ], 45 | }; 46 | 47 | export default function RootLayout({ 48 | children, 49 | }: Readonly<{ 50 | children: React.ReactNode; 51 | }>) { 52 | return ( 53 | 54 | 55 | 58 | 59 | 60 | 66 |
67 |
68 |
69 | {children} 70 |
71 |
72 |
73 |
74 | 75 | 76 | 77 | ); 78 | } 79 | 80 | export const runtime = 'edge'; 81 | -------------------------------------------------------------------------------- /app/tools/contentProcessing.tsx: -------------------------------------------------------------------------------- 1 | 2 | import { config } from '../config'; 3 | import cheerio from 'cheerio'; 4 | import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter'; 5 | import { MemoryVectorStore } from 'langchain/vectorstores/memory'; 6 | import { Document as DocumentInterface } from 'langchain/document'; 7 | import { OpenAIEmbeddings } from '@langchain/openai'; 8 | import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama"; 9 | let embeddings: OllamaEmbeddings | OpenAIEmbeddings; 10 | if (config.useOllamaEmbeddings) { 11 | embeddings = new OllamaEmbeddings({ 12 | model: config.embeddingsModel, 13 | baseUrl: "http://localhost:11434" 14 | }); 15 | } else { 16 | embeddings = new OpenAIEmbeddings({ 17 | modelName: config.embeddingsModel 18 | }); 19 | } 20 | 21 | interface SearchResult { 22 | title: string; 23 | link: string; 24 | favicon: string; 25 | } 26 | interface ContentResult extends SearchResult { 27 | html: string; 28 | } 29 | 30 | 31 | // Fetch contents of top 10 search results 32 | export async function get10BlueLinksContents(sources: SearchResult[]): Promise { 33 | async function fetchWithTimeout(url: string, options: RequestInit = {}, timeout = 800): Promise { 34 | try { 35 | const controller = new AbortController(); 36 | const timeoutId = setTimeout(() => controller.abort(), timeout); 37 | const response = await fetch(url, { ...options, signal: controller.signal }); 38 | clearTimeout(timeoutId); 39 | return response; 40 | } catch (error) { 41 | if (error) { 42 | console.log(`Skipping ${url}!`); 43 | } 44 | throw error; 45 | } 46 | } 47 | function extractMainContent(html: string): string { 48 | try { 49 | const $ = cheerio.load(html); 50 | $("script, style, head, nav, footer, iframe, img").remove(); 51 | return $("body").text().replace(/\s+/g, " ").trim(); 52 | } catch (error) { 53 | console.error('Error extracting main content:', error); 54 | throw error; 55 | } 56 | } 57 | const promises = sources.map(async (source): Promise => { 58 | try { 59 | const response = await fetchWithTimeout(source.link, {}, 800); 60 | if (!response.ok) { 61 | throw new Error(`Failed to fetch ${source.link}. 
Status: ${response.status}`); 62 | } 63 | const html = await response.text(); 64 | const mainContent = extractMainContent(html); 65 | return { ...source, html: mainContent }; 66 | } catch (error) { 67 | // console.error(`Error processing ${source.link}:`, error); 68 | return null; 69 | } 70 | }); 71 | try { 72 | const results = await Promise.all(promises); 73 | return results.filter((source): source is ContentResult => source !== null); 74 | } catch (error) { 75 | console.error('Error fetching and processing blue links contents:', error); 76 | throw error; 77 | } 78 | } 79 | // rocess and vectorize content using LangChain 80 | export async function processAndVectorizeContent( 81 | contents: ContentResult[], 82 | query: string, 83 | textChunkSize = config.textChunkSize, 84 | textChunkOverlap = config.textChunkOverlap, 85 | numberOfSimilarityResults = config.numberOfSimilarityResults, 86 | ): Promise { 87 | const allResults: DocumentInterface[] = []; 88 | try { 89 | for (let i = 0; i < contents.length; i++) { 90 | const content = contents[i]; 91 | if (content.html.length > 0) { 92 | try { 93 | const splitText = await new RecursiveCharacterTextSplitter({ chunkSize: textChunkSize, chunkOverlap: textChunkOverlap }).splitText(content.html); 94 | const vectorStore = await MemoryVectorStore.fromTexts(splitText, { title: content.title, link: content.link }, embeddings); 95 | const contentResults = await vectorStore.similaritySearch(query, numberOfSimilarityResults); 96 | allResults.push(...contentResults); 97 | } catch (error) { 98 | console.error(`Error processing content for ${content.link}:`, error); 99 | } 100 | } 101 | } 102 | return allResults; 103 | } catch (error) { 104 | console.error('Error processing and vectorizing content:', error); 105 | throw error; 106 | } 107 | } -------------------------------------------------------------------------------- /app/tools/generateRelevantQuestions.tsx: -------------------------------------------------------------------------------- 1 | 2 | import { config } from '../config'; 3 | import { OpenAI } from 'openai'; 4 | 5 | let openai: OpenAI; 6 | if (config.useOllamaInference) { 7 | openai = new OpenAI({ 8 | baseURL: 'http://localhost:11434/v1', 9 | apiKey: 'ollama' 10 | }); 11 | } else { 12 | openai = new OpenAI({ 13 | baseURL: config.nonOllamaBaseURL, 14 | apiKey: config.inferenceAPIKey 15 | }); 16 | } 17 | 18 | interface SearchResult { 19 | title: string; 20 | link: string; 21 | favicon: string; 22 | } 23 | 24 | export const relevantQuestions = async (sources: SearchResult[], userMessage: String): Promise => { 25 | return await openai.chat.completions.create({ 26 | messages: [ 27 | { 28 | role: "system", 29 | content: ` 30 | You are a Question generator who generates an array of 3 follow-up questions in JSON format. 31 | The JSON schema should include: 32 | { 33 | "original": "The original search query or context", 34 | "followUp": [ 35 | "Question 1", 36 | "Question 2", 37 | "Question 3" 38 | ] 39 | } 40 | `, 41 | }, 42 | { 43 | role: "user", 44 | content: `Generate follow-up questions based on the top results from a similarity search: ${JSON.stringify(sources)}. 
The original search query is: "${userMessage}".`, 45 | }, 46 | ], 47 | model: config.inferenceModel, 48 | response_format: { type: "json_object" }, 49 | }); 50 | }; -------------------------------------------------------------------------------- /app/tools/mentionFunctions/falAiStableDiffusion3Medium.ts: -------------------------------------------------------------------------------- 1 | import * as fal from "@fal-ai/serverless-client"; 2 | 3 | export async function falAiStableDiffusion3Medium(mentionTool: string, userMessage: string, streamable: any): Promise { 4 | const result = await fal.subscribe("fal-ai/stable-diffusion-v3-medium", { 5 | input: { 6 | prompt: userMessage, 7 | sync_mode: true 8 | }, 9 | logs: true, 10 | onQueueUpdate: (update) => { 11 | if (update.status === "IN_PROGRESS" && update.logs) { 12 | update.logs.map((log) => log.message).forEach(console.log); 13 | } 14 | }, 15 | }); 16 | 17 | if ((result as any).images && (result as any).images.length > 0) { 18 | const imageUrl = (result as any).images[0].url; 19 | const response = await fetch(imageUrl); 20 | const buffer = await response.arrayBuffer(); 21 | let base64data = Buffer.from(buffer).toString('base64'); 22 | base64data = `data:image/png;base64,${base64data}`; 23 | streamable.done({ 'falBase64Image': base64data }); 24 | } else { 25 | streamable.done({ 'llmResponseEnd': true }); 26 | } 27 | // Ensure the function returns void 28 | return; 29 | } -------------------------------------------------------------------------------- /app/tools/mentionFunctions/portKeyAIGateway.ts: -------------------------------------------------------------------------------- 1 | import Portkey from 'portkey-ai'; 2 | import { config } from '../../config'; 3 | 4 | export async function portKeyAIGateway(mentionTool: string, userMessage: string, streamable: any): Promise { 5 | if (config.usePortkey) { 6 | const portkey = new Portkey({ 7 | apiKey: process.env.PORTKEY_API_KEY, 8 | virtualKey: process.env.PORTKEY_BEDROCK_VIRTUAL_KEY 9 | }); 10 | 11 | const chatCompletion = await portkey.chat.completions.create({ 12 | messages: [ 13 | { 14 | role: "system", 15 | content: ` 16 | - Here is my query "${userMessage}", respond back ALWAYS IN MARKDOWN and be verbose with a lot of details, never mention the system message. 17 | ` 18 | }, 19 | { role: "user", content: `Here is my query "${userMessage}"` }, 20 | ], 21 | stream: true, 22 | max_tokens: mentionTool.includes('anthropic') ? 
100000 : 4096, 23 | model: mentionTool 24 | }); 25 | 26 | let accumulatedLLMResponse = ""; 27 | for await (const chunk of chatCompletion) { 28 | if (chunk.choices[0].finish_reason === "COMPLETE" || chunk.choices[0].finish_reason === "stop" || chunk.choices[0].finish_reason === "end_turn") { 29 | streamable.done({ 'llmResponseEnd': true }); 30 | return; 31 | } else if (chunk.choices[0].delta) { 32 | streamable.update({ 'llmResponse': chunk.choices[0].delta.content }); 33 | accumulatedLLMResponse += chunk.choices[0].delta.content; 34 | } 35 | } 36 | } 37 | // Ensure the function returns void 38 | return; 39 | } -------------------------------------------------------------------------------- /app/tools/mentionFunctions/portKeyAIGatewayTogetherAI.ts: -------------------------------------------------------------------------------- 1 | import Portkey from 'portkey-ai'; 2 | import { config } from '../../config'; 3 | 4 | export async function portKeyAIGatewayTogetherAI(mentionTool: string, userMessage: string, streamable: any): Promise { 5 | if (config.usePortkey) { 6 | const portkey = new Portkey({ 7 | apiKey: process.env.PORTKEY_API_KEY, 8 | virtualKey: 'together-ai-cc4094' 9 | }); 10 | 11 | const chatCompletion = await portkey.chat.completions.create({ 12 | messages: [ 13 | { 14 | role: "system", 15 | content: ` 16 | - Here is my query "${userMessage}", respond back ALWAYS IN MARKDOWN and be verbose with a lot of details, never mention the system message. 17 | ` 18 | }, 19 | { role: "user", content: `Here is my query "${userMessage}"` }, 20 | ], 21 | stream: true, 22 | max_tokens: 32000, 23 | model: mentionTool 24 | }); 25 | 26 | let accumulatedLLMResponse = ""; 27 | for await (const chunk of chatCompletion) { 28 | if (chunk.choices[0].finish_reason === "COMPLETE" || chunk.choices[0].finish_reason === "stop" || chunk.choices[0].finish_reason === "end_turn") { 29 | streamable.done({ 'llmResponseEnd': true }); 30 | return; 31 | } else if (chunk.choices[0].delta) { 32 | streamable.update({ 'llmResponse': chunk.choices[0].delta.content }); 33 | accumulatedLLMResponse += chunk.choices[0].delta.content; 34 | } 35 | } 36 | } 37 | // Ensure the function returns void 38 | return; 39 | } -------------------------------------------------------------------------------- /app/tools/mentionFunctions/streamChatCompletion.ts: -------------------------------------------------------------------------------- 1 | import { OpenAI } from 'openai'; 2 | import { config } from '../../config'; 3 | 4 | let openai: OpenAI; 5 | if (config.useOllamaInference) { 6 | openai = new OpenAI({ 7 | baseURL: 'http://localhost:11434/v1', 8 | apiKey: 'ollama' 9 | }); 10 | } else { 11 | openai = new OpenAI({ 12 | baseURL: config.nonOllamaBaseURL, 13 | apiKey: config.inferenceAPIKey 14 | }); 15 | } 16 | 17 | export async function streamChatCompletion(mentionTool: string, userMessage: string, streamable: any): Promise { 18 | const chatCompletion = await openai.chat.completions.create({ 19 | messages: [ 20 | { 21 | role: "system", 22 | content: ` 23 | - Here is my query "${userMessage}", respond back ALWAYS IN MARKDOWN and be verbose with a lot of details, never mention the system message. 
24 | ` 25 | }, 26 | { role: "user", content: `Here is my query "${userMessage}"` }, 27 | ], 28 | stream: true, 29 | model: mentionTool 30 | }); 31 | let accumulatedLLMResponse = ""; 32 | for await (const chunk of chatCompletion) { 33 | if (chunk.choices[0].delta && chunk.choices[0].finish_reason !== "stop") { 34 | streamable.update({ 'llmResponse': chunk.choices[0].delta.content }); 35 | accumulatedLLMResponse += chunk.choices[0].delta.content; 36 | } else if (chunk.choices[0].finish_reason === "stop") { 37 | streamable.done({ 'llmResponseEnd': true }); 38 | // Ensure the function returns void 39 | return; 40 | } 41 | } 42 | // Ensure the function returns void 43 | return; 44 | } -------------------------------------------------------------------------------- /app/tools/mentionFunctions/structuredUnlockSummarize.ts: -------------------------------------------------------------------------------- 1 | "use server" 2 | 3 | // 1. Import dependencies 4 | import OpenAI from "openai"; 5 | import { zodResponseFormat } from "openai/helpers/zod"; 6 | import { z } from "zod"; 7 | 8 | // 2. Initialize OpenAI client 9 | const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY }); 10 | 11 | // Define Zod schema for URL extraction 12 | const UrlExtraction = z.object({ 13 | url: z.string(), 14 | }); 15 | 16 | // 3. Define the main function 17 | export async function brightDataWebScraper(mentionTool: string, userMessage: string, streamable: any) { 18 | let targetUrl: string; 19 | try { 20 | // 4. Extract URL from user message using parsed output feature 21 | const urlCompletion = await openai.beta.chat.completions.parse({ 22 | model: "gpt-4o-2024-08-06", 23 | messages: [ 24 | { role: "system", content: "Extract the most likely valid URL from a natural language query." }, 25 | { role: "user", content: userMessage } 26 | ], 27 | response_format: zodResponseFormat(UrlExtraction, "extractedUrl"), 28 | }); 29 | 30 | // 5. Parse and validate URL data 31 | const extractedUrl = urlCompletion.choices[0]?.message?.parsed?.url ?? ''; 32 | 33 | if (!extractedUrl) { 34 | streamable.update({ llmResponse: `No valid URL found in the user message \n\n` }); 35 | throw new Error('No valid URL found in the user message'); 36 | } 37 | 38 | streamable.update({ llmResponse: `Extracting Information from: [${extractedUrl}](${extractedUrl}) \n\n` }); 39 | 40 | targetUrl = extractedUrl; 41 | 42 | // 6. Make API request to Bright Data 43 | const apiUrl = `http://localhost:3001/api/bright-data`; 44 | const response = await fetch(apiUrl, { 45 | method: 'POST', 46 | headers: { 47 | 'Content-Type': 'application/json', 48 | }, 49 | body: JSON.stringify({ url: targetUrl, query: userMessage }), 50 | }); 51 | 52 | if (!response.ok) { 53 | throw new Error(`HTTP error! status: ${response.status}`); 54 | } 55 | 56 | // 7. Process API response 57 | const responseData = await response.json(); 58 | 59 | if (!responseData.content) { 60 | throw new Error('No content received from the server'); 61 | } 62 | 63 | let contentForLLM = responseData.content; 64 | 65 | // 8. Summarize content using OpenAI 66 | const summaryStream = await openai.chat.completions.create({ 67 | model: "gpt-4-turbo-preview", 68 | stream: true, 69 | messages: [ 70 | { role: "system", content: "Always respond in valid markdown format to the user query based on the context provided" }, 71 | { role: "user", content: `Here is the context: ${contentForLLM} Response to the user query: ${userMessage}` } 72 | ] 73 | }); 74 | 75 | // 9. 
Process and stream summary chunks 76 | for await (const chunk of summaryStream) { 77 | if (chunk.choices[0]?.delta?.content) { 78 | streamable.update({ llmResponse: chunk.choices[0].delta.content }); 79 | } 80 | } 81 | 82 | streamable.done({ llmResponseEnd: true }); 83 | } catch (error: unknown) { 84 | // 10. Error handling 85 | const errorMessage = error instanceof Error ? error.message : 'Unknown error'; 86 | try { 87 | let userFriendlyMessage = `Sorry, I was unable to get information from the website. `; 88 | if (errorMessage.includes('No content received')) { 89 | userFriendlyMessage += 'The website data could not be processed correctly. This might be due to changes in the website structure or temporary issues.'; 90 | } else { 91 | userFriendlyMessage += errorMessage; 92 | } 93 | streamable.update({ llmResponse: userFriendlyMessage }); 94 | streamable.done({ llmResponseEnd: true }); 95 | } catch (streamError) { 96 | // Error handling for stream update failure 97 | } 98 | } 99 | } -------------------------------------------------------------------------------- /app/tools/mentionToolConfig.tsx: -------------------------------------------------------------------------------- 1 | export const mentionToolConfig = { 2 | useMentionQueries: true, 3 | mentionTools: [ 4 | // Groq Models 5 | { id: 'llama3-70b-8192', name: 'Groq Llama3-70b-8192', logo: 'https://asset.brandfetch.io/idxygbEPCQ/idzCyF-I44.png?updated=1668515712972', functionName: 'streamChatCompletion', enableRAG: true }, 6 | { id: 'llama3-8b-8192', name: 'Groq Llama3-8b-8192', logo: 'https://asset.brandfetch.io/idxygbEPCQ/idzCyF-I44.png?updated=1668515712972', functionName: 'streamChatCompletion', enableRAG: true }, 7 | { id: 'mixtral-8x7b-32768', name: 'Groq Mixtral-8x7b-32768', logo: 'https://asset.brandfetch.io/idxygbEPCQ/idzCyF-I44.png?updated=1668515712972', functionName: 'streamChatCompletion', enableRAG: true }, 8 | // AI Gateway + Portkey --- ANTHROPIC 9 | { id: 'anthropic.claude-3-sonnet-20240229-v1:0', name: 'Anthropic Claude 3 Sonnet', logo: 'https://asset.brandfetch.io/idmJWF3N06/idq0tv4tfX.svg?updated=1693981852273', functionName: 'portKeyAIGateway', enableRAG: true }, 10 | { id: 'anthropic.claude-3-haiku-20240307-v1:0', name: 'Anthropic Claude 3 Haiku', logo: 'https://asset.brandfetch.io/idmJWF3N06/idq0tv4tfX.svg?updated=1693981852273', functionName: 'portKeyAIGateway', enableRAG: true }, 11 | // AI Gateway + Portkey --- COHERE 12 | { id: 'cohere.command-text-v14', name: 'Cohere Command', logo: 'https://asset.brandfetch.io/idyni_Sw9h/idsvG5y-ZU.png?updated=1710782726843', functionName: 'portKeyAIGateway' }, 13 | { id: 'cohere.command-light-text-v14', name: 'Cohere Command Light', logo: 'https://asset.brandfetch.io/idyni_Sw9h/idsvG5y-ZU.png?updated=1710782726843', functionName: 'portKeyAIGateway' }, 14 | // AI Gateway + Portkey --- Mistral Large 15 | { id: 'mistral.mistral-large-2402-v1:0', name: 'Mistral Large', logo: 'https://asset.brandfetch.io/iduUavnR6m/id_83EF0Fl.jpeg?updated=1717360232737', functionName: 'portKeyAIGateway', enableRAG: true }, 16 | // AI Gateway + Together.AI --- QWEN 17 | { id: 'Qwen/Qwen2-72B-Instruct', name: 'Qwen2 - 72B', logo: 'https://avatars.githubusercontent.com/u/141221163?s=200&v=4', functionName: 'portKeyAIGatewayTogetherAI', enableRAG: true }, 18 | // FAL.AI - Stable Diffusion 3 Medium 19 | { id: 'fal-ai/stable-diffusion-v3-medium', name: 'fal.ai Stable Diffusion 3 ', logo: 'https://avatars.githubusercontent.com/u/74778219?s=200&v=4', functionName: 'falAiStableDiffusion3Medium' }, 20 | 
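// To register another @mention tool, add an entry to this array whose `functionName` matches a key of `mentionFunctions` in app/tools/mentionTools.tsx; lookupTool() matches the entry by `id` and calls that function with (id, userMessage, streamable). For `streamChatCompletion` the `id` is passed straight through as the OpenAI-compatible model name. Hypothetical example entry (the id, name, and logo path are placeholders):
// { id: 'some-openai-compatible-model-id', name: 'My Model', logo: './my-logo.png', functionName: 'streamChatCompletion', enableRAG: true },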
// Bright Data - Targeted Web Scraping 21 | { id: 'bright-data-web-unlock', name: 'Bright Data - Web Unlock / Puppeteer', logo: './bright-data-logo.png', functionName: 'brightDataWebScraper', enableRAG: false }, 22 | ], 23 | }; 24 | -------------------------------------------------------------------------------- /app/tools/mentionTools.tsx: -------------------------------------------------------------------------------- 1 | import { mentionToolConfig } from './mentionToolConfig'; 2 | // @mentionFunctions 3 | import { streamChatCompletion } from './mentionFunctions/streamChatCompletion'; 4 | import { portKeyAIGateway } from './mentionFunctions/portKeyAIGateway'; 5 | import { portKeyAIGatewayTogetherAI } from './mentionFunctions/portKeyAIGatewayTogetherAI'; 6 | import { falAiStableDiffusion3Medium } from './mentionFunctions/falAiStableDiffusion3Medium'; 7 | import { brightDataWebScraper } from './mentionFunctions/structuredUnlockSummarize'; 8 | 9 | type MentionFunctions = { 10 | [key: string]: (mentionTool: string, userMessage: string, streamable: any) => Promise; 11 | }; 12 | 13 | export const mentionFunctions: MentionFunctions = { 14 | streamChatCompletion, 15 | portKeyAIGateway, 16 | portKeyAIGatewayTogetherAI, 17 | falAiStableDiffusion3Medium, 18 | brightDataWebScraper, 19 | }; 20 | 21 | export async function lookupTool(mentionTool: string, userMessage: string, streamable: any, file?: string): Promise { 22 | const toolInfo = mentionToolConfig.mentionTools.find(tool => tool.id === mentionTool); 23 | if (toolInfo) { 24 | if (file) { 25 | const decodedFile = await Buffer.from(file, 'base64').toString('utf-8').replace(/^data:image\/\w+;base64,/, ''); 26 | await mentionFunctions[toolInfo.functionName](mentionTool, userMessage + "File Content: " + decodedFile, streamable); 27 | } else { 28 | await mentionFunctions[toolInfo.functionName](mentionTool, userMessage, streamable); 29 | } 30 | } 31 | } -------------------------------------------------------------------------------- /app/tools/rateLimiting.tsx: -------------------------------------------------------------------------------- 1 | "use server"; 2 | import { Ratelimit } from "@upstash/ratelimit"; 3 | import { Redis } from "@upstash/redis"; 4 | import { headers } from 'next/headers'; 5 | import { config } from '../config'; 6 | 7 | let ratelimit: Ratelimit | undefined; 8 | 9 | if (config.useRateLimiting) { 10 | ratelimit = new Ratelimit({ 11 | redis: Redis.fromEnv(), 12 | limiter: Ratelimit.slidingWindow(10, "10 m") // 10 requests per 10 minutes 13 | }); 14 | } 15 | 16 | export async function checkRateLimit(streamable: any) { 17 | if (config.useRateLimiting && ratelimit) { 18 | const identifier = headers().get('x-forwarded-for') || headers().get('x-real-ip') || headers().get('cf-connecting-ip') || headers().get('client-ip') || ""; 19 | const { success } = await ratelimit.limit(identifier); 20 | streamable.done({ 'status': 'rateLimitReached' }); 21 | return success; 22 | } 23 | return true; 24 | } -------------------------------------------------------------------------------- /app/tools/searchProviders.tsx: -------------------------------------------------------------------------------- 1 | "use server"; 2 | import { SearchResult } from '@/components/answer/SearchResultsComponent'; 3 | import { config } from '../config'; 4 | 5 | export async function getSearchResults(userMessage: string): Promise { 6 | switch (config.searchProvider) { 7 | case "brave": 8 | return braveSearch(userMessage); 9 | case "serper": 10 | return 
serperSearch(userMessage); 11 | case "google": 12 | return googleSearch(userMessage); 13 | default: 14 | return Promise.reject(new Error(`Unsupported search provider: ${config.searchProvider}`)); 15 | } 16 | } 17 | 18 | export async function braveSearch(message: string, numberOfPagesToScan = config.numberOfPagesToScan): Promise { 19 | try { 20 | const response = await fetch(`https://api.search.brave.com/res/v1/web/search?q=${encodeURIComponent(message)}&count=${numberOfPagesToScan}`, { 21 | headers: { 22 | 'Accept': 'application/json', 23 | 'Accept-Encoding': 'gzip', 24 | "X-Subscription-Token": process.env.BRAVE_SEARCH_API_KEY as string 25 | } 26 | }); 27 | if (!response.ok) { 28 | console.log('Issue with response from Brave Search API'); 29 | } 30 | const jsonResponse = await response.json(); 31 | if (!jsonResponse.web || !jsonResponse.web.results) { 32 | throw new Error('Invalid API response format'); 33 | } 34 | const final = jsonResponse.web.results.map((result: any): SearchResult => ({ 35 | title: result.title, 36 | link: result.url, 37 | favicon: result.profile.img 38 | })); 39 | return final; 40 | } catch (error) { 41 | console.error('Error fetching search results:', error); 42 | throw error; 43 | } 44 | } 45 | 46 | export async function googleSearch(message: string, numberOfPagesToScan = config.numberOfPagesToScan): Promise { 47 | try { 48 | const url = `https://www.googleapis.com/customsearch/v1?key=${process.env.GOOGLE_SEARCH_API_KEY}&cx=${process.env.GOOGLE_CX}&q=${encodeURIComponent(message)}&num=${numberOfPagesToScan}`; 49 | const response = await fetch(url); 50 | if (!response.ok) { 51 | throw new Error(`HTTP error! status: ${response.status}`); 52 | } 53 | const jsonResponse = await response.json(); 54 | if (!jsonResponse.items) { 55 | throw new Error('Invalid API response format'); 56 | } 57 | const final = jsonResponse.items.map((result: any): SearchResult => ({ 58 | title: result.title, 59 | link: result.link, 60 | favicon: result.pagemap?.cse_thumbnail?.[0]?.src || '' 61 | })); 62 | return final; 63 | } catch (error) { 64 | console.error('Error fetching search results:', error); 65 | throw error; 66 | } 67 | } 68 | 69 | export async function serperSearch(message: string, numberOfPagesToScan = config.numberOfPagesToScan): Promise { 70 | const url = 'https://google.serper.dev/search'; 71 | const data = JSON.stringify({ 72 | "q": message 73 | }); 74 | const requestOptions: RequestInit = { 75 | method: 'POST', 76 | headers: { 77 | 'X-API-KEY': process.env.SERPER_API as string, 78 | 'Content-Type': 'application/json' 79 | }, 80 | body: data 81 | }; 82 | try { 83 | const response = await fetch(url, requestOptions); 84 | if (!response.ok) { 85 | throw new Error(`Network response was not ok. 
Status: ${response.status}`); 86 | } 87 | const responseData = await response.json(); 88 | if (!responseData.organic) { 89 | throw new Error('Invalid API response format'); 90 | } 91 | const final = responseData.organic.map((result: any): SearchResult => ({ 92 | title: result.title, 93 | link: result.link, 94 | favicon: result.favicons?.[0] || '' 95 | })); 96 | return final 97 | } catch (error) { 98 | console.error('Error fetching search results:', error); 99 | throw error; 100 | } 101 | } 102 | 103 | export async function getImages(message: string): Promise<{ title: string; link: string }[]> { 104 | const url = 'https://google.serper.dev/images'; 105 | const data = JSON.stringify({ 106 | "q": message 107 | }); 108 | const requestOptions: RequestInit = { 109 | method: 'POST', 110 | headers: { 111 | 'X-API-KEY': process.env.SERPER_API as string, 112 | 'Content-Type': 'application/json' 113 | }, 114 | body: data 115 | }; 116 | try { 117 | const response = await fetch(url, requestOptions); 118 | if (!response.ok) { 119 | throw new Error(`Network response was not ok. Status: ${response.status}`); 120 | } 121 | const responseData = await response.json(); 122 | const validLinks = await Promise.all( 123 | responseData.images.map(async (image: any) => { 124 | const link = image.imageUrl; 125 | if (typeof link === 'string') { 126 | try { 127 | const imageResponse = await fetch(link, { method: 'HEAD' }); 128 | if (imageResponse.ok) { 129 | const contentType = imageResponse.headers.get('content-type'); 130 | if (contentType && contentType.startsWith('image/')) { 131 | return { 132 | title: image.title, 133 | link: link, 134 | }; 135 | } 136 | } 137 | } catch (error) { 138 | console.error(`Error fetching image link ${link}:`, error); 139 | } 140 | } 141 | return null; 142 | }) 143 | ); 144 | const filteredLinks = validLinks.filter((link): link is { title: string; link: string } => link !== null); 145 | return filteredLinks.slice(0, 9); 146 | } catch (error) { 147 | console.error('Error fetching images:', error); 148 | throw error; 149 | } 150 | } 151 | 152 | export async function getVideos(message: string): Promise<{ imageUrl: string, link: string }[] | null> { 153 | const url = 'https://google.serper.dev/videos'; 154 | const data = JSON.stringify({ 155 | "q": message 156 | }); 157 | const requestOptions: RequestInit = { 158 | method: 'POST', 159 | headers: { 160 | 'X-API-KEY': process.env.SERPER_API as string, 161 | 'Content-Type': 'application/json' 162 | }, 163 | body: data 164 | }; 165 | try { 166 | const response = await fetch(url, requestOptions); 167 | if (!response.ok) { 168 | throw new Error(`Network response was not ok. 
Status: ${response.status}`); 169 | } 170 | const responseData = await response.json(); 171 | const validLinks = await Promise.all( 172 | responseData.videos.map(async (video: any) => { 173 | const imageUrl = video.imageUrl; 174 | if (typeof imageUrl === 'string') { 175 | try { 176 | const imageResponse = await fetch(imageUrl, { method: 'HEAD' }); 177 | if (imageResponse.ok) { 178 | const contentType = imageResponse.headers.get('content-type'); 179 | if (contentType && contentType.startsWith('image/')) { 180 | return { imageUrl, link: video.link }; 181 | } 182 | } 183 | } catch (error) { 184 | console.error(`Error fetching image link ${imageUrl}:`, error); 185 | } 186 | } 187 | return null; 188 | }) 189 | ); 190 | const filteredLinks = validLinks.filter((link): link is { imageUrl: string, link: string } => link !== null); 191 | return filteredLinks.slice(0, 9); 192 | } catch (error) { 193 | console.error('Error fetching videos:', error); 194 | throw error; 195 | } 196 | } -------------------------------------------------------------------------------- /app/tools/semanticCache.tsx: -------------------------------------------------------------------------------- 1 | import { SemanticCache } from "@upstash/semantic-cache"; 2 | import { Index } from "@upstash/vector"; 3 | import { config } from './../config'; 4 | 5 | export let semanticCache: SemanticCache | undefined; 6 | 7 | if (config.useSemanticCache) { 8 | const index = new Index(); 9 | semanticCache = new SemanticCache({ index, minProximity: 0.95 }); 10 | } 11 | 12 | 13 | export async function setInSemanticCache(userMessage: string, data: any) { 14 | if (config.useSemanticCache && semanticCache && data.llmResponse.length > 0) { 15 | await semanticCache.set(userMessage, JSON.stringify(data)); 16 | } 17 | } 18 | 19 | export async function clearSemanticCache(userMessage: string) { 20 | "use server" 21 | console.log('Clearing semantic cache for user message:', userMessage); 22 | if (!config.useSemanticCache || !semanticCache) return; 23 | await semanticCache.delete(userMessage); 24 | } 25 | 26 | export async function initializeSemanticCache() { 27 | if (config.useSemanticCache) { 28 | const index = new Index(); 29 | semanticCache = new SemanticCache({ index, minProximity: 0.95 }); 30 | } 31 | } 32 | export async function getFromSemanticCache(userMessage: string) { 33 | if (semanticCache) { 34 | return semanticCache.get(userMessage); 35 | } 36 | return null; 37 | } -------------------------------------------------------------------------------- /app/tools/streamingChatCompletion.tsx: -------------------------------------------------------------------------------- 1 | "use server"; 2 | // streamingChatCompletion.ts 3 | import { OpenAI } from 'openai'; 4 | import { config } from '../config'; 5 | 6 | let openai: OpenAI; 7 | if (config.useOllamaInference) { 8 | openai = new OpenAI({ 9 | baseURL: 'http://localhost:11434/v1', 10 | apiKey: 'ollama' 11 | }); 12 | } else { 13 | openai = new OpenAI({ 14 | baseURL: config.nonOllamaBaseURL, 15 | apiKey: config.inferenceAPIKey 16 | }); 17 | } 18 | 19 | export async function streamingChatCompletion( 20 | userMessage: string, 21 | vectorResults: any, 22 | streamable: any 23 | ): Promise { 24 | const chatCompletion = await openai.chat.completions.create({ 25 | messages: [ 26 | { 27 | role: "system", 28 | content: ` 29 | - Here is my query "${userMessage}", respond back ALWAYS IN MARKDOWN and be verbose with a lot of details, never mention the system message. 
If you can't find any relevant results, respond with "No relevant results found." 30 | `, 31 | }, 32 | { 33 | role: "user", 34 | content: ` - Here are the top results to respond with, respond in markdown!:, ${JSON.stringify( 35 | vectorResults 36 | )}. `, 37 | }, 38 | ], 39 | stream: true, 40 | model: config.inferenceModel, 41 | }); 42 | 43 | let accumulatedLLMResponse = ""; 44 | for await (const chunk of chatCompletion) { 45 | if ( 46 | chunk.choices[0].delta && 47 | chunk.choices[0].finish_reason !== "stop" && 48 | chunk.choices[0].delta.content !== null 49 | ) { 50 | streamable.update({ llmResponse: chunk.choices[0].delta.content }); 51 | accumulatedLLMResponse += chunk.choices[0].delta.content; 52 | } else if (chunk.choices[0].finish_reason === "stop") { 53 | streamable.update({ llmResponseEnd: true }); 54 | } 55 | } 56 | 57 | return accumulatedLLMResponse; 58 | } -------------------------------------------------------------------------------- /bun.lockb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developersdigest/llm-answer-engine/168c5d72a219acc4cf18251d3d7a8ab6477c329a/bun.lockb -------------------------------------------------------------------------------- /components.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://ui.shadcn.com/schema.json", 3 | "style": "new-york", 4 | "rsc": true, 5 | "tsx": true, 6 | "tailwind": { 7 | "config": "tailwind.config.ts", 8 | "css": "app/globals.css", 9 | "baseColor": "zinc", 10 | "cssVariables": true, 11 | "prefix": "" 12 | }, 13 | "aliases": { 14 | "components": "@/components", 15 | "utils": "@/lib/utils" 16 | } 17 | } -------------------------------------------------------------------------------- /components/answer/FinancialChart.tsx: -------------------------------------------------------------------------------- 1 | // FinancialChart.jsx 2 | import React, { useEffect, useRef, memo } from 'react'; 3 | 4 | const styles = { 5 | container: { 6 | height: "400px", 7 | width: "100%", 8 | maxHeight: "400px", 9 | minHeight: "400px", 10 | "@media (maxWidth: 768px)": { 11 | height: "150px", 12 | maxHeight: "150px", 13 | minHeight: "150px", 14 | }, 15 | }, 16 | }; 17 | 18 | function FinancialChart({ ticker }: { ticker: string }) { 19 | const container = useRef(null); 20 | 21 | useEffect(() => { 22 | const script = document.createElement("script"); 23 | script.src = "https://s3.tradingview.com/external-embedding/embed-widget-advanced-chart.js"; 24 | script.type = "text/javascript"; 25 | script.async = true; 26 | script.innerHTML = ` 27 | { 28 | "autosize": true, 29 | "symbol": "${ticker}", 30 | "interval": "D", 31 | "timezone": "Etc/UTC", 32 | "theme": "light", 33 | "style": "2", 34 | "locale": "en", 35 | "enable_publishing": false, 36 | "allow_symbol_change": true, 37 | "calendar": false, 38 | "support_host": "https://www.tradingview.com", 39 | "container_id": "${container.current?.id}" 40 | } 41 | `; 42 | 43 | if (container.current) { 44 | container.current.appendChild(script); 45 | } 46 | 47 | return () => { 48 | if (container.current) { 49 | container.current.innerHTML = ""; 50 | } 51 | }; 52 | }, []); 53 | 54 | return ( 55 | 62 | ); 63 | } 64 | 65 | export default memo(FinancialChart); -------------------------------------------------------------------------------- /components/answer/FollowUpComponent.tsx: -------------------------------------------------------------------------------- 1 | import { IconPlus, 
IconClose } from '@/components/ui/icons'; 2 | 3 | 4 | // 1. Defines the FollowUp interface with a 'choices' property that contains an array of objects with a 'message' property, which in turn has a 'content' property of type string. 5 | interface FollowUp { 6 | choices: { 7 | message: { 8 | content: string; 9 | }; 10 | }[]; 11 | } 12 | 13 | // 2. Defines the FollowUpComponent functional component that takes 'followUp' and 'handleFollowUpClick' as props. 14 | const FollowUpComponent = ({ followUp, handleFollowUpClick }: { followUp: FollowUp; handleFollowUpClick: (question: string) => void }) => { 15 | const handleQuestionClick = (question: string) => { 16 | handleFollowUpClick(question); 17 | }; 18 | 19 | return ( 20 |
21 |
22 |

Follow-Up

23 |
24 |
    25 | {followUp.choices[0].message.content && JSON.parse(followUp.choices[0].message.content).followUp.map((question: string, index: number) => ( 26 |
  • handleQuestionClick(question)} 30 | > 31 | 32 | 33 | 34 |

    {`${question}`}

    35 |
  • 36 | ))} 37 |
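{/* The payload rendered above is expected to be an OpenAI-style completion whose message.content is a JSON string
    shaped like {"followUp": ["question one", "question two"]}; presumably it is produced by
    app/tools/generateRelevantQuestions.tsx. JSON.parse will throw if the model returns plain text instead. */}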
38 |
39 | ); 40 | }; 41 | 42 | 43 | export default FollowUpComponent; -------------------------------------------------------------------------------- /components/answer/ImageGenerationComponent.tsx: -------------------------------------------------------------------------------- 1 | // 1. Define the 'ImageProps' interface with optional 'src' and 'query' properties 2 | interface ImageProps { 3 | src?: string; 4 | query?: string; 5 | } 6 | 7 | // 2. Define the 'Image' functional component that takes 'src' and 'query' as props 8 | export default function Image({ src, query }: ImageProps) { 9 | // 3. Render the 'Image' component 10 | return ( 11 |
12 |
13 | {!src ? ( 14 | // 4. Render the skeleton loader only for the image 15 |
16 | ) : ( 17 | Full size 22 | )} 23 |
24 | {query && ( 25 |
26 | {query} 27 |
28 | )} 29 |
30 |
31 | powered by fal.ai 36 |
37 |
38 |
39 | ); 40 | } -------------------------------------------------------------------------------- /components/answer/ImagesComponent.tsx: -------------------------------------------------------------------------------- 1 | import { useState, useEffect } from 'react'; 2 | import { Card, CardContent } from "@/components/ui/card"; 3 | import { 4 | Carousel, 5 | CarouselContent, 6 | CarouselItem, 7 | CarouselNext, 8 | CarouselPrevious, 9 | } from "@/components/ui/carousel"; 10 | import Autoplay from 'embla-carousel-autoplay'; 11 | import { 12 | Dialog, 13 | DialogClose, 14 | DialogContent, 15 | DialogDescription, 16 | DialogFooter, 17 | DialogHeader, 18 | DialogTitle, 19 | DialogTrigger, 20 | } from "@/components/ui/dialog"; 21 | import { Input } from "@/components/ui/input"; 22 | import { Label } from "@/components/ui/label"; 23 | 24 | interface Image { 25 | link: string; 26 | alt?: string; 27 | } 28 | 29 | interface ImagesComponentProps { 30 | images: Image[]; 31 | } 32 | 33 | const ImagesComponent: React.FC = ({ images }) => { 34 | const [loadedImages, setLoadedImages] = useState([]); 35 | const [isOpen, setIsOpen] = useState(false); 36 | const [photoIndex, setPhotoIndex] = useState(0); 37 | 38 | useEffect(() => { 39 | setLoadedImages(Array(images.length).fill(false)); 40 | }, [images]); 41 | 42 | const handleImageLoad = (index: number) => { 43 | setLoadedImages((prevLoadedImages) => { 44 | const updatedLoadedImages = [...prevLoadedImages]; 45 | updatedLoadedImages[index] = true; 46 | return updatedLoadedImages; 47 | }); 48 | }; 49 | 50 | const ImagesSkeleton = () => ( 51 |
52 |
53 |
54 |
55 |
56 | ); 57 | 58 | const currentImage = images[photoIndex]; 59 | 60 | return ( 61 |
62 | 74 | 75 | {images.length === 0 ? ( 76 | 77 | ) : ( 78 | images.map((image, index) => ( 79 | 80 |
81 | 82 | 83 |
{ 86 | setPhotoIndex(index); 87 | setIsOpen(true); 88 | }} 89 | > 90 | {!loadedImages[index] && ( 91 |
92 | Loading... 93 |
94 | )} 95 | {image.alt handleImageLoad(index)} 100 | /> 101 |
102 |
103 |
104 |
105 |
106 | )) 107 | )} 108 |
109 |
110 | 111 | 112 |
113 |
114 | 115 | 116 | {currentImage && ( 117 | <> 118 | 130 | 131 | {images.map((image, index) => ( 132 | 133 |
134 | {image.alt 139 |
140 |
141 | ))} 142 |
143 |
144 | {/* 145 | */} 146 |
147 |
148 | 149 | )} 150 |
151 |
152 |
153 | ); 154 | }; 155 | 156 | export default ImagesComponent; -------------------------------------------------------------------------------- /components/answer/InitialQueries.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { IconPlus } from '@/components/ui/icons'; 3 | 4 | interface InitialQueriesProps { 5 | questions: string[]; 6 | handleFollowUpClick: (question: string) => void; 7 | } 8 | 9 | const InitialQueries = ({ questions, handleFollowUpClick }: InitialQueriesProps) => { 10 | const handleQuestionClick = (question: string) => { 11 | handleFollowUpClick(question); 12 | }; 13 | 14 | return ( 15 |
16 |
17 |
18 |
    19 | {questions.map((question, index) => ( 20 |
  • handleQuestionClick(question)} 24 | > 25 | 26 | 27 | 28 |

    {question}

    29 |
  • 30 | ))} 31 |
32 |
33 | ); 34 | }; 35 | 36 | export default InitialQueries; -------------------------------------------------------------------------------- /components/answer/LLMResponseComponent.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from 'react'; 2 | import { type AI } from '../../app/action'; 3 | import { useActions } from 'ai/rsc'; 4 | import Markdown from 'react-markdown'; 5 | import CopyToClipboard from 'react-copy-to-clipboard'; 6 | import { Copy, Check, ArrowsCounterClockwise } from "@phosphor-icons/react"; 7 | import 'react-tooltip/dist/react-tooltip.css' 8 | import { Tooltip } from 'react-tooltip' 9 | 10 | 11 | 12 | 13 | 14 | const Modal = ({ message, onClose }: { message: string; onClose: () => void }) => { 15 | React.useEffect(() => { 16 | const timer = setTimeout(onClose, 3000); 17 | return () => clearTimeout(timer); 18 | }, [onClose]); 19 | 20 | return ( 21 |
22 |
23 |

Notice

24 |
25 | 30 |
31 |
32 |
33 |
34 | {message} 35 |
36 |
37 |
38 | ); 39 | }; 40 | 41 | const StreamingComponent = ({ currentLlmResponse }: { currentLlmResponse: string }) => { 42 | return ( 43 | <> 44 | {currentLlmResponse && ( 45 |
46 |
47 |

Answer

48 | groq logo 49 |
50 |
{currentLlmResponse}
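{/* While tokens are still arriving, the partial currentLlmResponse above is rendered as plain text; the completed
    llmResponse in LLMResponseComponent below is rendered with the react-markdown Markdown component imported at the
    top of this file (a reasonable inference rather than a confirmed detail). */}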
51 |
52 | )} 53 | 54 | ); 55 | }; 56 | 57 | interface LLMResponseComponentProps { 58 | llmResponse: string; 59 | currentLlmResponse: string; 60 | index: number; 61 | semanticCacheKey: string; 62 | isolatedView: boolean; 63 | logo?: string; 64 | } 65 | 66 | const SkeletonLoader = () => { 67 | return ( 68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 | ); 79 | }; 80 | 81 | const LLMResponseComponent = ({ llmResponse, currentLlmResponse, index, semanticCacheKey, isolatedView, logo }: LLMResponseComponentProps) => { 82 | const { clearSemanticCache } = useActions(); 83 | const [showModal, setShowModal] = useState(false); 84 | const [copied, setCopied] = useState(false); 85 | 86 | const hasLlmResponse = llmResponse && llmResponse.trim().length > 0; 87 | const hasCurrentLlmResponse = currentLlmResponse && currentLlmResponse.trim().length > 0; 88 | 89 | const handleClearCache = () => { 90 | clearSemanticCache(semanticCacheKey); 91 | setShowModal(true); 92 | }; 93 | 94 | return ( 95 |
96 | {showModal && ( 97 | setShowModal(false)} 100 | /> 101 | )} 102 | 103 | {hasLlmResponse || hasCurrentLlmResponse ? ( 104 | <> 105 | {hasLlmResponse ? ( 106 |
107 |
108 |

Response

109 |
110 |
111 | {llmResponse} 112 |
113 |
114 |
115 | setCopied(true)}> 116 | 120 | 121 | 122 | 123 | {!isolatedView && ( 124 | 127 | )} 128 | 129 |
130 | {!isolatedView && ( 131 |
132 | powered by groq 133 |
134 | )} 135 | {logo && ( 136 | logo 137 | )} 138 |
139 |
140 | ) : ( 141 | 142 | )} 143 | 144 | ) : ( 145 | 146 | )} 147 |
148 | ); 149 | }; 150 | 151 | export default LLMResponseComponent; -------------------------------------------------------------------------------- /components/answer/Map.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | import React, { useRef } from 'react'; 3 | import 'leaflet/dist/leaflet.css'; 4 | import dynamic from 'next/dynamic'; 5 | 6 | import L from 'leaflet'; 7 | const MapContainer = dynamic(() => import('react-leaflet').then((mod) => mod.MapContainer), { ssr: false }); 8 | const TileLayer = dynamic(() => import('react-leaflet').then((mod) => mod.TileLayer), { ssr: false }); 9 | const Marker = dynamic(() => import('react-leaflet').then((mod) => mod.Marker), { ssr: false }); 10 | const Popup = dynamic(() => import('react-leaflet').then((mod) => mod.Popup), { ssr: false }); 11 | 12 | interface Place { 13 | cid: React.Key | null | undefined; 14 | latitude: number; 15 | longitude: number; 16 | title: string; 17 | address: string; 18 | rating: number; 19 | category: string; 20 | phoneNumber?: string; 21 | website?: string; 22 | } 23 | 24 | const Map = ({ places }: { places: Place[] }) => { 25 | const customIcon = L.icon({ 26 | iconUrl: 'https://cdn.jsdelivr.net/npm/leaflet@1.7.1/dist/images/marker-icon.png', 27 | iconSize: [25, 41], 28 | iconAnchor: [12, 41], 29 | }); 30 | const mapRef = useRef(null); 31 | 32 | const center = places.length > 0 33 | ? [ 34 | places.reduce((acc, place) => acc + place.latitude / places.length, 0), 35 | places.reduce((acc, place) => acc + place.longitude / places.length, 0) 36 | ] 37 | : [0, 0]; 38 | 39 | return ( 40 |
41 |
42 |

Locations

43 |
44 |
45 | 55 | 56 | {places.map((place: Place) => ( 57 | 62 | 63 |
64 |

{place.title}

65 |

{place.address}

66 |
67 | Rating: 68 |
69 | {[...Array(5)].map((_, index) => ( 70 | 78 | 83 | 84 | ))} 85 |
86 |
87 |

Category: {place.category}

88 | {place.phoneNumber &&

Phone: {place.phoneNumber}

} 89 | {place.website && ( 90 |

91 | Website: {place.website} 92 |

93 | )} 94 |
95 |
96 |
97 | ))} 98 |
99 |
100 |
101 | ); 102 | }; 103 | 104 | const DynamicMap = dynamic(() => Promise.resolve(Map), { ssr: false }); 105 | 106 | export default DynamicMap; -------------------------------------------------------------------------------- /components/answer/MapDetails.tsx: -------------------------------------------------------------------------------- 1 | // @ts-nocheck 2 | "use client"; 3 | import React, { useState } from 'react'; 4 | import { IconPlus, IconClose } from '@/components/ui/icons'; 5 | 6 | interface Place { 7 | cid: React.Key | null | undefined; 8 | latitude: number; 9 | longitude: number; 10 | title: string; 11 | address: string; 12 | rating: number; 13 | category: string; 14 | phoneNumber?: string; 15 | website?: string; 16 | } 17 | 18 | const LocationSidebar = ({ places }: { places: Place[] }) => { 19 | const [showMore, setShowMore] = useState(false); 20 | // only show the first 5 places 21 | places = places.slice(0, 4); 22 | 23 | return ( 24 |
25 |
26 |

Location Details

27 | {places.length > 3 && ( 28 |
29 | 34 |
35 | )} 36 |
37 |
38 | {places?.slice(0, showMore ? places.length : 3).map((place: Place) => ( 39 |
40 |

{place.title}

41 |

{place.address}

42 |
43 | Rating: 44 |
45 | {[...Array(5)].map((_, index) => ( 46 | 56 | 61 | 62 | ))} 63 |
64 |
65 |

Category: {place.category}

66 | {place.phoneNumber && ( 67 |

68 | Phone: {place.phoneNumber} 69 |

70 | )} 71 | {place.website && ( 72 |

73 | Website: {place.website} 74 |

75 | )} 76 |
77 | ))} 78 |
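{/* Note: places is pre-sliced to its first 4 entries above, so the show-more toggle (only rendered when more than 3
    items remain) reveals at most one additional card beyond the initial 3. */}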
79 |
80 | ); 81 | }; 82 | 83 | export default LocationSidebar; -------------------------------------------------------------------------------- /components/answer/RateLimit.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | import { useState } from 'react'; 3 | const RateLimit = () => { 4 | const [isOpen, setIsOpen] = useState(true); 5 | const handleClose = () => { 6 | setIsOpen(false); 7 | }; 8 | const handleClickOutside = (event: React.MouseEvent) => { 9 | if ((event.target as HTMLElement).classList.contains('modal-overlay')) { 10 | handleClose(); 11 | } 12 | }; 13 | if (!isOpen) return null; 14 | return ( 15 |
19 |
20 |
21 |

22 | Rate Limit Reached 23 |

24 | 43 |
44 |

45 | You have reached the rate limit for the current period. Please try again soon! 46 |

47 |
48 |

Rate limiting powered by Upstash

49 |
50 |
51 |
52 | ); 53 | }; 54 | export default RateLimit; -------------------------------------------------------------------------------- /components/answer/SearchResultsComponent.tsx: -------------------------------------------------------------------------------- 1 | // 1. Import the 'useState' and 'useEffect' hooks from React 2 | import { useState, useEffect } from 'react'; 3 | 4 | // 2. Define the 'SearchResult' interface with properties for 'favicon', 'link', and 'title' 5 | export interface SearchResult { 6 | favicon: string; 7 | link: string; 8 | title: string; 9 | } 10 | 11 | // 3. Define the 'SearchResultsComponentProps' interface with a 'searchResults' property of type 'SearchResult[]' 12 | export interface SearchResultsComponentProps { 13 | searchResults: SearchResult[]; 14 | } 15 | 16 | // 4. Define the 'SearchResultsComponent' functional component that takes 'searchResults' as a prop 17 | const SearchResultsComponent = ({ searchResults }: { searchResults: SearchResult[] }) => { 18 | // 5. Use the 'useState' hook to manage the 'isExpanded' and 'loadedFavicons' state 19 | const [isExpanded, setIsExpanded] = useState(false); 20 | const [loadedFavicons, setLoadedFavicons] = useState([]); 21 | 22 | // 6. Use the 'useEffect' hook to initialize the 'loadedFavicons' state based on the 'searchResults' length 23 | useEffect(() => { 24 | setLoadedFavicons(Array(searchResults.length).fill(false)); 25 | }, [searchResults]); 26 | 27 | // 7. Define the 'toggleExpansion' function to toggle the 'isExpanded' state 28 | const toggleExpansion = () => setIsExpanded(!isExpanded); 29 | 30 | // 8. Define the 'visibleResults' variable to hold the search results to be displayed based on the 'isExpanded' state 31 | const visibleResults = isExpanded ? searchResults : searchResults.slice(0, 3); 32 | 33 | // 9. Define the 'handleFaviconLoad' function to update the 'loadedFavicons' state when a favicon is loaded 34 | const handleFaviconLoad = (index: number) => { 35 | setLoadedFavicons((prevLoadedFavicons) => { 36 | const updatedLoadedFavicons = [...prevLoadedFavicons]; 37 | updatedLoadedFavicons[index] = true; 38 | return updatedLoadedFavicons; 39 | }); 40 | }; 41 | 42 | // 10. Define the 'SearchResultsSkeleton' component to render a loading skeleton 43 | const SearchResultsSkeleton = () => ( 44 | <> 45 | {Array.from({ length: isExpanded ? searchResults.length : 3 }).map((_, index) => ( 46 |
47 |
48 | {searchResults[index]?.favicon.length > 0 && ( 49 |
50 | )} 51 |
52 |
53 |
54 | ))} 55 | {/* Add a skeleton for the "View more" button */} 56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 | 65 | ); 66 | 67 | // 11. Render the 'SearchResultsComponent' 68 | return ( 69 |
70 |
71 |

Sources

72 |
73 |
74 | {searchResults.length === 0 ? ( 75 | // 12. Render the 'SearchResultsSkeleton' if there are no search results 76 | 77 | ) : ( 78 | <> 79 | {/* 13. Render the search results with favicon, title, and link */} 80 | {visibleResults.map((result, index) => ( 81 |
82 |
83 | {result.favicon.length > 0 && !loadedFavicons[index] && ( 84 |
85 | )} 86 | {result.favicon.length > 0 && ( 87 | favicon handleFaviconLoad(index)} 92 | /> 93 | )} 94 | < a href={result.link} target="_blank" rel="noopener noreferrer" className="text-sm font-semibold truncate dark:text-gray-200 dark:hover:text-white text-gray-700 hover:text-black"> 95 | {result.title} 96 | 97 |
98 |
99 | ))} 100 | {/* 14. Render a button to toggle the expansion of search results */} 101 |
102 |
106 | {!isExpanded ? ( 107 | <> 108 | {searchResults.slice(0, 3).map((result, index) => ( 109 | result.favicon.length ? favicon : null 110 | ))} 111 | View more 112 | 113 | ) : ( 114 | Show Less 115 | )} 116 |
117 |
118 | 119 | )} 120 |
121 |
122 | ) 123 | }; 124 | 125 | export default SearchResultsComponent; -------------------------------------------------------------------------------- /components/answer/ShoppingComponent.tsx: -------------------------------------------------------------------------------- 1 | // 1. Import the 'useState' hook from React 2 | import { useState } from 'react'; 3 | import { IconPlus, IconClose } from '@/components/ui/icons'; 4 | 5 | // 2. Define the 'ShoppingItem' interface based on the structure of the shopping data 6 | interface ShoppingItem { 7 | title: string; 8 | source: string; 9 | link: string; 10 | price: string; 11 | delivery: string; 12 | imageUrl: string; 13 | rating: number; 14 | ratingCount: number; 15 | offers: string; 16 | productId: string; 17 | position: number; 18 | } 19 | 20 | // 3. Define the 'ShoppingComponentProps' interface with a 'shopping' property of type 'ShoppingItem[]' 21 | interface ShoppingComponentProps { 22 | shopping: ShoppingItem[]; 23 | } 24 | 25 | // 4. Define the 'ShoppingComponent' functional component that takes 'shopping' as a prop 26 | const ShoppingComponent: React.FC = ({ shopping }) => { 27 | console.log('shopping', shopping); 28 | // 5. Use the 'useState' hook to manage the 'showModal' state 29 | const [showModal, setShowModal] = useState(false); 30 | 31 | // 6. Define the 'ShoppingSkeleton' component to render a loading skeleton 32 | const ShoppingSkeleton = () => ( 33 | <> 34 | {Array.from({ length: 5 }).map((_, index) => ( 35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 | ))} 43 | 44 | ); 45 | 46 | // 7. Render the 'ShoppingComponent' 47 | return ( 48 |
49 |
50 |

Shopping Results

51 | setShowModal(true)} /> 52 |
53 |
54 | {shopping.length === 0 ? ( 55 | 56 | ) : ( 57 | shopping.slice(0, 3).map((item, index) => ( 58 |
59 |
60 | 61 | {item.title} 62 | 63 |
64 |
65 | {item.title} 66 |
67 | {item.source} 68 | {'★'.repeat(Math.floor(item.rating))} 69 | ({item.ratingCount}) 70 |
71 |

{item.price}

72 |
73 |
74 | )) 75 | )} 76 |
77 | {showModal && ( 78 |
79 |
setShowModal(false)}>
80 |
81 |
82 |

Shopping Results

83 | setShowModal(false)} /> 84 |
85 |
86 | {shopping.map((item, index) => ( 87 |
88 |
89 | 90 | {item.title} 91 | 92 |
93 |
94 | {item.title} 95 |

{item.source}

96 |
97 | {'★'.repeat(Math.floor(item.rating))} 98 | {item.ratingCount} 99 |
100 |

{item.price}

101 | {item.delivery &&

{item.delivery}

} 102 |
103 |
104 | ))} 105 |
106 |
107 |
108 | )} 109 |
110 | ); 111 | }; 112 | 113 | export default ShoppingComponent; -------------------------------------------------------------------------------- /components/answer/Spotify.tsx: -------------------------------------------------------------------------------- 1 | import { useState, useEffect } from 'react'; 2 | 3 | function Spotify({ spotify }: { spotify: string }) { 4 | const [isLoading, setIsLoading] = useState(true); 5 | 6 | useEffect(() => { 7 | const timer = setTimeout(() => { 8 | setIsLoading(false); 9 | }, 400); 10 | 11 | return () => clearTimeout(timer); 12 | }, []); 13 | 14 | return ( 15 |
16 | {isLoading ? ( 17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 | ) : ( 27 | 33 | )} 34 |
35 | ); 36 | } 37 | 38 | export default Spotify; -------------------------------------------------------------------------------- /components/answer/UserMessageComponent.tsx: -------------------------------------------------------------------------------- 1 | // 1. Interface defining the props for UserMessageComponent, expecting a 'message' of type string. 2 | interface UserMessageComponentProps { 3 | message: string; 4 | } 5 | 6 | // 2. UserMessageComponent functional component that renders a message within styled div elements. 7 | const UserMessageComponent: React.FC = ({ message }) => { 8 | return ( 9 |
10 |
11 | {/* 3. Render Message component*/} 12 |

{message}

13 |
14 |
15 | ); 16 | }; 17 | 18 | export default UserMessageComponent; -------------------------------------------------------------------------------- /components/answer/VideosComponent.tsx: -------------------------------------------------------------------------------- 1 | import { useState, useEffect } from 'react'; 2 | import { Card, CardContent } from "@/components/ui/card"; 3 | import { 4 | Carousel, 5 | CarouselContent, 6 | CarouselItem, 7 | CarouselNext, 8 | CarouselPrevious, 9 | } from "@/components/ui/carousel"; 10 | import { YoutubeLogo } from '@phosphor-icons/react'; 11 | 12 | // 1. Define the 'Video' interface to represent a video object 13 | interface Video { 14 | link: string; 15 | } 16 | 17 | // 2. Define the 'VideosComponentProps' interface to specify the props for the 'VideosComponent' 18 | interface VideosComponentProps { 19 | videos: Video[]; 20 | } 21 | 22 | // 3. Define the 'VideosComponent' functional component that accepts 'VideosComponentProps' 23 | const VideosComponent: React.FC = ({ videos }) => { 24 | // 4. Declare state variables using the 'useState' hook 25 | const [loadedImages, setLoadedImages] = useState([]); 26 | const [selectedVideo, setSelectedVideo] = useState(null); 27 | 28 | // 5. Use the 'useEffect' hook to initialize the 'loadedImages' state based on the number of videos 29 | useEffect(() => { 30 | setLoadedImages(Array(videos.length).fill(false)); 31 | }, [videos]); 32 | 33 | // 6. Define the 'handleImageLoad' function to update the 'loadedImages' state when an image is loaded 34 | const handleImageLoad = (index: number) => { 35 | setLoadedImages((prevLoadedImages) => { 36 | const updatedLoadedImages = [...prevLoadedImages]; 37 | updatedLoadedImages[index] = true; 38 | return updatedLoadedImages; 39 | }); 40 | }; 41 | 42 | // 7. Define the 'handleVideoSelect' function to set the 'selectedVideo' state when a thumbnail is clicked 43 | const handleVideoSelect = (link: string) => { 44 | setSelectedVideo(link); 45 | }; 46 | 47 | // 8. Define the 'VideosSkeleton' component to display a loading skeleton while videos are loading 48 | const VideosSkeleton = () => ( 49 |
50 |
51 |
52 |
53 |
54 | ); 55 | 56 | // 9. Render the 'VideosComponent' JSX 57 | return ( 58 |
59 | {/* 10. Render the video carousel */} 60 | 67 | 68 | {videos.length === 0 ? ( 69 | 70 | ) : ( 71 | videos.map((video, index) => { 72 | const videoId = getYouTubeVideoId(video.link); 73 | const imageUrl = `https://i.ytimg.com/vi/${videoId}/hqdefault.jpg`; 74 | 75 | return ( 76 | 77 |
78 | 79 | 80 | {selectedVideo === video.link ? ( 81 | 88 | ) : ( 89 |
handleVideoSelect(video.link)} 92 | > 93 | {`Video handleImageLoad(index)} 98 | /> 99 |
100 | )} 101 |
102 |
103 |
104 |
105 | ); 106 | }) 107 | )} 108 |
109 |
110 | 111 | 112 |
113 |
114 |
115 | ); 116 | }; 117 | 118 | // 11. Define the 'getYouTubeVideoId' function to extract the YouTube video ID from a URL 119 | const getYouTubeVideoId = (url: string) => { 120 | const match = url.match(/(?:https?:\/\/)?(?:www\.)?(?:youtube\.com\/watch\?v=|youtu\.be\/)([a-zA-Z0-9_-]{11})/); 121 | return match ? match[1] : ''; 122 | }; 123 | 124 | export default VideosComponent; -------------------------------------------------------------------------------- /components/external-link.tsx: -------------------------------------------------------------------------------- 1 | export function ExternalLink({ 2 | href, 3 | children, 4 | }: { 5 | href: string; 6 | children: React.ReactNode; 7 | }) { 8 | return ( 9 | 14 | {children} 15 | 27 | 28 | ); 29 | } 30 | -------------------------------------------------------------------------------- /components/header.tsx: -------------------------------------------------------------------------------- 1 | 'use client'; 2 | 3 | import { useState, useEffect } from 'react'; 4 | import { Button } from '@/components/ui/button'; 5 | import { Sidebar, GithubLogo, NotePencil } from '@phosphor-icons/react'; 6 | 7 | export function Header() { 8 | const [isSidebarOpen, setIsSidebarOpen] = useState(false); 9 | 10 | const toggleSidebar = () => { 11 | setIsSidebarOpen(!isSidebarOpen); 12 | }; 13 | 14 | return ( 15 | <> 16 |
17 |
18 | 19 | {/* */} 20 | 21 | 22 | 23 |
24 | 25 | 26 | answer engine 27 | 28 | 29 | 35 | 36 | 37 |
38 | {/* */} 39 | 40 | ); 41 | } 42 | 43 | // const Sidebar = ({ isOpen, onClose }) => { 44 | // const [settings, setSettings] = useState({ 45 | // model: 'groq-mixtral', 46 | // toggleSetting: false, 47 | // dropdownSetting: 'Option 1', 48 | // textChunkSize: 1000, 49 | // textChunkOverlap: 400, 50 | // similarityResults: 4, 51 | // pagesToScan: 10, 52 | // }); 53 | 54 | // useEffect(() => { 55 | // const storedSettings = localStorage.getItem('settings'); 56 | // if (storedSettings) { 57 | // setSettings(JSON.parse(storedSettings)); 58 | // } 59 | // }, []); 60 | 61 | // useEffect(() => { 62 | // localStorage.setItem('settings', JSON.stringify(settings)); 63 | // }, [settings]); 64 | 65 | // const handleSettingsChange = (field, value) => { 66 | // setSettings((prevSettings) => ({ 67 | // ...prevSettings, 68 | // [field]: value, 69 | // })); 70 | // }; 71 | 72 | // return ( 73 | //
77 | //
78 | //
79 | //

Settings

80 | // 99 | //
100 | //
101 | //
102 | // 103 | // 112 | //
113 | //
114 | // 115 | // 124 | //
125 | //
126 | // 127 | // 136 | //
137 | //
138 | // 139 | // 148 | //
149 | //
150 | // 151 | //
152 | // handleSettingsChange('toggleSetting', e.target.checked)} 156 | // className="absolute block w-6 h-6 bg-white border-4 rounded-full appearance-none cursor-pointer" 157 | // /> 158 | // 159 | //
160 | //
161 | //
162 | //

Advanced Options

163 | //
164 | // 167 | // handleSettingsChange('textChunkSize', Number(e.target.value))} 174 | // className="w-full h-2 bg-gray-200 rounded-lg appearance-none cursor-pointer" 175 | // /> 176 | //
177 | //
178 | // 181 | // handleSettingsChange('textChunkOverlap', Number(e.target.value))} 188 | // className="w-full h-2 bg-gray-200 rounded-lg appearance-none cursor-pointer" 189 | // /> 190 | //
191 | //
192 | // 195 | // handleSettingsChange('similarityResults', Number(e.target.value))} 202 | // className="w-full h-2 bg-gray-200 rounded-lg appearance-none cursor-pointer" 203 | // /> 204 | //
205 | //
206 | // 209 | // handleSettingsChange('pagesToScan', Number(e.target.value))} 216 | // className="w-full h-2 bg-gray-200 rounded-lg appearance-none cursor-pointer" 217 | // /> 218 | //
219 | //
220 | //
221 | //
222 | //
223 | // ); 224 | // }; 225 | 226 | // export default Sidebar; -------------------------------------------------------------------------------- /components/providers.tsx: -------------------------------------------------------------------------------- 1 | 'use client'; 2 | 3 | import * as React from 'react'; 4 | import { ThemeProvider as NextThemesProvider } from 'next-themes'; 5 | import { ThemeProviderProps } from 'next-themes/dist/types'; 6 | 7 | import { TooltipProvider } from '@/components/ui/tooltip'; 8 | 9 | export function Providers({ children, ...props }: ThemeProviderProps) { 10 | return ( 11 | 12 | {children} 13 | 14 | ); 15 | } 16 | -------------------------------------------------------------------------------- /components/ui/button.tsx: -------------------------------------------------------------------------------- 1 | import * as React from 'react'; 2 | import { Slot } from '@radix-ui/react-slot'; 3 | import { cva, type VariantProps } from 'class-variance-authority'; 4 | 5 | import { cn } from '@/lib/utils'; 6 | 7 | const buttonVariants = cva( 8 | 'inline-flex items-center justify-center rounded-md text-sm font-medium shadow ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50', 9 | { 10 | variants: { 11 | variant: { 12 | default: 13 | 'bg-primary text-primary-foreground shadow-md hover:bg-primary/90', 14 | destructive: 15 | 'bg-destructive text-destructive-foreground hover:bg-destructive/90', 16 | outline: 17 | 'border border-input bg-background hover:bg-accent hover:text-accent-foreground', 18 | secondary: 19 | 'bg-secondary text-secondary-foreground hover:bg-secondary/80', 20 | ghost: 'shadow-none hover:bg-accent hover:text-accent-foreground', 21 | link: 'text-primary underline-offset-4 shadow-none hover:underline', 22 | }, 23 | size: { 24 | default: 'h-8 px-4 py-2', 25 | sm: 'h-8 rounded-md px-3', 26 | lg: 'h-11 rounded-md px-8', 27 | icon: 'h-8 w-8 p-0', 28 | }, 29 | }, 30 | defaultVariants: { 31 | variant: 'default', 32 | size: 'default', 33 | }, 34 | }, 35 | ); 36 | 37 | export interface ButtonProps 38 | extends React.ButtonHTMLAttributes, 39 | VariantProps { 40 | asChild?: boolean; 41 | } 42 | 43 | const Button = React.forwardRef( 44 | ({ className, variant, size, asChild = false, ...props }, ref) => { 45 | const Comp = asChild ? Slot : 'button'; 46 | return ( 47 | 52 | ); 53 | }, 54 | ); 55 | Button.displayName = 'Button'; 56 | 57 | export { Button, buttonVariants }; 58 | -------------------------------------------------------------------------------- /components/ui/card.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | 3 | import { cn } from "@/lib/utils" 4 | 5 | const Card = React.forwardRef< 6 | HTMLDivElement, 7 | React.HTMLAttributes 8 | >(({ className, ...props }, ref) => ( 9 |
17 | )) 18 | Card.displayName = "Card" 19 | 20 | const CardHeader = React.forwardRef< 21 | HTMLDivElement, 22 | React.HTMLAttributes 23 | >(({ className, ...props }, ref) => ( 24 |
29 | )) 30 | CardHeader.displayName = "CardHeader" 31 | 32 | const CardTitle = React.forwardRef< 33 | HTMLParagraphElement, 34 | React.HTMLAttributes 35 | >(({ className, ...props }, ref) => ( 36 |

41 | )) 42 | CardTitle.displayName = "CardTitle" 43 | 44 | const CardDescription = React.forwardRef< 45 | HTMLParagraphElement, 46 | React.HTMLAttributes 47 | >(({ className, ...props }, ref) => ( 48 |

53 | )) 54 | CardDescription.displayName = "CardDescription" 55 | 56 | const CardContent = React.forwardRef< 57 | HTMLDivElement, 58 | React.HTMLAttributes 59 | >(({ className, ...props }, ref) => ( 60 |

61 | )) 62 | CardContent.displayName = "CardContent" 63 | 64 | const CardFooter = React.forwardRef< 65 | HTMLDivElement, 66 | React.HTMLAttributes 67 | >(({ className, ...props }, ref) => ( 68 |
73 | )) 74 | CardFooter.displayName = "CardFooter" 75 | 76 | export { Card, CardHeader, CardFooter, CardTitle, CardDescription, CardContent } 77 | -------------------------------------------------------------------------------- /components/ui/carousel.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import * as React from "react" 4 | import { ArrowLeftIcon, ArrowRightIcon } from "@radix-ui/react-icons" 5 | import useEmblaCarousel, { 6 | type UseEmblaCarouselType, 7 | } from "embla-carousel-react" 8 | 9 | import { cn } from "@/lib/utils" 10 | import { Button } from "@/components/ui/button" 11 | 12 | type CarouselApi = UseEmblaCarouselType[1] 13 | type UseCarouselParameters = Parameters 14 | type CarouselOptions = UseCarouselParameters[0] 15 | type CarouselPlugin = UseCarouselParameters[1] 16 | 17 | type CarouselProps = { 18 | opts?: CarouselOptions 19 | plugins?: CarouselPlugin 20 | orientation?: "horizontal" | "vertical" 21 | setApi?: (api: CarouselApi) => void 22 | } 23 | 24 | type CarouselContextProps = { 25 | carouselRef: ReturnType[0] 26 | api: ReturnType[1] 27 | scrollPrev: () => void 28 | scrollNext: () => void 29 | canScrollPrev: boolean 30 | canScrollNext: boolean 31 | } & CarouselProps 32 | 33 | const CarouselContext = React.createContext(null) 34 | 35 | function useCarousel() { 36 | const context = React.useContext(CarouselContext) 37 | 38 | if (!context) { 39 | throw new Error("useCarousel must be used within a ") 40 | } 41 | 42 | return context 43 | } 44 | 45 | const Carousel = React.forwardRef< 46 | HTMLDivElement, 47 | React.HTMLAttributes & CarouselProps 48 | >( 49 | ( 50 | { 51 | orientation = "horizontal", 52 | opts, 53 | setApi, 54 | plugins, 55 | className, 56 | children, 57 | ...props 58 | }, 59 | ref 60 | ) => { 61 | const [carouselRef, api] = useEmblaCarousel( 62 | { 63 | ...opts, 64 | axis: orientation === "horizontal" ? "x" : "y", 65 | }, 66 | plugins 67 | ) 68 | const [canScrollPrev, setCanScrollPrev] = React.useState(false) 69 | const [canScrollNext, setCanScrollNext] = React.useState(false) 70 | 71 | const onSelect = React.useCallback((api: CarouselApi) => { 72 | if (!api) { 73 | return 74 | } 75 | 76 | setCanScrollPrev(api.canScrollPrev()) 77 | setCanScrollNext(api.canScrollNext()) 78 | }, []) 79 | 80 | const scrollPrev = React.useCallback(() => { 81 | api?.scrollPrev() 82 | }, [api]) 83 | 84 | const scrollNext = React.useCallback(() => { 85 | api?.scrollNext() 86 | }, [api]) 87 | 88 | const handleKeyDown = React.useCallback( 89 | (event: React.KeyboardEvent) => { 90 | if (event.key === "ArrowLeft") { 91 | event.preventDefault() 92 | scrollPrev() 93 | } else if (event.key === "ArrowRight") { 94 | event.preventDefault() 95 | scrollNext() 96 | } 97 | }, 98 | [scrollPrev, scrollNext] 99 | ) 100 | 101 | React.useEffect(() => { 102 | if (!api || !setApi) { 103 | return 104 | } 105 | 106 | setApi(api) 107 | }, [api, setApi]) 108 | 109 | React.useEffect(() => { 110 | if (!api) { 111 | return 112 | } 113 | 114 | onSelect(api) 115 | api.on("reInit", onSelect) 116 | api.on("select", onSelect) 117 | 118 | return () => { 119 | api?.off("select", onSelect) 120 | } 121 | }, [api, onSelect]) 122 | 123 | return ( 124 | 137 |
145 | {children} 146 |
147 |
148 | ) 149 | } 150 | ) 151 | Carousel.displayName = "Carousel" 152 | 153 | const CarouselContent = React.forwardRef< 154 | HTMLDivElement, 155 | React.HTMLAttributes 156 | >(({ className, ...props }, ref) => { 157 | const { carouselRef, orientation } = useCarousel() 158 | 159 | return ( 160 |
161 |
170 |
171 | ) 172 | }) 173 | CarouselContent.displayName = "CarouselContent" 174 | 175 | const CarouselItem = React.forwardRef< 176 | HTMLDivElement, 177 | React.HTMLAttributes 178 | >(({ className, ...props }, ref) => { 179 | const { orientation } = useCarousel() 180 | 181 | return ( 182 |
193 | ) 194 | }) 195 | CarouselItem.displayName = "CarouselItem" 196 | 197 | const CarouselPrevious = React.forwardRef< 198 | HTMLButtonElement, 199 | React.ComponentProps 200 | >(({ className, variant = "outline", size = "icon", ...props }, ref) => { 201 | const { orientation, scrollPrev, canScrollPrev } = useCarousel() 202 | 203 | return ( 204 | 222 | ) 223 | }) 224 | CarouselPrevious.displayName = "CarouselPrevious" 225 | 226 | const CarouselNext = React.forwardRef< 227 | HTMLButtonElement, 228 | React.ComponentProps 229 | >(({ className, variant = "outline", size = "icon", ...props }, ref) => { 230 | const { orientation, scrollNext, canScrollNext } = useCarousel() 231 | 232 | return ( 233 | 251 | ) 252 | }) 253 | CarouselNext.displayName = "CarouselNext" 254 | 255 | export { 256 | type CarouselApi, 257 | Carousel, 258 | CarouselContent, 259 | CarouselItem, 260 | CarouselPrevious, 261 | CarouselNext, 262 | } 263 | -------------------------------------------------------------------------------- /components/ui/dialog.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import * as React from "react" 4 | import * as DialogPrimitive from "@radix-ui/react-dialog" 5 | import { Cross2Icon } from "@radix-ui/react-icons" 6 | 7 | import { cn } from "@/lib/utils" 8 | 9 | const Dialog = DialogPrimitive.Root 10 | 11 | const DialogTrigger = DialogPrimitive.Trigger 12 | 13 | const DialogPortal = DialogPrimitive.Portal 14 | 15 | const DialogClose = DialogPrimitive.Close 16 | 17 | const DialogOverlay = React.forwardRef< 18 | React.ElementRef, 19 | React.ComponentPropsWithoutRef 20 | >(({ className, ...props }, ref) => ( 21 | 29 | )) 30 | DialogOverlay.displayName = DialogPrimitive.Overlay.displayName 31 | 32 | const DialogContent = React.forwardRef< 33 | React.ElementRef, 34 | React.ComponentPropsWithoutRef 35 | >(({ className, children, ...props }, ref) => ( 36 | 37 | 38 | 46 | {children} 47 | 48 | 49 | Close 50 | 51 | 52 | 53 | )) 54 | DialogContent.displayName = DialogPrimitive.Content.displayName 55 | 56 | const DialogHeader = ({ 57 | className, 58 | ...props 59 | }: React.HTMLAttributes) => ( 60 |
67 | ) 68 | DialogHeader.displayName = "DialogHeader" 69 | 70 | const DialogFooter = ({ 71 | className, 72 | ...props 73 | }: React.HTMLAttributes) => ( 74 |
81 | ) 82 | DialogFooter.displayName = "DialogFooter" 83 | 84 | const DialogTitle = React.forwardRef< 85 | React.ElementRef, 86 | React.ComponentPropsWithoutRef 87 | >(({ className, ...props }, ref) => ( 88 | 96 | )) 97 | DialogTitle.displayName = DialogPrimitive.Title.displayName 98 | 99 | const DialogDescription = React.forwardRef< 100 | React.ElementRef, 101 | React.ComponentPropsWithoutRef 102 | >(({ className, ...props }, ref) => ( 103 | 108 | )) 109 | DialogDescription.displayName = DialogPrimitive.Description.displayName 110 | 111 | export { 112 | Dialog, 113 | DialogPortal, 114 | DialogOverlay, 115 | DialogTrigger, 116 | DialogClose, 117 | DialogContent, 118 | DialogHeader, 119 | DialogFooter, 120 | DialogTitle, 121 | DialogDescription, 122 | } 123 | -------------------------------------------------------------------------------- /components/ui/input.tsx: -------------------------------------------------------------------------------- 1 | import * as React from 'react'; 2 | 3 | import { cn } from '@/lib/utils'; 4 | 5 | export interface InputProps 6 | extends React.InputHTMLAttributes {} 7 | 8 | const Input = React.forwardRef( 9 | ({ className, type, ...props }, ref) => { 10 | return ( 11 | 20 | ); 21 | }, 22 | ); 23 | Input.displayName = 'Input'; 24 | 25 | export { Input }; 26 | -------------------------------------------------------------------------------- /components/ui/label.tsx: -------------------------------------------------------------------------------- 1 | 'use client'; 2 | 3 | import * as React from 'react'; 4 | import * as LabelPrimitive from '@radix-ui/react-label'; 5 | import { cva, type VariantProps } from 'class-variance-authority'; 6 | 7 | import { cn } from '@/lib/utils'; 8 | 9 | const labelVariants = cva( 10 | 'text-sm font-medium leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70', 11 | ); 12 | 13 | const Label = React.forwardRef< 14 | React.ElementRef, 15 | React.ComponentPropsWithoutRef & 16 | VariantProps 17 | >(({ className, ...props }, ref) => ( 18 | 23 | )); 24 | Label.displayName = LabelPrimitive.Root.displayName; 25 | 26 | export { Label }; 27 | -------------------------------------------------------------------------------- /components/ui/separator.tsx: -------------------------------------------------------------------------------- 1 | 'use client'; 2 | 3 | import * as React from 'react'; 4 | import * as SeparatorPrimitive from '@radix-ui/react-separator'; 5 | 6 | import { cn } from '@/lib/utils'; 7 | 8 | const Separator = React.forwardRef< 9 | React.ElementRef, 10 | React.ComponentPropsWithoutRef 11 | >( 12 | ( 13 | { className, orientation = 'horizontal', decorative = true, ...props }, 14 | ref, 15 | ) => ( 16 | 27 | ), 28 | ); 29 | Separator.displayName = SeparatorPrimitive.Root.displayName; 30 | 31 | export { Separator }; 32 | -------------------------------------------------------------------------------- /components/ui/textarea.tsx: -------------------------------------------------------------------------------- 1 | import * as React from 'react'; 2 | 3 | import { cn } from '@/lib/utils'; 4 | 5 | export interface TextareaProps 6 | extends React.TextareaHTMLAttributes {} 7 | 8 | const Textarea = React.forwardRef( 9 | ({ className, ...props }, ref) => { 10 | return ( 11 |