├── .env.example ├── .eslintrc.json ├── .vs ├── slnx.sqlite ├── chat-llm │ └── v17 │ │ └── .wsuo └── VSWorkspaceState.json ├── app ├── favicon.ico ├── globals.css ├── layout.tsx ├── api │ └── chat │ │ └── route.ts └── page.tsx ├── public ├── clean.png ├── patient.png ├── vercel.svg ├── ai.svg ├── next.svg └── user.svg ├── next.config.js ├── postcss.config.js ├── .gitignore ├── tailwind.config.ts ├── tsconfig.json ├── package.json ├── styles └── root.module.css └── README.md /.env.example: -------------------------------------------------------------------------------- 1 | GOOGLE_API_KEY= -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "next/core-web-vitals" 3 | } 4 | -------------------------------------------------------------------------------- /.vs/slnx.sqlite: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kanugurajesh/Chat-LLM/HEAD/.vs/slnx.sqlite -------------------------------------------------------------------------------- /app/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kanugurajesh/Chat-LLM/HEAD/app/favicon.ico -------------------------------------------------------------------------------- /public/clean.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kanugurajesh/Chat-LLM/HEAD/public/clean.png -------------------------------------------------------------------------------- /public/patient.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kanugurajesh/Chat-LLM/HEAD/public/patient.png -------------------------------------------------------------------------------- /.vs/chat-llm/v17/.wsuo: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/kanugurajesh/Chat-LLM/HEAD/.vs/chat-llm/v17/.wsuo -------------------------------------------------------------------------------- /.vs/VSWorkspaceState.json: -------------------------------------------------------------------------------- 1 | { 2 | "ExpandedNodes": [ 3 | "" 4 | ], 5 | "PreviewInSolutionExplorer": false 6 | } -------------------------------------------------------------------------------- /next.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('next').NextConfig} */ 2 | const nextConfig = {} 3 | 4 | module.exports = nextConfig 5 | -------------------------------------------------------------------------------- /postcss.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | plugins: { 3 | tailwindcss: {}, 4 | autoprefixer: {}, 5 | }, 6 | } 7 | -------------------------------------------------------------------------------- /app/globals.css: -------------------------------------------------------------------------------- 1 | @tailwind base; 2 | @tailwind components; 3 | @tailwind utilities; 4 | 5 | body { 6 | box-sizing: border-box; 7 | } -------------------------------------------------------------------------------- /app/layout.tsx: -------------------------------------------------------------------------------- 1 | import type { Metadata } from 'next' 2 | import { Inter } from 'next/font/google' 3 | import './globals.css' 4 | 5 | const inter = Inter({ subsets: ['latin'] }) 6 | 7 | export const metadata: Metadata = { 8 | title: 'Chat LLM', 9 | description: 'Chat with Gemini Pro LLM', 10 | } 11 | 12 | export default function RootLayout({ 13 | children, 14 | }: { 15 | children: React.ReactNode 16 | }) { 17 | return ( 18 | 19 | {children} 20 | 21 | ) 22 | } 23 | 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | .yarn/install-state.gz 8 | 9 | # testing 10 | /coverage 11 | 12 | # next.js 13 | /.next/ 14 | /out/ 15 | 16 | # production 17 | /build 18 | 19 | # misc 20 | .DS_Store 21 | *.pem 22 | 23 | # debug 24 | npm-debug.log* 25 | yarn-debug.log* 26 | yarn-error.log* 27 | 28 | # local env files 29 | .env*.local 30 | 31 | # vercel 32 | .vercel 33 | 34 | # typescript 35 | *.tsbuildinfo 36 | next-env.d.ts 37 | -------------------------------------------------------------------------------- /tailwind.config.ts: -------------------------------------------------------------------------------- 1 | import type { Config } from 'tailwindcss' 2 | 3 | const config: Config = { 4 | content: [ 5 | './pages/**/*.{js,ts,jsx,tsx,mdx}', 6 | './components/**/*.{js,ts,jsx,tsx,mdx}', 7 | './app/**/*.{js,ts,jsx,tsx,mdx}', 8 | ], 9 | theme: { 10 | extend: { 11 | backgroundImage: { 12 | 'gradient-radial': 'radial-gradient(var(--tw-gradient-stops))', 13 | 'gradient-conic': 14 | 'conic-gradient(from 180deg at 50% 50%, var(--tw-gradient-stops))', 15 | }, 16 | }, 17 | }, 18 | plugins: [], 19 | } 20 | export default config 21 | -------------------------------------------------------------------------------- /public/vercel.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es5", 4 | "lib": ["dom", "dom.iterable", "esnext"], 5 | "allowJs": true, 6 | "skipLibCheck": true, 7 | "strict": true, 8 | "noEmit": true, 9 | 
"esModuleInterop": true, 10 | "module": "esnext", 11 | "moduleResolution": "bundler", 12 | "resolveJsonModule": true, 13 | "isolatedModules": true, 14 | "jsx": "preserve", 15 | "incremental": true, 16 | "plugins": [ 17 | { 18 | "name": "next" 19 | } 20 | ], 21 | "paths": { 22 | "@/*": ["./*"] 23 | } 24 | }, 25 | "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"], 26 | "exclude": ["node_modules"] 27 | } 28 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "chat-llm", 3 | "version": "0.1.0", 4 | "private": true, 5 | "scripts": { 6 | "dev": "next dev", 7 | "build": "next build", 8 | "start": "next start", 9 | "lint": "next lint" 10 | }, 11 | "dependencies": { 12 | "@google/generative-ai": "^0.1.3", 13 | "next": "14.0.4", 14 | "react": "^18", 15 | "react-dom": "^18", 16 | "react-hot-toast": "^2.4.1", 17 | "react-markdown": "^9.0.1", 18 | "react-spinners": "^0.13.8" 19 | }, 20 | "devDependencies": { 21 | "@types/node": "^20", 22 | "@types/react": "^18", 23 | "@types/react-dom": "^18", 24 | "autoprefixer": "^10.0.1", 25 | "eslint": "^8", 26 | "eslint-config-next": "14.0.4", 27 | "postcss": "^8", 28 | "tailwindcss": "^3.3.0", 29 | "typescript": "^5" 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /styles/root.module.css: -------------------------------------------------------------------------------- 1 | .user { 2 | float: left; 3 | max-width: 50%; 4 | background-color: white; 5 | border: 1px solid #ccc; 6 | } 7 | 8 | .model { 9 | float: right; 10 | max-width: 50%; 11 | background-color: white; 12 | border: 1px solid #ccc; 13 | } 14 | 15 | .container { 16 | display: flex; 17 | flex-direction: column; 18 | align-items: center; 19 | height: 100vh; 20 | width: 100vw; 21 | gap: 10px; 22 | padding: 20px; 23 | } 24 | 25 | .chatHistory { 26 | height: 90vh; 27 
| width: 100%; 28 | padding: 20px 10px; 29 | overflow-y: scroll; 30 | } 31 | 32 | .inputContainer { 33 | display: flex; 34 | flex-direction: row; 35 | padding: 20px 10px; 36 | height: 10vh; 37 | width: 100vw; 38 | } 39 | 40 | .button { 41 | background-color: black; 42 | color: white; 43 | padding: 10px; 44 | border-radius: 5px; 45 | cursor: pointer; 46 | font-weight: 600; 47 | border: none; 48 | } 49 | 50 | .input { 51 | width: 280px; 52 | padding: 10px; 53 | border-radius: 5px; 54 | border: 1px solid #ccc; 55 | outline: none; 56 | padding-right: 40px; 57 | } 58 | 59 | .icons { 60 | display: flex; 61 | flex-direction: row; 62 | gap: 10px; 63 | } -------------------------------------------------------------------------------- /public/ai.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Icon_24px_MLEngine_Color 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /app/api/chat/route.ts: -------------------------------------------------------------------------------- 1 | import { NextRequest, NextResponse } from "next/server"; 2 | 3 | import { GoogleGenerativeAI } from "@google/generative-ai"; 4 | 5 | const genAI = new GoogleGenerativeAI(process.env.GOOGLE_API_KEY as string); 6 | 7 | const model = genAI.getGenerativeModel({ model: "gemini-pro"}); 8 | 9 | const chat = model.startChat({ 10 | history: [ 11 | // you can write any conversation history here 12 | // { 13 | // role: "user", 14 | // parts: "Hello, can you answer my questions.", 15 | // }, 16 | // { 17 | // role: "model", 18 | // parts: "Sure, what do you want to know?", 19 | // }, 20 | ], 21 | generationConfig: { 22 | maxOutputTokens: 1000, 23 | }, 24 | }); 25 | 26 | export async function POST(req: NextRequest) { 27 | 28 | // get prompt field from the request body 29 | const reqBody = await req.json(); 30 | 31 | // get the userPrompt from the request body 32 | const { userPrompt } = 
reqBody; 33 | 34 | const result = await chat.sendMessage(userPrompt); 35 | 36 | const response = await result.response; 37 | 38 | const text = response.text(); 39 | 40 | if (text === "") { 41 | return NextResponse.json({ 42 | text: "Sorry, I don't understand.", 43 | }); 44 | } 45 | 46 | return NextResponse.json({ 47 | text, 48 | }); 49 | 50 | } -------------------------------------------------------------------------------- /public/next.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /public/user.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | #### 🌟star this repo if you like it 2 |
3 | 4 |
5 | 6 | # Chat-LLM 7 | 8 | Chat-LLM is a user-friendly application designed to simplify chatting with Gemini Pro. This application offers a seamless experience, and its flexibility and power can be enhanced by training the model in MakerSuite. 9 | 10 | ## Problem Statement 11 | 12 | Gemini Pro users often face challenges when engaging in conversations. Chat-LLM aims to address these issues by providing a user-friendly interface for chatting. Additionally, it offers the unique feature of model training through MakerSuite, making it a flexible and powerful solution. 13 | 14 | ## Features 15 | 16 | - **User-Friendly Interface**: Chat-LLM is designed with a focus on simplicity and ease of use, ensuring that even beginners can navigate the application effortlessly. 17 | 18 | - **Gemini Pro Integration**: Seamlessly connect with Gemini Pro to enhance your chatting experience and unlock advanced features. 19 | 20 | - **MakerSuite Training**: Take advantage of the powerful MakerSuite to train the model according to your preferences. Customize the application to suit your unique needs. 21 | 22 | - **Flexibility and Power**: Chat-LLM is not just a chat application; it's a dynamic tool that adapts to your requirements. With MakerSuite training, you have the power to shape the model to better serve your communication style. 23 | 24 | ## Getting Started 25 | 26 | ### Installation 27 | 28 | To get started with Chat-LLM, follow these simple steps: 29 | 30 | 1. Clone the repository: 31 | ```bash 32 | # Fork the application to your github 33 | git clone https://github.com/yourusername/Chat-LLM.git 34 | ``` 35 | 2. Change the directory: 36 | ```bash 37 | cd Chat-LLM 38 | ``` 39 | 3. Install the dependencies 40 | ```bash 41 | npm install 42 | ``` 43 | 4. Add secret keys 44 | ```bash 45 | # Then add all the secret keys in .env.local 46 | cp .env.example .env.local 47 | ``` 48 | 5. 
Run the application 49 | ```bash 50 | npm run dev 51 | ``` 52 | 53 | ## Screenshots 54 | 55 | ![Screenshot 2024-01-15 094714](https://github.com/kanugurajesh/Chat-LLM/assets/77529419/b49932f3-f16b-49e8-853e-70f2860990a7) 56 |
57 | ![Screenshot 2024-01-15 094721](https://github.com/kanugurajesh/Chat-LLM/assets/77529419/c07603a4-d214-4793-b0b6-4cd412cd7558) 58 |
59 | ![Screenshot 2024-01-15 094735](https://github.com/kanugurajesh/Chat-LLM/assets/77529419/d9087f0c-46df-413f-b34e-746f71d34f48) 60 | 61 | ## 🔗 Links 62 | [![portfolio](https://img.shields.io/badge/my_portfolio-000?style=for-the-badge&logo=ko-fi&logoColor=white)](https://rajeshportfolio.me/) 63 | [![linkedin](https://img.shields.io/badge/linkedin-0A66C2?style=for-the-badge&logo=linkedin&logoColor=white)](https://www.linkedin.com/in/rajesh-kanugu-aba8a3254/) 64 | [![twitter](https://img.shields.io/badge/twitter-1DA1F2?style=for-the-badge&logo=twitter&logoColor=white)](https://twitter.com/exploringengin1) 65 | [![github](https://img.shields.io/badge/my_portfolio-000?style=for-the-badge&logo=github&logoColor=white)](https://github.com/kanugurajesh) 66 | 67 | ## Tech Stack 68 | 69 | - Next.js 70 | - TypeScript 71 | - Tailwind CSS 72 | - Gemini Pro 73 | - CSS 74 | 75 | ## Contributing 76 | 77 | I want to add more features, and contributions are always welcome! 78 | 79 | See [contributing.md](https://github.com/rajesh604/SnapScale/blob/main/contributing.md) for ways to get started. 80 | 81 | Please adhere to this project's [code of conduct](https://github.com/rajesh604/SnapScale/blob/main/code_of_conduct.md). 
82 | 83 | ## Authors 84 | 85 | - [@kanugurajesh](https://github.com/kanugurajesh) 86 | 87 | ## Support 88 | 89 | For support, you can buy me a coffee 90 | 91 | Buy Me A Coffee 92 | 93 | [![MIT License](https://img.shields.io/badge/License-MIT-green.svg)](https://choosealicense.com/licenses/mit/) 94 | -------------------------------------------------------------------------------- /app/page.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import { useState, useEffect, useRef } from "react" 4 | import styles from "@/styles/root.module.css" 5 | import Image from "next/image" 6 | import Link from "next/link" 7 | import Markdown from "react-markdown"; 8 | import toast, { Toaster } from "react-hot-toast"; 9 | import { BeatLoader } from "react-spinners" 10 | 11 | export default function Home() { 12 | 13 | const [prompt, setPrompt] = useState(""); 14 | const [loading, setLoading] = useState(false); 15 | 16 | const chatContainerRef = useRef(null); 17 | 18 | const getDate = () => { 19 | const date = new Date() 20 | return `${date.getHours()}:${date.getMinutes()}:${date.getSeconds()}` 21 | } 22 | 23 | const [chatHistory, setChatHistory] = useState([ 24 | { id: 1, message: 'Hello', sender: 'model', time: getDate() }, 25 | ]) 26 | 27 | const onPromptChange = (e: any) => { 28 | localStorage.setItem('prompt', e.target.value) 29 | setPrompt(e.target.value) 30 | } 31 | 32 | const RequestGemini = async (prompt: any) => { 33 | 34 | const response = await fetch('/api/chat', { 35 | method: 'POST', 36 | body: JSON.stringify({ userPrompt: prompt }), 37 | headers: { 38 | 'Content-Type': 'application/json' 39 | } 40 | }) 41 | 42 | const data = await response.json() 43 | 44 | setChatHistory((prevChatHistory) => [ 45 | ...prevChatHistory, 46 | { id: prevChatHistory.length + 1, message: data.text, sender: 'model', time: getDate() } 47 | ]); 48 | 49 | setLoading(false) 50 | 51 | setChatHistory((prevChatHistory) => { 52 | const 
updatedChatHistory = [...prevChatHistory]; 53 | localStorage.setItem('chatHistory', JSON.stringify(updatedChatHistory)); 54 | return updatedChatHistory; 55 | }); 56 | 57 | } 58 | 59 | const deleteChatHistory = () => { 60 | localStorage.removeItem('chatHistory') 61 | setChatHistory([]) 62 | } 63 | 64 | // Scroll to the bottom whenever chatHistory changes 65 | useEffect(() => { 66 | // @ts-ignore 67 | chatContainerRef.current.scrollTop = chatContainerRef.current.scrollHeight; 68 | }, [chatHistory]); 69 | 70 | 71 | // handle click: validate the prompt BEFORE toggling the spinner, so an empty submit can't leave loading stuck at true 72 | const onHandleClick = async () => { 73 | if (prompt === "") { 74 | toast.error("Please enter the prompt") 75 | return 76 | } 77 | setLoading(true) 78 | 79 | setChatHistory((prevChatHistory) => [ 80 | ...prevChatHistory, 81 | { id: prevChatHistory.length + 1, message: prompt, sender: 'user', time: getDate() } 82 | ]); 83 | 84 | await RequestGemini(prompt) 85 | setPrompt("") 86 | 87 | } 88 | 89 | const handleSubmit = (e: any) => { 90 | if (e.key === "Enter") { 91 | onHandleClick() 92 | } 93 | } 94 | 95 | // show welcome message once on mount (empty deps — the localStorage flag makes reruns redundant) 96 | useEffect(() => { 97 | const visited = localStorage.getItem('visited') 98 | if (!visited) { 99 | toast.success("Welcome to Gemini") 100 | setTimeout(() => { 101 | toast.success("Get started by entering the prompt") 102 | }, 2500) 103 | localStorage.setItem('visited', "true") 104 | } 105 | }, []) 106 | 107 | useEffect(() => { 108 | const chatHistory = localStorage.getItem('chatHistory') 109 | if (chatHistory) { 110 | setChatHistory(() => JSON.parse(chatHistory)) 111 | console.log(chatHistory) 112 | } 113 | }, []) 114 | 115 | return ( 
117 | 118 | 119 |
120 | logo 121 |

Chat-LLM

122 |
123 | 124 |
125 | {chatHistory.length > 0 && chatHistory.map((chat) => ( 126 |
127 |
128 |
129 | logo 130 |
131 | 132 | {chat.message} 133 | 134 |
135 |
136 | ))} 137 |
138 |
139 |
140 | 141 | logo 142 |
143 | {loading ? ( 144 | 147 | ) : ( 148 | 149 | )} 150 |
151 |
152 | ) 153 | } 154 | --------------------------------------------------------------------------------