├── .eslintrc.json
├── .example.env
├── .gitignore
├── .prettierrc
├── README.md
├── app
│   ├── api
│   │   ├── getChat
│   │   │   └── route.ts
│   │   ├── getParsedSources
│   │   │   └── route.ts
│   │   └── getSources
│   │       └── route.ts
│   ├── favicon.ico
│   ├── globals.css
│   ├── layout.tsx
│   └── page.tsx
├── components
│   ├── Chat.tsx
│   ├── FinalInputArea.tsx
│   ├── Footer.tsx
│   ├── Header.tsx
│   ├── Hero.tsx
│   ├── InitialInputArea.tsx
│   ├── Sources.tsx
│   ├── TypeAnimation.tsx
│   └── logo.tsx
├── next.config.mjs
├── package-lock.json
├── package.json
├── postcss.config.mjs
├── public
│   ├── arrow-up.svg
│   ├── basketball-new.svg
│   ├── desktop-screenshot.png
│   ├── finance.svg
│   ├── github.svg
│   ├── light-new.svg
│   ├── new-bg.png
│   ├── new-logo.svg
│   ├── og-image.png
│   ├── screenshot-mobile.png
│   ├── similarTopics.svg
│   ├── simple-logo.png
│   ├── simple-logo.svg
│   ├── togethercomputer.png
│   ├── twitter.svg
│   ├── up-arrow.svg
│   └── us.svg
├── tailwind.config.ts
├── tsconfig.json
└── utils
    ├── TogetherAIStream.ts
    └── utils.ts
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "next/core-web-vitals"
3 | }
4 |
--------------------------------------------------------------------------------
/.example.env:
--------------------------------------------------------------------------------
1 | # Note: You need either BING_API_KEY or SERPER_API_KEY, not both
2 | TOGETHER_API_KEY=
3 | BING_API_KEY=
4 | SERPER_API_KEY=
5 | HELICONE_API_KEY=
6 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 | .yarn/install-state.gz
8 |
9 | # testing
10 | /coverage
11 |
12 | # next.js
13 | /.next/
14 | /out/
15 |
16 | # production
17 | /build
18 |
19 | # misc
20 | .DS_Store
21 | *.pem
22 |
23 | # debug
24 | npm-debug.log*
25 | yarn-debug.log*
26 | yarn-error.log*
27 |
28 | # local env files
29 | .env*.local
30 | .env
31 |
32 | # vercel
33 | .vercel
34 |
35 | # typescript
36 | *.tsbuildinfo
37 | next-env.d.ts
38 | .env
39 |
--------------------------------------------------------------------------------
/.prettierrc:
--------------------------------------------------------------------------------
1 | { "plugins": ["prettier-plugin-tailwindcss"] }
2 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # Llama Tutor
4 |
5 |
6 |
7 | An open source AI personal tutor. Powered by Llama 3.1 70B & Together AI.
8 |
9 |
10 | ## Tech stack
11 |
12 | - Llama 3.1 70B from Meta for the LLM
13 | - Together AI for LLM inference
14 | - Next.js app router with Tailwind
15 | - Serper for the search API
16 | - Helicone for observability
17 | - Plausible for website analytics
18 |
19 | ## Cloning & running
20 |
21 | 1. Fork or clone the repo
22 | 2. Create an account at [Together AI](https://togetherai.link) for the LLM
23 | 3. Create an account at [Serper](https://serper.dev/) or with Azure ([Bing Search API](https://www.microsoft.com/en-us/bing/apis/bing-web-search-api)) for the search API
24 | 4. Create an account at [Helicone](https://www.helicone.ai/) for observability
25 | 5. Create a `.env` file (use `.example.env` for reference) and fill in the API keys
26 | 6. Run `npm install` and `npm run dev` to install dependencies and run locally
27 |
28 | ## Future Tasks
29 |
30 | - [ ] Add share & copy buttons that folks can click on after convos are generated
31 | - [ ] Add potential follow-up questions + a new chat option at the end of the chat page
32 | - [ ] Split the page into two pages and add back the footer
33 | - [ ] Move all my icons into their own TypeScript file (transform.tools)
34 | - [ ] Add a more detailed landing page with a nice section featuring the GitHub link
35 | - [ ] Add a nice hamburger menu on mobile
36 | - [ ] Try out the generative UI stuff from Vercel
37 | - [ ] Add a nicer dropdown overall
38 |
--------------------------------------------------------------------------------
/app/api/getChat/route.ts:
--------------------------------------------------------------------------------
1 | import {
2 | TogetherAIStream,
3 | TogetherAIStreamPayload,
4 | } from "@/utils/TogetherAIStream";
5 |
6 | export async function POST(request: Request) {
7 | let { messages } = await request.json();
8 |
9 | console.log("messages", messages);
10 | try {
11 | console.log("[getChat] Fetching answer stream from Together API");
12 | const payload: TogetherAIStreamPayload = {
13 | model: "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
14 | messages,
15 | stream: true,
16 | };
17 | const stream = await TogetherAIStream(payload);
18 |
19 | return new Response(stream, {
20 | headers: new Headers({
21 | "Cache-Control": "no-cache",
22 | }),
23 | });
24 | } catch (e) {
25 | return new Response("Error. Answer stream failed.", { status: 500 });
26 | }
27 | }
28 |
29 | export const runtime = "edge";
30 |
--------------------------------------------------------------------------------
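For reference, a minimal sketch of calling this route from the browser and accumulating the streamed tokens. It is a simplified stand-in for what `app/page.tsx` does with `eventsource-parser`; the helper name is hypothetical and the line-by-line parsing assumes each SSE event arrives intact.

```ts
// Hypothetical client helper for /api/getChat (not part of the repo).
async function askTutor(
  messages: { role: string; content: string }[],
): Promise<string> {
  const res = await fetch("/api/getChat", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ messages }),
  });
  if (!res.ok || !res.body) throw new Error("Answer stream failed");

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let answer = "";

  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    // The route emits SSE-style lines of the form: data: {"text":"..."}
    for (const line of decoder.decode(value).split("\n")) {
      if (!line.startsWith("data: ")) continue;
      try {
        answer += JSON.parse(line.slice(6)).text ?? "";
      } catch {
        // ignore fragments split across chunks; page.tsx uses a real SSE parser
      }
    }
  }
  return answer;
}
```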
/app/api/getParsedSources/route.ts:
--------------------------------------------------------------------------------
1 | import { Readability } from "@mozilla/readability";
2 | import jsdom, { JSDOM } from "jsdom";
3 | import { cleanedText, fetchWithTimeout } from "@/utils/utils";
4 | import { NextResponse } from "next/server";
5 |
6 | export const maxDuration = 30;
7 |
8 | export async function POST(request: Request) {
9 | let { sources } = await request.json();
10 |
11 | console.log("[getAnswer] Fetching text from source URLS");
12 | let finalResults = await Promise.all(
13 | sources.map(async (result: any) => {
14 | try {
15 | // Fetch the source URL, or abort if it's been 3 seconds
16 | const response = await fetchWithTimeout(result.url);
17 | const html = await response.text();
18 | const virtualConsole = new jsdom.VirtualConsole();
19 | const dom = new JSDOM(html, { virtualConsole });
20 |
21 | const doc = dom.window.document;
22 | const parsed = new Readability(doc).parse();
23 | let parsedContent = parsed
24 | ? cleanedText(parsed.textContent)
25 | : "Nothing found";
26 |
27 | return {
28 | ...result,
29 | fullContent: parsedContent,
30 | };
31 | } catch (e) {
32 | console.log(`error parsing ${result.name}, error: ${e}`);
33 | return {
34 | ...result,
35 | fullContent: "not available",
36 | };
37 | }
38 | }),
39 | );
40 |
41 | return NextResponse.json(finalResults);
42 | }
43 |
--------------------------------------------------------------------------------
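The 3-second cutoff per source comes from `fetchWithTimeout`'s default timeout in `utils/utils.ts`. If some pages need more time, a longer timeout can be passed explicitly — a hypothetical tweak, not something the repo does:

```ts
import { fetchWithTimeout } from "@/utils/utils";

// Hypothetical: allow a slow page up to 8 seconds instead of the 3s default.
const response = await fetchWithTimeout("https://example.com/article", {}, 8000);
const html = await response.text();
```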
/app/api/getSources/route.ts:
--------------------------------------------------------------------------------
1 | import { NextResponse } from "next/server";
2 | import { z } from "zod";
3 |
4 | let excludedSites = ["youtube.com"];
5 | let searchEngine: "bing" | "serper" = "serper";
6 |
7 | export async function POST(request: Request) {
8 | let { question } = await request.json();
9 |
10 | const finalQuestion = `what is ${question}`;
11 |
12 | if (searchEngine === "bing") {
13 | const BING_API_KEY = process.env["BING_API_KEY"];
14 | if (!BING_API_KEY) {
15 | throw new Error("BING_API_KEY is required");
16 | }
17 |
18 | const params = new URLSearchParams({
19 | q: `${finalQuestion} ${excludedSites.map((site) => `-site:${site}`).join(" ")}`,
20 | mkt: "en-US",
21 | count: "6",
22 | safeSearch: "Strict",
23 | });
24 |
25 | const response = await fetch(
26 | `https://api.bing.microsoft.com/v7.0/search?${params}`,
27 | {
28 | method: "GET",
29 | headers: {
30 | "Ocp-Apim-Subscription-Key": BING_API_KEY,
31 | },
32 | },
33 | );
34 |
35 | const BingJSONSchema = z.object({
36 | webPages: z.object({
37 | value: z.array(z.object({ name: z.string(), url: z.string() })),
38 | }),
39 | });
40 |
41 | const rawJSON = await response.json();
42 | const data = BingJSONSchema.parse(rawJSON);
43 |
44 | let results = data.webPages.value.map((result) => ({
45 | name: result.name,
46 | url: result.url,
47 | }));
48 |
49 | return NextResponse.json(results);
50 | // TODO: Figure out a way to remove certain results like YT
51 | } else if (searchEngine === "serper") {
52 | const SERPER_API_KEY = process.env["SERPER_API_KEY"];
53 | if (!SERPER_API_KEY) {
54 | throw new Error("SERPER_API_KEY is required");
55 | }
56 |
57 | const response = await fetch("https://google.serper.dev/search", {
58 | method: "POST",
59 | headers: {
60 | "X-API-KEY": SERPER_API_KEY,
61 | "Content-Type": "application/json",
62 | },
63 | body: JSON.stringify({
64 | q: finalQuestion,
65 | num: 9,
66 | }),
67 | });
68 |
69 | const rawJSON = await response.json();
70 |
71 | const SerperJSONSchema = z.object({
72 | organic: z.array(z.object({ title: z.string(), link: z.string() })),
73 | });
74 |
75 | const data = SerperJSONSchema.parse(rawJSON);
76 |
77 | let results = data.organic.map((result) => ({
78 | name: result.title,
79 | url: result.link,
80 | }));
81 |
82 | return NextResponse.json(results);
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
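Whichever engine is used, the route responds with a flat list of `{ name, url }` objects. A minimal sketch of a client call (the helper is hypothetical; `app/page.tsx` inlines this fetch instead):

```ts
type Source = { name: string; url: string };

// Hypothetical helper wrapping POST /api/getSources.
async function fetchSources(question: string): Promise<Source[]> {
  const res = await fetch("/api/getSources", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ question }),
  });
  if (!res.ok) throw new Error("Failed to fetch sources");
  return res.json();
}
```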
/app/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nutlope/llamatutor/457519ed2b3af83f838de7911b0cbb4546fbb8a7/app/favicon.ico
--------------------------------------------------------------------------------
/app/globals.css:
--------------------------------------------------------------------------------
1 | @tailwind base;
2 | @tailwind components;
3 | @tailwind utilities;
4 |
5 | @layer utilities {
6 | .text-balance {
7 | text-wrap: balance;
8 | }
9 | /* Hide scrollbar for Chrome, Safari and Opera */
10 | .no-scrollbar::-webkit-scrollbar {
11 | display: none;
12 | }
13 | /* Hide scrollbar for IE, Edge and Firefox */
14 | .no-scrollbar {
15 | -ms-overflow-style: none; /* IE and Edge */
16 | scrollbar-width: none; /* Firefox */
17 | }
18 | .loader {
19 | text-align: left;
20 | display: flex;
21 | gap: 3px;
22 | }
23 |
24 | .loader span {
25 | display: inline-block;
26 | vertical-align: middle;
27 | width: 7px;
28 | height: 7px;
29 | /* background: #4b4b4b; */
30 | background: white;
31 | border-radius: 50%;
32 | animation: loader 0.6s infinite alternate;
33 | }
34 |
35 | .loader span:nth-of-type(2) {
36 | animation-delay: 0.2s;
37 | }
38 |
39 | .loader span:nth-of-type(3) {
40 | animation-delay: 0.6s;
41 | }
42 |
43 | @keyframes loader {
44 | 0% {
45 | opacity: 1;
46 | transform: scale(0.6);
47 | }
48 |
49 | 100% {
50 | opacity: 0.3;
51 | transform: scale(1);
52 | }
53 | }
54 | }
55 |
56 | body {
57 | margin: 0px !important;
58 | }
59 |
--------------------------------------------------------------------------------
/app/layout.tsx:
--------------------------------------------------------------------------------
1 | import type { Metadata } from "next";
2 | import { Montserrat } from "next/font/google";
3 | import PlausibleProvider from "next-plausible";
4 | import "./globals.css";
5 | import Image from "next/image";
6 | import bgImage from "../public/new-bg.png";
7 |
8 | const montserrat = Montserrat({ subsets: ["latin"] });
9 |
10 | let title = "Llama Tutor – AI Personal Tutor";
11 | let description = "Learn faster with our open source AI personal tutor";
12 | let url = "https://llamatutor.com/";
13 | let ogimage = "https://llamatutor.together.ai/og-image.png";
14 | let sitename = "llamatutor.com";
15 |
16 | export const metadata: Metadata = {
17 | metadataBase: new URL(url),
18 | title,
19 | description,
20 | icons: {
21 | icon: "/favicon.ico",
22 | },
23 | openGraph: {
24 | images: [ogimage],
25 | title,
26 | description,
27 | url: url,
28 | siteName: sitename,
29 | locale: "en_US",
30 | type: "website",
31 | },
32 | twitter: {
33 | card: "summary_large_image",
34 | images: [ogimage],
35 | title,
36 | description,
37 | },
38 | };
39 |
40 | export default function RootLayout({
41 | children,
42 | }: Readonly<{
43 | children: React.ReactNode;
44 | }>) {
45 | return (
46 |
47 |
48 |
49 |
50 |
51 |
54 |
59 | {children}
60 |
61 |
62 | );
63 | }
64 |
--------------------------------------------------------------------------------
/app/page.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import Footer from "@/components/Footer";
4 | import Header from "@/components/Header";
5 | import Hero from "@/components/Hero";
6 | import Sources from "@/components/Sources";
7 | import { useState } from "react";
8 | import {
9 | createParser,
10 | ParsedEvent,
11 | ReconnectInterval,
12 | } from "eventsource-parser";
13 | import { getSystemPrompt } from "@/utils/utils";
14 | import Chat from "@/components/Chat";
15 |
16 | export default function Home() {
17 | const [inputValue, setInputValue] = useState("");
18 | const [topic, setTopic] = useState("");
19 | const [showResult, setShowResult] = useState(false);
20 | const [sources, setSources] = useState<{ name: string; url: string }[]>([]);
21 | const [isLoadingSources, setIsLoadingSources] = useState(false);
22 | const [messages, setMessages] = useState<{ role: string; content: string }[]>(
23 | [],
24 | );
25 | const [loading, setLoading] = useState(false);
26 | const [ageGroup, setAgeGroup] = useState("Middle School");
27 |
28 | const handleInitialChat = async () => {
29 | setShowResult(true);
30 | setLoading(true);
31 | setTopic(inputValue);
32 | setInputValue("");
33 |
34 | await handleSourcesAndChat(inputValue);
35 |
36 | setLoading(false);
37 | };
38 |
39 | const handleChat = async (messages?: { role: string; content: string }[]) => {
40 | setLoading(true);
41 | const chatRes = await fetch("/api/getChat", {
42 | method: "POST",
43 | headers: {
44 | "Content-Type": "application/json",
45 | },
46 | body: JSON.stringify({ messages }),
47 | });
48 |
49 | if (!chatRes.ok) {
50 | throw new Error(chatRes.statusText);
51 | }
52 |
53 | // This data is a ReadableStream
54 | const data = chatRes.body;
55 | if (!data) {
56 | return;
57 | }
58 | let fullAnswer = "";
59 |
60 | const onParse = (event: ParsedEvent | ReconnectInterval) => {
61 | if (event.type === "event") {
62 | const data = event.data;
63 | try {
64 | const text = JSON.parse(data).text ?? "";
65 | fullAnswer += text;
66 | // Update messages with each chunk
67 | setMessages((prev) => {
68 | const lastMessage = prev[prev.length - 1];
69 | if (lastMessage.role === "assistant") {
70 | return [
71 | ...prev.slice(0, -1),
72 | { ...lastMessage, content: lastMessage.content + text },
73 | ];
74 | } else {
75 | return [...prev, { role: "assistant", content: text }];
76 | }
77 | });
78 | } catch (e) {
79 | console.error(e);
80 | }
81 | }
82 | };
83 |
84 | // https://web.dev/streams/#the-getreader-and-read-methods
85 | const reader = data.getReader();
86 | const decoder = new TextDecoder();
87 | const parser = createParser(onParse);
88 | let done = false;
89 |
90 | while (!done) {
91 | const { value, done: doneReading } = await reader.read();
92 | done = doneReading;
93 | const chunkValue = decoder.decode(value);
94 | parser.feed(chunkValue);
95 | }
96 | setLoading(false);
97 | };
98 |
99 | async function handleSourcesAndChat(question: string) {
100 | setIsLoadingSources(true);
101 | let sourcesResponse = await fetch("/api/getSources", {
102 | method: "POST",
103 | body: JSON.stringify({ question }),
104 | });
105 | let sources;
106 | if (sourcesResponse.ok) {
107 | sources = await sourcesResponse.json();
108 |
109 | setSources(sources);
110 | } else {
111 | setSources([]);
112 | }
113 | setIsLoadingSources(false);
114 |
115 | const parsedSourcesRes = await fetch("/api/getParsedSources", {
116 | method: "POST",
117 | body: JSON.stringify({ sources }),
118 | });
119 | let parsedSources;
120 | if (parsedSourcesRes.ok) {
121 | parsedSources = await parsedSourcesRes.json();
122 | }
123 |
124 | const initialMessage = [
125 | { role: "system", content: getSystemPrompt(parsedSources, ageGroup) },
126 | { role: "user", content: `${question}` },
127 | ];
128 | setMessages(initialMessage);
129 | await handleChat(initialMessage);
130 | }
131 |
132 | return (
133 | <>
134 |
135 |
136 |
139 | {showResult ? (
140 |
141 |
142 |
143 |
152 |
153 |
154 |
155 |
156 | ) : (
157 |
165 | )}
166 |
167 | {/* */}
168 | >
169 | );
170 | }
171 |
--------------------------------------------------------------------------------
/components/Chat.tsx:
--------------------------------------------------------------------------------
1 | import ReactMarkdown from "react-markdown";
2 | import FinalInputArea from "./FinalInputArea";
3 | import { useEffect, useRef, useState } from "react";
4 | import simpleLogo from "../public/simple-logo.png";
5 | import Image from "next/image";
6 |
7 | export default function Chat({
8 | messages,
9 | disabled,
10 | promptValue,
11 | setPromptValue,
12 | setMessages,
13 | handleChat,
14 | topic,
15 | }: {
16 | messages: { role: string; content: string }[];
17 | disabled: boolean;
18 | promptValue: string;
19 | setPromptValue: React.Dispatch<React.SetStateAction<string>>;
20 | setMessages: React.Dispatch<
21 | React.SetStateAction<{ role: string; content: string }[]>
22 | >;
23 | handleChat: () => void;
24 | topic: string;
25 | }) {
26 | const messagesEndRef = useRef<HTMLDivElement>(null);
27 | const scrollableContainerRef = useRef<HTMLDivElement>(null);
28 | const [didScrollToBottom, setDidScrollToBottom] = useState(true);
29 |
30 | function scrollToBottom() {
31 | messagesEndRef.current?.scrollIntoView({ behavior: "instant" });
32 | }
33 |
34 | useEffect(() => {
35 | if (didScrollToBottom) {
36 | scrollToBottom();
37 | }
38 | }, [didScrollToBottom, messages]);
39 |
40 | useEffect(() => {
41 | let el = scrollableContainerRef.current;
42 | if (!el) {
43 | return;
44 | }
45 |
46 | function handleScroll() {
47 | if (scrollableContainerRef.current) {
48 | const { scrollTop, scrollHeight, clientHeight } =
49 | scrollableContainerRef.current;
50 | setDidScrollToBottom(scrollTop + clientHeight >= scrollHeight);
51 | }
52 | }
53 |
54 | el.addEventListener("scroll", handleScroll);
55 |
56 | return () => {
57 | el.removeEventListener("scroll", handleScroll);
58 | };
59 | }, []);
60 |
61 | return (
62 |
63 |
64 |
65 | Topic:
66 | {topic}
67 |
68 |
72 | {messages.length > 2 ? (
73 |
74 | {messages.slice(2).map((message, index) =>
75 | message.role === "assistant" ? (
76 |
77 |
82 |
83 | {message.content}
84 |
85 |
86 | ) : (
87 |
91 | {message.content}
92 |
93 | ),
94 | )}
95 |
96 |
97 | ) : (
98 |
99 | {Array.from(Array(10).keys()).map((i) => (
100 |
105 | ))}
106 |
107 | )}
108 |
109 |
110 |
111 |
112 |
120 |
121 |
122 | );
123 | }
124 |
--------------------------------------------------------------------------------
/components/FinalInputArea.tsx:
--------------------------------------------------------------------------------
1 | import { FC, KeyboardEvent } from "react";
2 | import TypeAnimation from "./TypeAnimation";
3 | import Image from "next/image";
4 |
5 | type TInputAreaProps = {
6 | promptValue: string;
7 | setPromptValue: React.Dispatch<React.SetStateAction<string>>;
8 | disabled?: boolean;
9 | messages: { role: string; content: string }[];
10 | setMessages: React.Dispatch<
11 | React.SetStateAction<{ role: string; content: string }[]>
12 | >;
13 | handleChat: (messages?: { role: string; content: string }[]) => void;
14 | };
15 |
16 | const FinalInputArea: FC<TInputAreaProps> = ({
17 | promptValue,
18 | setPromptValue,
19 | disabled,
20 | messages,
21 | setMessages,
22 | handleChat,
23 | }) => {
24 | function onSubmit() {
25 | let latestMessages = [...messages, { role: "user", content: promptValue }];
26 | setPromptValue("");
27 | setMessages(latestMessages);
28 | handleChat(latestMessages);
29 | }
30 |
31 | const handleKeyDown = (e: KeyboardEvent) => {
32 | if (e.key === "Enter") {
33 | if (e.shiftKey) {
34 | return;
35 | } else {
36 | e.preventDefault();
37 | onSubmit();
38 | }
39 | }
40 | };
41 |
42 | return (
43 |
83 | );
84 | };
85 |
86 | export default FinalInputArea;
87 |
--------------------------------------------------------------------------------
/components/Footer.tsx:
--------------------------------------------------------------------------------
1 | import Image from "next/image";
2 | import Link from "next/link";
3 |
4 | const Footer = () => {
5 | return (
6 |
7 |
8 |
15 |
16 |
17 |
18 |
25 |
26 |
27 | {" "}
34 |
35 |
36 |
37 | );
38 | };
39 |
40 | export default Footer;
41 |
--------------------------------------------------------------------------------
/components/Header.tsx:
--------------------------------------------------------------------------------
1 | import { Logo } from "./logo";
2 |
3 | const Header = () => {
4 | return (
5 |
10 | );
11 | };
12 |
13 | export default Header;
14 |
--------------------------------------------------------------------------------
/components/Hero.tsx:
--------------------------------------------------------------------------------
1 | import Image from "next/image";
2 | import { FC } from "react";
3 | import desktopImg from "../public/desktop-screenshot.png";
4 | import mobileImg from "../public/screenshot-mobile.png";
5 | import InitialInputArea from "./InitialInputArea";
6 | import { suggestions } from "@/utils/utils";
7 |
8 | type THeroProps = {
9 | promptValue: string;
10 | setPromptValue: React.Dispatch<React.SetStateAction<string>>;
11 | handleChat: (messages?: { role: string; content: string }[]) => void;
12 | ageGroup: string;
13 | setAgeGroup: React.Dispatch<React.SetStateAction<string>>;
14 | handleInitialChat: () => void;
15 | };
16 |
17 | const Hero: FC<THeroProps> = ({
18 | promptValue,
19 | setPromptValue,
20 | handleChat,
21 | ageGroup,
22 | setAgeGroup,
23 | handleInitialChat,
24 | }) => {
25 | const handleClickSuggestion = (value: string) => {
26 | setPromptValue(value);
27 | };
28 |
29 | return (
30 | <>
31 |
32 |
37 |
44 |
45 | Powered by Llama 3.1 and Together AI
46 |
47 |
48 |
49 | Your Personal{" "}
50 |
51 | Tutor
52 |
53 |
54 |
55 | Enter a topic you want to learn about along with the education level
56 | you want to be taught at and generate a personalized tutor tailored to
57 | you!
58 |
59 |
60 |
61 |
69 |
70 |
71 |
72 | {suggestions.map((item) => (
73 |
75 | onClick={() => handleClickSuggestion(item?.name)}
76 | key={item.id}
77 | >
78 |
85 |
86 | {item.name}
87 |
88 |
89 | ))}
90 |
91 |
92 | Fully open source!{" "}
93 |
94 |
99 | Star it on github.
100 |
101 |
102 |
103 |
104 |
105 |
110 |
115 |
116 | >
117 | );
118 | };
119 |
120 | export default Hero;
121 |
--------------------------------------------------------------------------------
/components/InitialInputArea.tsx:
--------------------------------------------------------------------------------
1 | import { FC, KeyboardEvent } from "react";
2 | import TypeAnimation from "./TypeAnimation";
3 | import Image from "next/image";
4 |
5 | type TInputAreaProps = {
6 | promptValue: string;
7 | setPromptValue: React.Dispatch<React.SetStateAction<string>>;
8 | disabled?: boolean;
9 | handleChat: (messages?: { role: string; content: string }[]) => void;
10 | ageGroup: string;
11 | setAgeGroup: React.Dispatch<React.SetStateAction<string>>;
12 | handleInitialChat: () => void;
13 | };
14 |
15 | const InitialInputArea: FC<TInputAreaProps> = ({
16 | promptValue,
17 | setPromptValue,
18 | disabled,
19 | handleInitialChat,
20 | ageGroup,
21 | setAgeGroup,
22 | }) => {
23 | const handleKeyDown = (e: KeyboardEvent) => {
24 | if (e.key === "Enter") {
25 | if (e.shiftKey) {
26 | return;
27 | } else {
28 | e.preventDefault();
29 | handleInitialChat();
30 | }
31 | }
32 | };
33 |
34 | return (
35 | {
38 | e.preventDefault();
39 | handleInitialChat();
40 | }}
41 | >
42 |
43 |
setPromptValue(e.target.value)}
51 | rows={1}
52 | />
53 |
54 | setAgeGroup(e.target.value)}
60 | >
61 | Elementary School
62 | Middle School
63 | High School
64 | College
65 | Undergrad
66 | Graduate
67 |
68 |
69 |
70 |
75 | {disabled && (
76 |
77 |
78 |
79 | )}
80 |
81 |
89 | Search
90 |
91 |
92 | );
93 | };
94 |
95 | export default InitialInputArea;
96 |
--------------------------------------------------------------------------------
/components/Sources.tsx:
--------------------------------------------------------------------------------
1 | import Image from "next/image";
2 |
3 | export default function Sources({
4 | sources,
5 | isLoading,
6 | }: {
7 | sources: { name: string; url: string }[];
8 | isLoading: boolean;
9 | }) {
10 | return (
11 |
12 |
13 |
14 | sources:{" "}
15 |
16 |
17 |
18 | {isLoading ? (
19 | <>
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 | >
30 | ) : sources.length > 0 ? (
31 | sources.map((source) => (
32 |
33 | ))
34 | ) : (
35 | Could not fetch sources.
36 | )}
37 |
38 |
39 | );
40 | }
41 |
42 | const SourceCard = ({ source }: { source: { name: string; url: string } }) => {
43 | return (
44 |
67 | );
68 | };
69 |
--------------------------------------------------------------------------------
/components/TypeAnimation.tsx:
--------------------------------------------------------------------------------
1 | const TypeAnimation = () => {
2 | return (
3 |
4 |
5 |
6 |
7 |
8 | );
9 | };
10 |
11 | export default TypeAnimation;
12 |
--------------------------------------------------------------------------------
/next.config.mjs:
--------------------------------------------------------------------------------
1 | /** @type {import('next').NextConfig} */
2 | const nextConfig = {
3 | images: {
4 | remotePatterns: [
5 | {
6 | hostname: "www.google.com",
7 | },
8 | ],
9 | },
10 | };
11 |
12 | export default nextConfig;
13 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "llama-tutor",
3 | "version": "0.1.0",
4 | "private": true,
5 | "scripts": {
6 | "dev": "next dev",
7 | "build": "next build",
8 | "start": "next start",
9 | "lint": "next lint"
10 | },
11 | "dependencies": {
12 | "@headlessui/react": "^2.1.2",
13 | "@mozilla/readability": "^0.5.0",
14 | "eventsource-parser": "^1.1.2",
15 | "jsdom": "^24.1.0",
16 | "llama3-tokenizer-js": "^1.1.3",
17 | "next": "14.2.3",
18 | "next-plausible": "^3.12.0",
19 | "openai": "^4.52.7",
20 | "react": "^18",
21 | "react-dom": "^18",
22 | "react-hot-toast": "^2.4.1",
23 | "react-markdown": "^9.0.1",
24 | "together-ai": "^0.6.0-alpha.3",
25 | "zod": "^3.23.8",
26 | "zod-to-json-schema": "^3.23.0"
27 | },
28 | "devDependencies": {
29 | "@tailwindcss/typography": "^0.5.13",
30 | "@types/jsdom": "^21.1.6",
31 | "@types/node": "^20",
32 | "@types/react": "^18",
33 | "@types/react-dom": "^18",
34 | "eslint": "^8",
35 | "eslint-config-next": "14.2.3",
36 | "postcss": "^8",
37 | "prettier": "^3.2.5",
38 | "prettier-plugin-tailwindcss": "^0.6.0",
39 | "tailwindcss": "^3.4.1",
40 | "typescript": "^5"
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/postcss.config.mjs:
--------------------------------------------------------------------------------
1 | /** @type {import('postcss-load-config').Config} */
2 | const config = {
3 | plugins: {
4 | tailwindcss: {},
5 | },
6 | };
7 |
8 | export default config;
9 |
--------------------------------------------------------------------------------
/public/arrow-up.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/public/basketball-new.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/public/desktop-screenshot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nutlope/llamatutor/457519ed2b3af83f838de7911b0cbb4546fbb8a7/public/desktop-screenshot.png
--------------------------------------------------------------------------------
/public/finance.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/public/github.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/public/light-new.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/public/new-bg.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nutlope/llamatutor/457519ed2b3af83f838de7911b0cbb4546fbb8a7/public/new-bg.png
--------------------------------------------------------------------------------
/public/og-image.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nutlope/llamatutor/457519ed2b3af83f838de7911b0cbb4546fbb8a7/public/og-image.png
--------------------------------------------------------------------------------
/public/screenshot-mobile.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nutlope/llamatutor/457519ed2b3af83f838de7911b0cbb4546fbb8a7/public/screenshot-mobile.png
--------------------------------------------------------------------------------
/public/similarTopics.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/public/simple-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nutlope/llamatutor/457519ed2b3af83f838de7911b0cbb4546fbb8a7/public/simple-logo.png
--------------------------------------------------------------------------------
/public/togethercomputer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nutlope/llamatutor/457519ed2b3af83f838de7911b0cbb4546fbb8a7/public/togethercomputer.png
--------------------------------------------------------------------------------
/public/twitter.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/public/up-arrow.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/public/us.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/tailwind.config.ts:
--------------------------------------------------------------------------------
1 | import type { Config } from "tailwindcss";
2 | import colors from "tailwindcss/colors";
3 |
4 | const config: Config = {
5 | content: [
6 | "./pages/**/*.{js,ts,jsx,tsx,mdx}",
7 | "./components/**/*.{js,ts,jsx,tsx,mdx}",
8 | "./app/**/*.{js,ts,jsx,tsx,mdx}",
9 | ],
10 | theme: {
11 | screens: {
12 | sm: "640px",
13 | md: "768px",
14 | lg: "898px",
15 | },
16 |
17 | container: {
18 | center: true,
19 | },
20 | extend: {
21 | colors: {
22 | gray: colors.neutral,
23 | },
24 | backgroundImage: {
25 | "gradient-radial": "radial-gradient(var(--tw-gradient-stops))",
26 | "custom-gradient":
27 | "linear-gradient(150deg, #1B1B16 1.28%, #565646 90.75%)",
28 | "gradient-conic":
29 | "conic-gradient(from 180deg at 50% 50%, var(--tw-gradient-stops))",
30 | },
31 | },
32 | },
33 | plugins: [require("@tailwindcss/typography")],
34 | };
35 | export default config;
36 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "lib": ["dom", "dom.iterable", "esnext"],
4 | "allowJs": true,
5 | "skipLibCheck": true,
6 | "strict": true,
7 | "noEmit": true,
8 | "esModuleInterop": true,
9 | "module": "esnext",
10 | "moduleResolution": "bundler",
11 | "resolveJsonModule": true,
12 | "isolatedModules": true,
13 | "jsx": "preserve",
14 | "incremental": true,
15 | "plugins": [
16 | {
17 | "name": "next"
18 | }
19 | ],
20 | "paths": {
21 | "@/*": ["./*"]
22 | }
23 | },
24 | "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
25 | "exclude": ["node_modules"]
26 | }
27 |
--------------------------------------------------------------------------------
/utils/TogetherAIStream.ts:
--------------------------------------------------------------------------------
1 | import {
2 | createParser,
3 | ParsedEvent,
4 | ReconnectInterval,
5 | } from "eventsource-parser";
6 |
7 | export type ChatGPTAgent = "user" | "system";
8 |
9 | export interface ChatGPTMessage {
10 | role: ChatGPTAgent;
11 | content: string;
12 | }
13 |
14 | export interface TogetherAIStreamPayload {
15 | model: string;
16 | messages: ChatGPTMessage[];
17 | stream: boolean;
18 | }
19 |
20 | // const together = new Together({
21 | // apiKey: process.env["TOGETHER_API_KEY"],
22 | // baseURL: "https://together.helicone.ai/v1",
23 | // defaultHeaders: {
24 | // "Helicone-Auth": `Bearer ${process.env.HELICONE_API_KEY}`,
25 | // },
26 | // });
27 |
28 | export async function TogetherAIStream(payload: TogetherAIStreamPayload) {
29 | const encoder = new TextEncoder();
30 | const decoder = new TextDecoder();
31 |
32 | const res = await fetch("https://together.helicone.ai/v1/chat/completions", {
33 | headers: {
34 | "Content-Type": "application/json",
35 | "Helicone-Auth": `Bearer ${process.env.HELICONE_API_KEY}`,
36 | Authorization: `Bearer ${process.env.TOGETHER_API_KEY ?? ""}`,
37 | },
38 | method: "POST",
39 | body: JSON.stringify(payload),
40 | });
41 |
42 | const readableStream = new ReadableStream({
43 | async start(controller) {
44 | // callback
45 | const onParse = (event: ParsedEvent | ReconnectInterval) => {
46 | if (event.type === "event") {
47 | const data = event.data;
48 | controller.enqueue(encoder.encode(data));
49 | }
50 | };
51 |
52 | // optimistic error handling
53 | if (res.status !== 200) {
54 | const data = {
55 | status: res.status,
56 | statusText: res.statusText,
57 | body: await res.text(),
58 | };
59 | console.log(
60 | `Error: received non-200 status code, ${JSON.stringify(data)}`,
61 | );
62 | controller.close();
63 | return;
64 | }
65 |
66 | // stream response (SSE) from Together AI may be fragmented into multiple chunks
67 | // this ensures we properly read chunks and invoke an event for each SSE event stream
68 | const parser = createParser(onParse);
69 | // https://web.dev/streams/#asynchronous-iteration
70 | for await (const chunk of res.body as any) {
71 | parser.feed(decoder.decode(chunk));
72 | }
73 | },
74 | });
75 |
76 | let counter = 0;
77 | const transformStream = new TransformStream({
78 | async transform(chunk, controller) {
79 | const data = decoder.decode(chunk);
80 | // https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
81 | if (data === "[DONE]") {
82 | controller.terminate();
83 | return;
84 | }
85 | try {
86 | const json = JSON.parse(data);
87 | const text = json.choices[0].delta?.content || "";
88 | if (counter < 2 && (text.match(/\n/) || []).length) {
89 | // this is a prefix character (i.e., "\n\n"), do nothing
90 | return;
91 | }
92 | // stream transformed JSON response as SSE
93 | const payload = { text: text };
94 | // https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#event_stream_format
95 | controller.enqueue(
96 | encoder.encode(`data: ${JSON.stringify(payload)}\n\n`),
97 | );
98 | counter++;
99 | } catch (e) {
100 | // maybe parse error
101 | controller.error(e);
102 | }
103 | },
104 | });
105 |
106 | return readableStream.pipeThrough(transformStream);
107 | }
108 |
--------------------------------------------------------------------------------
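The request above goes through Helicone's proxy (`together.helicone.ai`) so usage shows up in observability. If you wanted to skip Helicone, the same payload could be sent to Together's API directly — a hedged sketch of that variant, not part of the repo:

```ts
import type { TogetherAIStreamPayload } from "@/utils/TogetherAIStream";

// Hypothetical variant: call Together's chat completions endpoint directly,
// skipping the Helicone proxy. Payload shape is unchanged; only the base URL
// and headers differ.
async function fetchTogetherDirect(payload: TogetherAIStreamPayload) {
  return fetch("https://api.together.xyz/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${process.env.TOGETHER_API_KEY ?? ""}`,
    },
    body: JSON.stringify(payload),
  });
}
```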
/utils/utils.ts:
--------------------------------------------------------------------------------
1 | // import llama3Tokenizer from "llama3-tokenizer-js";
2 |
3 | export const cleanedText = (text: string) => {
4 | let newText = text
5 | .trim()
6 | .replace(/(\n){4,}/g, "\n\n\n")
7 | .replace(/\n\n/g, " ")
8 | .replace(/ {3,}/g, " ")
9 | .replace(/\t/g, "")
10 | .replace(/\n+(\s*\n)*/g, "\n")
11 | .substring(0, 100000);
12 |
13 | // console.log(llama3Tokenizer.encode(newText).length);
14 |
15 | return newText;
16 | };
17 |
18 | export async function fetchWithTimeout(
19 | url: string,
20 | options = {},
21 | timeout = 3000,
22 | ) {
23 | // Create an AbortController
24 | const controller = new AbortController();
25 | const { signal } = controller;
26 |
27 | // Set a timeout to abort the fetch
28 | const fetchTimeout = setTimeout(() => {
29 | controller.abort();
30 | }, timeout);
31 |
32 | // Start the fetch request with the abort signal
33 | return fetch(url, { ...options, signal })
34 | .then((response) => {
35 | clearTimeout(fetchTimeout); // Clear the timeout if the fetch completes in time
36 | return response;
37 | })
38 | .catch((error) => {
39 | if (error.name === "AbortError") {
40 | throw new Error("Fetch request timed out");
41 | }
42 | throw error; // Re-throw other errors
43 | });
44 | }
45 |
46 | type suggestionType = {
47 | id: number;
48 | name: string;
49 | icon: string;
50 | };
51 |
52 | export const suggestions: suggestionType[] = [
53 | {
54 | id: 1,
55 | name: "Basketball",
56 | icon: "/basketball-new.svg",
57 | },
58 | {
59 | id: 2,
60 | name: "Machine Learning",
61 | icon: "/light-new.svg",
62 | },
63 | {
64 | id: 3,
65 | name: "Personal Finance",
66 | icon: "/finance.svg",
67 | },
68 | {
69 | id: 4,
70 | name: "U.S History",
71 | icon: "/us.svg",
72 | },
73 | ];
74 |
75 | export const getSystemPrompt = (
76 | finalResults: { fullContent: string }[],
77 | ageGroup: string,
78 | ) => {
79 | return `
80 | You are a professional interactive personal tutor who is an expert at explaining topics. Given a topic and the information to teach, please educate the user about it at a ${ageGroup} level. Start off by greeting the learner, giving them a short overview of the topic, and then asking them what they want to learn about (as a markdown numbered list). Be interactive throughout the chat and quiz the user occasionally after you teach them material. Do not quiz them in the first overview message and make the first message short and concise.
81 |
82 | Here is the information to teach:
83 |
84 |
85 | ${"\n"}
86 | ${finalResults
87 | .slice(0, 7)
88 | .map(
89 | (result, index) => `## Webpage #${index}:\n ${result.fullContent} \n\n`,
90 | )}
91 |
92 |
93 | Here's the age group to teach at:
94 |
95 |
96 | ${ageGroup}
97 |
98 |
99 | Please return answer in markdown. It is very important for my career that you follow these instructions. Here is the topic to educate on:
100 | `;
101 | };
102 |
--------------------------------------------------------------------------------
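For context, `getSystemPrompt` is what seeds the chat in `app/page.tsx`. A short sketch of how the pieces fit together, with hypothetical values standing in for real parsed sources:

```ts
import { getSystemPrompt } from "@/utils/utils";

// Hypothetical example of building the initial message list the way
// app/page.tsx does: one parsed source plus the selected age group.
const parsedSources = [{ fullContent: "Basketball is a team sport ..." }];

const initialMessages = [
  { role: "system", content: getSystemPrompt(parsedSources, "Middle School") },
  { role: "user", content: "Basketball" },
];
```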