├── public
│   ├── favicon.ico
│   └── screenshot.png
├── styles
│   └── globals.css
├── postcss.config.js
├── .eslintrc.json
├── Dockerfile
├── types
│   └── index.ts
├── tailwind.config.js
├── pages
│   ├── _document.tsx
│   ├── _app.tsx
│   ├── index.tsx
│   └── app.tsx
├── .dockerignore
├── next.config.js
├── components
│   ├── Layout
│   │   └── Navbar.tsx
│   ├── Chat
│   │   ├── ChatLoader.tsx
│   │   ├── ResetChat.tsx
│   │   ├── SaveChat.tsx
│   │   ├── ChatMessage.tsx
│   │   ├── Chat.tsx
│   │   └── ChatInput.tsx
│   └── Memory
│       ├── UndoThoughts.tsx
│       ├── CopyVectors.tsx
│       ├── Context.tsx
│       ├── Vectors.tsx
│       └── Thoughts.tsx
├── README.md
├── .gitignore
├── tsconfig.json
├── package.json
├── license
└── .github
    └── workflows
        └── docker-publish.yml
/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/COOORN/AssistGPT/HEAD/public/favicon.ico
--------------------------------------------------------------------------------
/styles/globals.css:
--------------------------------------------------------------------------------
1 | @tailwind base;
2 | @tailwind components;
3 | @tailwind utilities;
4 |
--------------------------------------------------------------------------------
/public/screenshot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/COOORN/AssistGPT/HEAD/public/screenshot.png
--------------------------------------------------------------------------------
/postcss.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | plugins: {
3 | tailwindcss: {},
4 | autoprefixer: {},
5 | },
6 | }
7 |
--------------------------------------------------------------------------------
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "next/core-web-vitals",
3 | "rules": {
4 | "react/no-unescaped-entities": "off",
5 | "@next/next/no-page-custom-font": "off"
6 | }
7 | }
8 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:lts-alpine
2 |
3 | WORKDIR /app
4 | COPY package*.json ./
5 | RUN npm install
6 | COPY . .
7 | RUN npx next build
8 | EXPOSE 3000
9 | CMD ["npx", "next", "start"]
10 |
--------------------------------------------------------------------------------
/types/index.ts:
--------------------------------------------------------------------------------
1 | export enum OpenAIModel {
2 | DAVINCI_TURBO = "gpt-3.5-turbo"
3 | }
4 |
5 | export interface Message {
6 | role: Role;
7 | content: string;
8 | }
9 |
10 | export type Role = "assistant" | "user";
11 |
--------------------------------------------------------------------------------
/tailwind.config.js:
--------------------------------------------------------------------------------
1 | /** @type {import('tailwindcss').Config} */
2 | module.exports = {
3 | content: ["./app/**/*.{js,ts,jsx,tsx}", "./pages/**/*.{js,ts,jsx,tsx}", "./components/**/*.{js,ts,jsx,tsx}"],
4 | theme: {
5 | extend: {}
6 | },
7 | plugins: [],
8 | darkMode: "media",
9 | };
10 |
--------------------------------------------------------------------------------
/pages/_document.tsx:
--------------------------------------------------------------------------------
1 | import { Html, Head, Main, NextScript } from "next/document";
2 |
3 | export default function Document() {
4 | return (
5 |
6 |
8 |
9 |
10 |
11 |
12 | );
13 | }
14 |
--------------------------------------------------------------------------------
/pages/_app.tsx:
--------------------------------------------------------------------------------
1 | import "@/styles/globals.css";
2 | import type { AppProps } from "next/app";
3 | import { Inter } from "next/font/google";
4 |
5 | const inter = Inter({ subsets: ["latin"] });
6 |
7 | export default function App({ Component, pageProps }: AppProps<{}>) {
8 | return (
9 |
10 |
11 |
12 | );
13 | }
14 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | **/.classpath
2 | **/.dockerignore
3 | **/.env
4 | **/.git
5 | **/.gitignore
6 | **/.project
7 | **/.settings
8 | **/.toolstarget
9 | **/.vs
10 | **/.vscode
11 | **/*.*proj.user
12 | **/*.dbmdl
13 | **/*.jfm
14 | **/charts
15 | **/docker-compose*
16 | **/compose*
17 | **/Dockerfile*
18 | **/node_modules
19 | **/npm-debug.log
20 | **/obj
21 | **/secrets.dev.yaml
22 | **/values.dev.yaml
23 | LICENSE
24 | README.md
25 |
--------------------------------------------------------------------------------
/next.config.js:
--------------------------------------------------------------------------------
1 | /** @type {import('next').NextConfig} */
2 | const nextConfig = {
3 | webpack(config) {
4 | config.experiments = {
5 | asyncWebAssembly: true,
6 | layers: true,
7 | };
8 |
9 | return config;
10 | },
11 | eslint: {
12 | // Warning: This allows production builds to successfully complete even if
13 | // your project has ESLint errors.
14 | ignoreDuringBuilds: true,
15 | },
16 | };
17 | module.exports = nextConfig
18 |
19 |
--------------------------------------------------------------------------------
/components/Layout/Navbar.tsx:
--------------------------------------------------------------------------------
1 | import { FC } from "react";
2 |
3 | export const Navbar: FC = () => {
4 | return (
5 | <>
6 |
7 |
12 | >
13 | );
14 | };
15 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | A GPT client that has long term memory.
2 | ## How it works
3 | Every time a message is sent, embeddings are created and relevant documents are retrieved (shown in the context card).
4 | There is also a persistent memory that the AI always has access to.
5 |
6 | Full privacy: everything is completely local, using localForage (an IndexedDB wrapper).
7 |
8 | ### To-do (in no particular order):
9 |
10 | - [ ] Web browsing
11 | - [ ] Local AI - once LangChain's JS/TS support improves, this should be possible
12 |
--------------------------------------------------------------------------------
/components/Chat/ChatLoader.tsx:
--------------------------------------------------------------------------------
1 | import { IconDots } from "@tabler/icons-react";
2 | import { FC } from "react";
3 |
4 | interface Props {}
5 |
6 | export const ChatLoader: FC = () => {
7 | return (
8 |
16 | );
17 | };
18 |
--------------------------------------------------------------------------------
/components/Memory/UndoThoughts.tsx:
--------------------------------------------------------------------------------
1 | import { FC } from "react";
2 |
3 | interface Props {
4 | onUndo: () => void;
5 | }
6 |
7 | export const UndoThoughts: FC = ({ onUndo }) => {
8 | return (
9 | onUndo()}
11 | className="block basis-1/4 rounded-md bg-blue-600 px-2.5 py-2.5 text-center text-sm font-semibold text-white shadow-sm hover:bg-blue-500 focus-visible:outline focus-visible:outline-2 focus-visible:outline-offset-2 focus-visible:outline-indigo-600"
12 | >
13 | Undo
14 |
15 | );
16 | };
17 |
--------------------------------------------------------------------------------
/components/Chat/ResetChat.tsx:
--------------------------------------------------------------------------------
1 | import { FC } from "react";
2 |
3 | interface Props {
4 | onReset: () => void;
5 | }
6 |
7 | export const ResetChat: FC = ({ onReset }) => {
8 | return (
9 |
10 | onReset()}
13 | >
14 | Reset
15 |
16 |
17 | );
18 | };
19 | ;
--------------------------------------------------------------------------------
/components/Chat/SaveChat.tsx:
--------------------------------------------------------------------------------
1 | import { FC } from "react";
2 |
3 | interface Props {
4 | onSave: () => void;
5 | loadingSave: boolean;
6 | }
7 |
8 | export const SaveChat: FC = ({ onSave, loadingSave }) => {
9 | return (
10 | onSave()} className="block rounded-md bg-blue-600 px-2.5 py-2.5 text-center text-sm font-semibold text-white shadow-sm hover:bg-blue-500 focus-visible:outline focus-visible:outline-2 focus-visible:outline-offset-2 focus-visible:outline-indigo-600">{loadingSave && "Saving..."}{!loadingSave && "Save"}
11 | );
12 | };
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | .env
4 |
5 | /.vscode/
6 |
7 | # dependencies
8 | /node_modules
9 | /.pnp
10 | .pnp.js
11 |
12 | # testing
13 | /coverage
14 |
15 | # next.js
16 | /.next/
17 | /out/
18 |
19 | # production
20 | /build
21 |
22 | # misc
23 | .DS_Store
24 | *.pem
25 |
26 | # debug
27 | npm-debug.log*
28 | yarn-debug.log*
29 | yarn-error.log*
30 | .pnpm-debug.log*
31 |
32 | # local env files
33 | .env*.local
34 |
35 | # vercel
36 | .vercel
37 |
38 | # typescript
39 | *.tsbuildinfo
40 | next-env.d.ts
41 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "ESNext",
4 | "lib": ["dom", "dom.iterable", "esnext"],
5 | "allowJs": true,
6 | "skipLibCheck": true,
7 | "strict": true,
8 | "forceConsistentCasingInFileNames": true,
9 | "noEmit": true,
10 | "esModuleInterop": true,
11 | "module": "esnext",
12 | "moduleResolution": "node",
13 | "resolveJsonModule": true,
14 | "isolatedModules": true,
15 | "jsx": "preserve",
16 | "incremental": true,
17 | "paths": {
18 | "@/*": ["./*"]
19 | }
20 | },
21 | "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"],
22 | "exclude": ["node_modules"]
23 | }
24 |
--------------------------------------------------------------------------------
/components/Memory/CopyVectors.tsx:
--------------------------------------------------------------------------------
1 | import { FC } from "react";
2 |
3 | interface Props {
4 | vectors: string;
5 | }
6 |
7 | export const CopyVectors: FC = ({ vectors }) => {
8 | function handleCopy(vectors: string): void {
9 | navigator.clipboard.writeText(vectors);
10 | }
11 |
12 | return (
13 | handleCopy(vectors)}
15 | className="block basis-1/4 rounded-md bg-blue-600 px-2.5 py-2.5 text-center text-sm font-semibold text-white shadow-sm hover:bg-blue-500 focus-visible:outline focus-visible:outline-2 focus-visible:outline-offset-2 focus-visible:outline-indigo-600"
16 | >
17 | Copy to clipboard
18 |
19 | );
20 | };
21 |
--------------------------------------------------------------------------------
/components/Chat/ChatMessage.tsx:
--------------------------------------------------------------------------------
1 | import { Message } from "@/types";
2 | import { FC } from "react";
3 |
4 | interface Props {
5 | message: Message;
6 | }
7 |
8 | export const ChatMessage: FC = ({ message }) => {
9 | return (
10 |
11 |
15 | {message.content}
16 |
17 |
18 | );
19 | };
20 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "assistgpt",
3 | "version": "0.1.0",
4 | "private": true,
5 | "scripts": {
6 | "dev": "next dev",
7 | "build": "next build",
8 | "start": "next start",
9 | "lint": "next lint"
10 | },
11 | "dependencies": {
12 | "@tabler/icons-react": "^2.9.0",
13 | "@types/node": "18.15.0",
14 | "@types/react": "18.0.28",
15 | "@types/react-dom": "18.0.11",
16 | "eslint": "8.36.0",
17 | "eslint-config-next": "13.2.4",
18 | "eventsource-parser": "^0.1.0",
19 | "langchain": "^0.0.61",
20 | "localforage": "^1.10.0",
21 | "next": "^13.2.4",
22 | "openai": "^3.2.1",
23 | "react": "18.2.0",
24 | "react-dom": "18.2.0",
25 | "react-markdown": "^8.0.7",
26 | "remark-gfm": "^3.0.1",
27 | "tiktoken": "^1.0.7",
28 | "typescript": "4.9.5"
29 | },
30 | "devDependencies": {
31 | "autoprefixer": "^10.4.14",
32 | "postcss": "^8.4.21",
33 | "tailwindcss": "^3.2.7"
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/components/Memory/Context.tsx:
--------------------------------------------------------------------------------
1 | import { FC } from "react";
2 | import { useEffect, useRef, useState } from "react";
3 |
4 | interface Props {
5 | context: string;
6 | }
7 |
8 | export const Context: FC = ({ context }) => {
9 | const textareaRef = useRef(null);
10 |
11 | return (
12 |
13 |
14 |
15 | Context from past memories:
16 |
17 |
18 |
25 |
26 |
27 |
28 | );
29 | };
30 |
--------------------------------------------------------------------------------
/license:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 Mckay Wrigley
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/components/Memory/Vectors.tsx:
--------------------------------------------------------------------------------
1 | import { FC } from "react";
2 | import { CopyVectors } from "./CopyVectors";
3 |
4 | interface Props {
5 | onVectorsChange: (e: React.ChangeEvent) => void;
6 | vectors: string;
7 | }
8 |
9 | export const Vectors: FC = ({ vectors, onVectorsChange }) => {
10 | return (
11 |
12 |
13 |
14 | All Memories
15 |
16 |
17 | Use for Export and Import
18 |
19 |
20 |
27 |
28 |
29 |
30 |
31 | );
32 | };
33 |
--------------------------------------------------------------------------------
/components/Memory/Thoughts.tsx:
--------------------------------------------------------------------------------
1 | import { FC } from "react";
2 | import { useEffect, useRef, useState } from "react";
3 | import { UndoThoughts } from "./UndoThoughts";
4 |
5 | interface Props {
6 | onThoughtsChange: (e: React.ChangeEvent) => void;
7 | thoughts: string;
8 | onUndo: () => void;
9 | }
10 |
11 | export const Thoughts: FC = ({ thoughts, onThoughtsChange, onUndo }) => {
12 | const textareaRef = useRef(null);
13 |
14 | useEffect(() => {
15 | textareaRef.current?.style.setProperty("height", "");
16 | textareaRef.current?.style.setProperty(
17 | "height",
18 | `${textareaRef.current?.scrollHeight}px`
19 | );
20 | });
21 |
22 | return (
23 |
24 |
25 |
26 | AssistGPT's Persistent Memory:
27 |
28 |
29 |
37 |
38 |
39 |
40 |
41 | );
42 | };
43 |
--------------------------------------------------------------------------------
/components/Chat/Chat.tsx:
--------------------------------------------------------------------------------
1 | import { Message } from "@/types";
2 | import { FC } from "react";
3 | import { ChatInput } from "./ChatInput";
4 | import { ChatLoader } from "./ChatLoader";
5 | import { ChatMessage } from "./ChatMessage";
6 | import { ResetChat } from "./ResetChat";
7 | import { SaveChat } from "./SaveChat";
8 |
9 |
10 | interface Props {
11 | messages: Message[];
12 | loading: boolean;
13 | onSend: (message: Message) => void;
14 | onSave: () => void;
15 | loadingSave: boolean;
16 | totalTokens: number;
17 | }
18 |
19 | export const Chat: FC = ({ messages, loading, onSend, onSave, loadingSave, totalTokens }) => {
20 |
21 | return (
22 | <>
23 |
24 |
25 |
26 | {messages.map((message, index) => (
27 |
31 |
32 |
33 | ))}
34 |
35 | {loading && (
36 |
37 |
38 |
39 | )}
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
About {totalTokens}/3800 tokens used
49 |
50 |
51 |
52 |
53 |
54 | >
55 | );
56 | };
57 |
--------------------------------------------------------------------------------
/components/Chat/ChatInput.tsx:
--------------------------------------------------------------------------------
1 | import { Message } from "@/types";
2 | import { IconArrowUp } from "@tabler/icons-react";
3 | import { FC, KeyboardEvent, useEffect, useRef, useState } from "react";
4 |
5 | interface Props {
6 | onSend: (message: Message) => void;
7 | }
8 |
9 | export const ChatInput: FC = ({ onSend }) => {
10 | const [content, setContent] = useState();
11 |
12 | const textareaRef = useRef(null);
13 |
14 | const handleChange = (e: React.ChangeEvent) => {
15 | const value = e.target.value;
16 | if (value.length > 4000) {
17 | alert("Message limit is 4000 characters");
18 | return;
19 | }
20 |
21 | setContent(value);
22 | };
23 |
24 | const handleSend = () => {
25 | if (!content) {
26 | alert("Please enter a message");
27 | return;
28 | }
29 | onSend({ role: "user", content });
30 | setContent("");
31 | };
32 |
33 | const handleKeyDown = (e: KeyboardEvent) => {
34 | if (e.key === "Enter" && !e.shiftKey) {
35 | e.preventDefault();
36 | handleSend();
37 | }
38 | };
39 |
40 | useEffect(() => {
41 | if (textareaRef && textareaRef.current) {
42 | textareaRef.current.style.height = "inherit";
43 | textareaRef.current.style.height = `${textareaRef.current?.scrollHeight}px`;
44 | }
45 | }, [content]);
46 |
47 | return (
48 |
49 |
59 |
60 | handleSend()}>
61 |
62 |
63 |
64 | );
65 | };
66 |
--------------------------------------------------------------------------------
/pages/index.tsx:
--------------------------------------------------------------------------------
1 | import { Chat } from "@/components/Chat/Chat";
2 | import { Navbar } from "@/components/Layout/Navbar";
3 | import { Message } from "@/types";
4 | import Head from "next/head";
5 | import Link from "next/link";
6 | import { FC, KeyboardEvent, useEffect, useRef, useState } from "react";
7 | import Router from "next/router";
8 | import localForage from "localforage";
9 |
10 | export default function Home() {
11 | //useEffect(() => {
12 | //if (localForage.getItem("APIKEY") !== null){
13 |
14 | //Router.push('/app')
15 |
16 | //}
17 | //else {
18 | //Router.push('/');
19 | //}
20 | //},
21 | //[]);
22 |
23 | const [content, setContent] = useState();
24 |
25 | const handleChange = (e: React.ChangeEvent) => {
26 | setContent(e.target.value);
27 | };
28 |
29 | const handleKeyDown = (e: KeyboardEvent) => {
30 | if (e.key === "Enter" && !e.shiftKey) {
31 | e.preventDefault();
32 | handleSend();
33 | }
34 | };
35 |
36 | const handleSend = () => {
37 | if (!content && localForage.getItem("APIKEY") === null) {
38 | alert("Please enter a key. No key provided previously either.");
39 | return;
40 | } else if (!content && localForage.getItem("APIKEY") != null) {
41 | return;
42 | } else {
43 | localForage.setItem("APIKEY", String(content));
44 | }
45 | };
46 | return (
47 | <>
48 |
49 | Assist GPT
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 | OpenAI Key - Just click go to app if you have previously entered the
59 | key before
60 |
61 |
62 |
71 |
72 |
77 | Go to app
78 |
79 |
80 |
81 |
82 | >
83 | );
84 | }
85 |
--------------------------------------------------------------------------------
/.github/workflows/docker-publish.yml:
--------------------------------------------------------------------------------
1 | name: Docker
2 |
3 | # This workflow uses actions that are not certified by GitHub.
4 | # They are provided by a third-party and are governed by
5 | # separate terms of service, privacy policy, and support
6 | # documentation.
7 |
8 | on:
9 | schedule:
10 | - cron: '26 22 * * *'
11 | push:
12 | branches: [ "main" ]
13 | # Publish semver tags as releases.
14 | tags: [ 'v*.*.*' ]
15 | pull_request:
16 | branches: [ "main" ]
17 |
18 | env:
19 | # Use docker.io for Docker Hub if empty
20 | REGISTRY: ghcr.io
21 | # github.repository as /
22 | IMAGE_NAME: ${{ github.repository }}
23 |
24 |
25 | jobs:
26 | build:
27 |
28 | runs-on: ubuntu-latest
29 | permissions:
30 | contents: read
31 | packages: write
32 | # This is used to complete the identity challenge
33 | # with sigstore/fulcio when running outside of PRs.
34 | id-token: write
35 |
36 | steps:
37 | - name: Checkout repository
38 | uses: actions/checkout@v3
39 |
40 | # Install the cosign tool except on PR
41 | # https://github.com/sigstore/cosign-installer
42 | - name: Install cosign
43 | if: github.event_name != 'pull_request'
44 | uses: sigstore/cosign-installer@f3c664df7af409cb4873aa5068053ba9d61a57b6 #v2.6.0
45 | with:
46 | cosign-release: 'v1.13.1'
47 |
48 |
49 | # Workaround: https://github.com/docker/build-push-action/issues/461
50 | - name: Setup Docker buildx
51 | uses: docker/setup-buildx-action@79abd3f86f79a9d68a23c75a09a9a85889262adf
52 |
53 | # Login against a Docker registry except on PR
54 | # https://github.com/docker/login-action
55 | - name: Log into registry ${{ env.REGISTRY }}
56 | if: github.event_name != 'pull_request'
57 | uses: docker/login-action@28218f9b04b4f3f62068d7b6ce6ca5b26e35336c
58 | with:
59 | registry: ${{ env.REGISTRY }}
60 | username: ${{ github.actor }}
61 | password: ${{ secrets.GITHUB_TOKEN }}
62 |
63 | # Extract metadata (tags, labels) for Docker
64 | # https://github.com/docker/metadata-action
65 | - name: Extract Docker metadata
66 | id: meta
67 | uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
68 | with:
69 | images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
70 |
71 | # Build and push Docker image with Buildx (don't push on PR)
72 | # https://github.com/docker/build-push-action
73 | - name: Build and push Docker image
74 | id: build-and-push
75 | uses: docker/build-push-action@ac9327eae2b366085ac7f6a2d02df8aa8ead720a
76 | with:
77 | context: .
78 | push: ${{ github.event_name != 'pull_request' }}
79 | tags: ${{ steps.meta.outputs.tags }}
80 | labels: ${{ steps.meta.outputs.labels }}
81 | cache-from: type=gha
82 | cache-to: type=gha,mode=max
83 |
84 |
85 | # Sign the resulting Docker image digest except on PRs.
86 | # This will only write to the public Rekor transparency log when the Docker
87 | # repository is public to avoid leaking data. If you would like to publish
88 | # transparency data even for private images, pass --force to cosign below.
89 | # https://github.com/sigstore/cosign
90 | - name: Sign the published Docker image
91 | if: ${{ github.event_name != 'pull_request' }}
92 | env:
93 | COSIGN_EXPERIMENTAL: "true"
94 | # This step uses the identity token to provision an ephemeral certificate
95 | # against the sigstore community Fulcio instance.
96 | run: echo "${{ steps.meta.outputs.tags }}" | xargs -I {} cosign sign {}@${{ steps.build-and-push.outputs.digest }}
97 |
--------------------------------------------------------------------------------
/pages/app.tsx:
--------------------------------------------------------------------------------
1 | import { Chat } from "@/components/Chat/Chat";
2 | import { Navbar } from "@/components/Layout/Navbar";
3 | import { Message } from "@/types";
4 | import Head from "next/head";
5 | import { useEffect, useRef, useState } from "react";
6 | import { ChatOpenAI } from "langchain/chat_models/openai";
7 | import { HumanChatMessage } from "langchain/schema";
8 | import { ConversationalRetrievalQAChain, LLMChain } from "langchain/chains";
9 | import {
10 | SystemMessagePromptTemplate,
11 | HumanMessagePromptTemplate,
12 | ChatPromptTemplate,
13 | } from "langchain/prompts";
14 | import { MemoryVectorStore } from "langchain/vectorstores/memory";
15 | import { OpenAIEmbeddings } from "langchain/embeddings/openai";
16 | import { RecursiveCharacterTextSplitter } from "langchain/text_splitter";
17 | import localForage from "localforage";
18 | import { Document } from "langchain/dist/document";
19 | import { Thoughts } from "@/components/Memory/Thoughts";
20 | import { Vectors } from "@/components/Memory/Vectors";
21 | import { OpenAI } from "langchain/llms/openai";
22 | import { Context } from "@/components/Memory/Context";
23 | import assert from "node:assert";
24 | import { get_encoding, encoding_for_model } from "tiktoken";
25 |
26 | const today = new Date();
27 | const monthNames = [
28 | "January",
29 | "February",
30 | "March",
31 | "April",
32 | "May",
33 | "June",
34 | "July",
35 | "August",
36 | "September",
37 | "October",
38 | "November",
39 | "December",
40 | ];
41 | const dateString =
42 | monthNames[today.getMonth()] +
43 | " " +
44 | today.getDate() +
45 | " " +
46 | today.getFullYear();
47 |
48 | export default function App() {
49 | const [messages, setMessages] = useState([]);
50 | const [loading, setLoading] = useState(false);
51 | const [loadingSave, setLoadingSave] = useState(false);
52 |
53 | const [thoughts, setThoughts] = useState("");
54 | const [lastThought, setLastThought] = useState("");
55 |
56 | const [vectorsString, setVectorsString] = useState("");
57 |
58 | const [contextInjection, setContextInjection] = useState("");
59 |
60 | const [apiKey, setAPIKey] = useState("");
61 |
62 | const messagesEndRef = useRef(null);
63 |
64 | const [isInitiated, setIsInitiated] = useState(false);
65 |
66 | const [totalHistoryLength, setTotalHistorylength] = useState(0);
67 |
68 | const scrollToBottom = () => {
69 | messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
70 | };
71 |
72 | async function handleSend(message: Message) {
73 | setLoading(true);
74 |
75 | const updatedMessages = [...messages, message];
76 | if (/^sk-/.test(apiKey) != true) {
77 | alert("API key not provided");
78 | }
79 | if (totalHistoryLength > 3500) {
80 | alert("Current conversation limit approaching! Consider Saving to not lose this conversation.");
81 | }
82 | const key = apiKey;
83 | const chat = new ChatOpenAI({ openAIApiKey: key, temperature: 0.7 });
84 | const model = new OpenAI({ openAIApiKey: key, temperature: 0 });
85 | const promptString = `You are AssistGPT, an AI assistant with long term memory. You are a very good listener and are very empathetic. You will always try to ask follow up questions to keep the conversation going.
86 | Today is ${dateString}.
87 | These are important things you have to remember from your persistent memory: "{importantItems}".
88 | These are past conversations with the user from your long term memory to provide context: "{historicalData}".
89 | You will not say you don't know something if there is something in your memory that is relevant.
90 | This is the current conversation with the user in this session: "{messageHistory}"`
91 | const assistantPrompt = ChatPromptTemplate.fromPromptMessages([
92 | SystemMessagePromptTemplate.fromTemplate(promptString
93 | ),
94 | HumanMessagePromptTemplate.fromTemplate("{text}"),
95 | ]);
96 | const chain = new LLMChain({
97 | prompt: assistantPrompt,
98 | llm: chat,
99 | });
100 |
101 | setMessages(updatedMessages);
102 | setLoading(true);
103 |
104 | let messageHistory = "";
105 | for (let i = 0; i < updatedMessages.length; i++) {
106 | messageHistory = messageHistory.concat(
107 | `${updatedMessages[i].role}: ${updatedMessages[i].content}; `
108 | );
109 | }
110 | let results = "Next conversation snippet: ";
111 | if (vectorsString != "") {
112 | let vectors: any = new Map(JSON.parse(vectorsString));
113 | let vectorStore: MemoryVectorStore = new MemoryVectorStore(
114 | new OpenAIEmbeddings({ openAIApiKey: key })
115 | );
116 | vectors.forEach(async (values: Document[], keys: number[][]) => {
117 | await vectorStore.addVectors(keys, values);
118 | });
119 |
120 | const response = await vectorStore
121 | .asRetriever()
122 | .getRelevantDocuments(messageHistory);
123 |
124 | for (let i = 0; i < response.length; i++) {
125 | results = results.concat(
126 | response[i].pageContent + "\nNext conversation snippet: "
127 | );
128 | }
129 | setContextInjection(results);
130 | }
131 | console.log(
132 | `History:${messageHistory} <==> Important: ${thoughts} <==> Context: ${results}`
133 | );
134 |
135 | var response = await chain.call({
136 | importantItems: thoughts,
137 | historicalData: results,
138 | messageHistory: messageHistory,
139 | text: message.content,
140 | });
141 |
142 | let currentHistoryLength = promptString+thoughts+results+messageHistory+message.content+response.text;
143 | const enc = encoding_for_model("gpt-3.5-turbo");
144 | let currentHistoryTokens = enc.encode(currentHistoryLength);
145 |
146 | setTotalHistorylength(currentHistoryTokens.length);
147 |
148 | let isFirst = true;
149 |
150 | if (isFirst) {
151 | isFirst = false;
152 | setMessages((messages) => [
153 | ...messages,
154 | {
155 | role: "assistant",
156 | content: response.text,
157 | },
158 | ]);
159 | setLoading(false);
160 | } else {
161 | setMessages((messages) => {
162 | const lastMessage = messages[messages.length - 1];
163 | const updatedMessage = {
164 | ...lastMessage,
165 | content: lastMessage.content + response,
166 | };
167 | setLoading(false);
168 | return [...messages.slice(0, -1), updatedMessage];
169 | });
170 | }
171 | }
172 |
173 | async function handleSave() {
174 | setLoadingSave(true);
175 |
176 | const key = apiKey;
177 | const chat = new ChatOpenAI({ openAIApiKey: key, temperature: 0 });
178 |
179 | let messageHistory: string = "";
180 |
181 | messageHistory = `${dateString}: `;
182 | for (let i = 0; i < messages.length; i++) {
183 | messageHistory = messageHistory.concat(
184 | `${messages[i].role}: ${messages[i].content};\n `
185 | );
186 | }
187 | const splitter = new RecursiveCharacterTextSplitter({
188 | chunkSize: 500,
189 | chunkOverlap: 10,
190 | });
191 | const output = await splitter.createDocuments([messageHistory]);
192 | let embedder = new OpenAIEmbeddings({ openAIApiKey: key });
193 |
194 | let docs: Document[] = [];
195 | let docStrings: string[] = [];
196 |
197 | if (vectorsString == "") {
198 | let vectors: Map = new Map();
199 |
200 | for (let i = 0; i < output.length; i++) {
201 | docs.push(output[i]);
202 | docStrings.push(output[i].pageContent);
203 | }
204 | const vectorKey = await embedder.embedDocuments(docStrings);
205 | vectors.set(vectorKey, docs);
206 | setVectorsString(JSON.stringify(Array.from(vectors.entries())));
207 | } else {
208 | let vectors: any = new Map(JSON.parse(vectorsString));
209 | for (let i = 0; i < output.length; i++) {
210 | docs.push(output[i]);
211 | docStrings.push(output[i].pageContent);
212 | }
213 | const vectorKey = await embedder.embedDocuments(docStrings);
214 | vectors.set(vectorKey, docs);
215 | setVectorsString(JSON.stringify(Array.from(vectors.entries())));
216 | }
217 |
218 | if (thoughts == "") {
219 | const importantItems = await chat.call([
220 | new HumanChatMessage(`This is the message history between the user and an AI: "${messageHistory}".
221 | You are tasked with making a persistent memory with only the most important things to remember for the AI.
222 | Only if the user specifically asked to remember something important or add to persistent memory, summarize it.
223 | Do NOT write anything extra.
224 | Be SPECIFIC with dates.`),
225 | ]);
226 | setThoughts(importantItems.text);
227 | setLastThought(importantItems.text);
228 | } else {
229 | setLastThought(thoughts);
230 | const importantItems = await chat.call([
231 | new HumanChatMessage(`This is the message history between the user and an AI: "${messageHistory}".
232 | You are tasked with making a persistent memory with only the most important things to remember for the AI.
233 | These are the current important things to remember for the user in persistent memory: "${thoughts}".
234 | If the user specifically discussed important items to remembe or to add to persistent memory, append the list with updates to important things to remember.
235 | Delete anything the user requests to delete.
236 | Do NOT write anything extra.
237 | Be SPECIFIC with dates.
238 | `),
239 | ]);
240 | setThoughts(importantItems.text);
241 | }
242 |
243 | setLoadingSave(false);
244 | handleReset();
245 | }
246 |
247 | function handleReset() {
248 | setMessages([
249 | {
250 | role: "assistant",
251 | content: `Hi there!`,
252 | },
253 | ]);
254 | }
255 |
256 | const handleUndo = async () => {
257 | setThoughts(lastThought);
258 | };
259 |
260 | const handleThoughtsChange = (e: React.ChangeEvent) => {
261 | const value = e.target.value;
262 |
263 | if (value.length > 1000) {
264 | alert("Thoughts limit is 1000 characters");
265 | return;
266 | }
267 |
268 | setThoughts(value);
269 | };
270 |
271 | const handleVectorsChange = (e: React.ChangeEvent) => {
272 | const value = e.target.value;
273 |
274 | setVectorsString(value);
275 | };
276 |
277 | useEffect(() => {
278 | setMessages([
279 | {
280 | role: "assistant",
281 | content: `Hi there! I'm AssistGPT. How can I help you?`,
282 | },
283 | ]);
284 | const setInitials = async () => {
285 | if ((await localForage.getItem("importantItems")) != null) {
286 | setThoughts(String(await localForage.getItem("importantItems")));
287 | setLastThought(String(await localForage.getItem("importantItems")));
288 | }
289 | if ((await localForage.getItem("vectorStoreData")) != null) {
290 | setVectorsString(String(await localForage.getItem("vectorStoreData")));
291 | }
292 |
293 | if ((await localForage.getItem("APIKEY")) != null) {
294 | setAPIKey(String(await localForage.getItem("APIKEY")));
295 | }
296 | };
297 | setInitials().then(() => {
298 | setIsInitiated(true);
299 | });
300 | }, []);
301 |
302 | useEffect(() => {
303 | scrollToBottom();
304 | }, [messages]);
305 |
306 | useEffect(() => {
307 | if (isInitiated) {
308 | localForage.setItem("importantItems", thoughts);
309 | }
310 | }, [thoughts, isInitiated]);
311 |
312 | useEffect(() => {
313 | if (isInitiated) {
314 | localForage.setItem("vectorStoreData", vectorsString);
315 | }
316 | }, [isInitiated, vectorsString]);
317 |
318 | return (
319 | <>
320 |
321 | Assist GPT
322 |
323 |
324 |
325 |
326 |
327 |
328 |
329 |
341 |
342 |
347 |
348 |
352 |
353 |
354 |
355 | >
356 | );
357 | }
358 |
--------------------------------------------------------------------------------