├── .env.local.example
├── public
├── favicon.ico
└── vercel.svg
├── pages
├── index.tsx
├── _document.tsx
├── _app.tsx
├── api
│ ├── edit.ts
│ ├── embedding.ts
│ └── completion.ts
└── demos
│ ├── editor.tsx
│ ├── pig-latin.tsx
│ ├── movie-to-emoji.tsx
│ ├── docstring.tsx
│ ├── tweet-sentiment-classifier.tsx
│ ├── sql-generator.tsx
│ ├── chatbot.tsx
│ ├── fake-review-generator.tsx
│ ├── temperature.tsx
│ ├── top-p.tsx
│ └── search.tsx
├── next.config.js
├── .gitignore
├── .pre-commit-config.yaml
├── components
├── layout.tsx
├── completion.tsx
└── navbar.tsx
├── tsconfig.json
├── package.json
├── .github
└── workflows
│ └── node.js.yml
├── LICENSE.md
├── README.md
├── .eslintrc.js
└── libs
└── openai-client.ts
/.env.local.example:
--------------------------------------------------------------------------------
1 | OPENAI_API_KEY=asdf
2 |
--------------------------------------------------------------------------------
/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/babldev/gpt3-demo/HEAD/public/favicon.ico
--------------------------------------------------------------------------------
/pages/index.tsx:
--------------------------------------------------------------------------------
1 | export default function Home() {
2 | return 'Welcome to the GPT-3 interactive demo! Select a demo on the left.';
3 | }
4 |
--------------------------------------------------------------------------------
/next.config.js:
--------------------------------------------------------------------------------
1 | /** @type {import('next').NextConfig} */
2 | const nextConfig = {
3 | reactStrictMode: true,
4 | swcMinify: true,
5 | }
6 |
7 | module.exports = nextConfig
8 |
--------------------------------------------------------------------------------
/pages/_document.tsx:
--------------------------------------------------------------------------------
1 | import { createGetInitialProps } from '@mantine/next';
2 | import Document, {
3 | Head, Html, Main, NextScript,
4 | } from 'next/document';
5 |
6 | const getInitialProps = createGetInitialProps();
7 |
8 | export default class Gpt3Document extends Document {
9 | static getInitialProps = getInitialProps;
10 |
11 | render() {
12 | return (
13 |
14 |
16 |
17 |
18 |
19 |
20 | );
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 |
8 | # testing
9 | /coverage
10 |
11 | # next.js
12 | /.next/
13 | /out/
14 |
15 | # production
16 | /build
17 |
18 | # misc
19 | .DS_Store
20 | *.pem
21 |
22 | # debug
23 | npm-debug.log*
24 | yarn-debug.log*
25 | yarn-error.log*
26 | .pnpm-debug.log*
27 |
28 | # local env files
29 | .env*.local
30 |
31 | # vercel
32 | .vercel
33 |
34 | # typescript
35 | *.tsbuildinfo
36 | next-env.d.ts
37 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v4.1.0
4 | hooks:
5 | - id: trailing-whitespace
6 | - id: end-of-file-fixer
7 | - id: check-yaml
8 | - id: check-added-large-files
9 | - repo: https://github.com/pre-commit/mirrors-eslint
10 | rev: v8.12.0
11 | hooks:
12 | - id: eslint
13 | files: \.[jt]sx?$ # *.js, *.jsx, *.ts and *.tsx
14 | types: [file]
15 | args: ['--fix']
16 | - repo: local
17 | hooks:
18 | - id: typescript-check
19 | name: typescript-check
20 | entry: npx tsc
21 | language: system
22 | types_or: [ts, tsx]
23 | pass_filenames: false
24 |
--------------------------------------------------------------------------------
/components/layout.tsx:
--------------------------------------------------------------------------------
1 | import {
2 | AppShell, Header, Title,
3 | } from '@mantine/core';
4 | import React from 'react';
5 | import AppNavbar from './navbar';
6 |
7 | export default function Layout({ children }: { children: React.ReactNode }) {
8 | return (
9 | }
12 | header={(
13 |
16 | )}
17 | styles={(theme) => ({
18 | main: { backgroundColor: theme.colorScheme === 'dark' ? theme.colors.dark[8] : theme.colors.gray[0] },
19 | })}
20 | >
21 | { children }
22 |
23 | );
24 | }
25 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "jsx": "preserve",
4 | "target": "es5",
5 | "lib": [
6 | "dom",
7 | "dom.iterable",
8 | "esnext"
9 | ],
10 | "allowJs": false,
11 | "skipLibCheck": true,
12 | "strict": true,
13 | "forceConsistentCasingInFileNames": true,
14 | "noEmit": true,
15 | "incremental": true,
16 | "esModuleInterop": true,
17 | "module": "esnext",
18 | "moduleResolution": "node",
19 | "resolveJsonModule": true,
20 | "isolatedModules": true,
21 | "experimentalDecorators": true,
22 | "baseUrl": "./"
23 | },
24 | "include": [
25 | "next-env.d.ts",
26 | "**/*.ts",
27 | "**/*.tsx"
28 | ],
29 | "exclude": [
30 | "node_modules"
31 | ]
32 | }
33 |
--------------------------------------------------------------------------------
/pages/_app.tsx:
--------------------------------------------------------------------------------
1 | import { AppProps } from 'next/app';
2 | import Head from 'next/head';
3 | import { MantineProvider } from '@mantine/core';
4 | import Layout from 'components/layout';
5 |
6 | export default function App(props: AppProps) {
7 | const { Component, pageProps } = props;
8 |
9 | return (
10 | <>
11 |
12 | GPT-3 Demo
13 |
14 |
15 |
16 |
24 |
25 |
26 |
27 |
28 | >
29 | );
30 | }
31 |
--------------------------------------------------------------------------------
/pages/api/edit.ts:
--------------------------------------------------------------------------------
1 | import type { NextApiRequest, NextApiResponse } from 'next';
2 | import {
3 | Configuration, CreateEditRequest, CreateEditResponse, OpenAIApi,
4 | } from 'openai';
5 |
6 | const configuration = new Configuration({
7 | apiKey: process.env.OPENAI_API_KEY,
8 | });
9 | const openai = new OpenAIApi(configuration);
10 | type APIError = { message: string };
11 |
12 | export default async function handler(
13 | req: NextApiRequest,
14 | res: NextApiResponse,
15 | ) {
16 | if (req.method !== 'POST') {
17 | res.status(405).send({ message: 'Only POST requests allowed' });
18 | return;
19 | }
20 |
21 | // This demo repo "trusts" the input and does not validate.
22 | const body = req.body as CreateEditRequest;
23 |
24 | const edit = await openai.createEdit(body);
25 | res.status(200).json(edit.data);
26 | }
27 |
--------------------------------------------------------------------------------
/pages/api/embedding.ts:
--------------------------------------------------------------------------------
1 | import type { NextApiRequest, NextApiResponse } from 'next';
2 | import {
3 | Configuration, CreateEmbeddingResponse, CreateEmbeddingRequest, OpenAIApi,
4 | } from 'openai';
5 |
6 | const configuration = new Configuration({
7 | apiKey: process.env.OPENAI_API_KEY,
8 | });
9 | const openai = new OpenAIApi(configuration);
10 | type APIError = { message: string };
11 |
12 | export default async function handler(
13 | req: NextApiRequest,
14 | res: NextApiResponse,
15 | ) {
16 | if (req.method !== 'POST') {
17 | res.status(405).send({ message: 'Only POST requests allowed' });
18 | return;
19 | }
20 |
21 | // This demo repo "trusts" the input and does not validate.
22 | const body = req.body as CreateEmbeddingRequest;
23 |
24 | const completion = await openai.createEmbedding(body);
25 | res.status(200).json(completion.data);
26 | }
27 |
--------------------------------------------------------------------------------
/pages/api/completion.ts:
--------------------------------------------------------------------------------
1 | import type { NextApiRequest, NextApiResponse } from 'next';
2 | import {
3 | Configuration, CreateCompletionRequest, CreateCompletionResponse, OpenAIApi,
4 | } from 'openai';
5 |
6 | const configuration = new Configuration({
7 | apiKey: process.env.OPENAI_API_KEY,
8 | });
9 | const openai = new OpenAIApi(configuration);
10 | type APIError = { message: string };
11 |
12 | export default async function handler(
13 | req: NextApiRequest,
14 | res: NextApiResponse,
15 | ) {
16 | if (req.method !== 'POST') {
17 | res.status(405).send({ message: 'Only POST requests allowed' });
18 | return;
19 | }
20 |
21 | // This demo repo "trusts" the input and does not validate.
22 | const body = req.body as CreateCompletionRequest;
23 |
24 | const completion = await openai.createCompletion(body);
25 | res.status(200).json(completion.data);
26 | }
27 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "gpt3-demo",
3 | "version": "0.1.0",
4 | "private": true,
5 | "scripts": {
6 | "dev": "next dev",
7 | "build": "next build",
8 | "start": "next start",
9 | "lint": "next lint"
10 | },
11 | "dependencies": {
12 | "@mantine/core": "^5.2.3",
13 | "@mantine/form": "^5.2.4",
14 | "@mantine/hooks": "^5.2.3",
15 | "@mantine/next": "^5.2.3",
16 | "@mantine/prism": "^5.2.6",
17 | "@tabler/icons": "^1.86.0",
18 | "next": "12.2.5",
19 | "openai": "^3.0.0",
20 | "react": "18.2.0",
21 | "react-dom": "18.2.0"
22 | },
23 | "devDependencies": {
24 | "@types/node": "18.7.13",
25 | "@types/react": "18.0.17",
26 | "@types/react-dom": "18.0.6",
27 | "eslint": "8.22.0",
28 | "eslint-config-airbnb": "^19.0.4",
29 | "eslint-config-airbnb-typescript": "^17.0.0",
30 | "eslint-config-next": "12.2.5",
31 | "typescript": "4.7.4"
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/.github/workflows/node.js.yml:
--------------------------------------------------------------------------------
1 | # This workflow installs node dependencies (with caching) and runs the pre-commit hooks (lint, type-check, file checks) on every push and pull request
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
3 |
4 | name: pre-commit
5 |
6 | on:
7 | push:
8 | branches: [ "master" ]
9 | pull_request:
10 | branches: [ "master" ]
11 | jobs:
12 | build:
13 | runs-on: ubuntu-latest
14 | strategy:
15 | matrix:
16 | node-version: [18.x]
17 | python-version: ["3.10"]
18 | steps:
19 | - uses: actions/checkout@v3
20 | - name: Use Node.js ${{ matrix.node-version }}
21 | uses: actions/setup-node@v3
22 | with:
23 | node-version: ${{ matrix.node-version }}
24 | cache: 'npm'
25 | - run: python -m pip install pre-commit
26 | - run: npm ci
27 | - run: pre-commit run --all-files
28 |
--------------------------------------------------------------------------------
/public/vercel.svg:
--------------------------------------------------------------------------------
1 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Brady Law
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/components/completion.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import {
3 | Code, Mark, Paper, Table,
4 | } from '@mantine/core';
5 | import { CreateCompletionRequest } from 'openai';
6 |
7 | interface GPTCompletionProps {
8 | request: CreateCompletionRequest;
9 | result?: string;
10 | }
11 |
12 | export default function GPTCompletion({ request, result }: GPTCompletionProps) {
13 | return (
14 |
15 |
16 | { request.prompt }
17 | { result }
18 |
19 |
20 |
21 | { Object.entries(request).map(([key, value]) => {
22 | if (key === 'prompt') {
23 | return null;
24 | }
25 | return (
26 |
27 | { key }
28 | { value }
29 |
30 | );
31 | })}
32 |
33 |
34 |
35 | );
36 | }
37 | GPTCompletion.defaultProps = {
38 | result: undefined,
39 | };
40 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # gpt3-demo
2 |
3 | A collection of interactive GPT-3 demos in a NextJS+Mantine web app.
4 |
5 |
6 |
8 |
9 |
10 | ## Bootstrap
11 |
12 | Run the webserver locally to interact with the demo
13 |
14 | ### Node 18.* recommended
15 |
16 | ```sh
17 | node --version
18 | v18.7.0
19 | ```
20 |
21 | Other versions may work but your mileage may vary. Installable here: https://nodejs.org/en/download/current/
22 |
23 | ### Open AI token required
24 |
25 | Create one here: [https://openai.com/api/](https://openai.com/api/)
26 |
27 | ```sh
28 | cp .env.local.example .env.local
29 | ```
30 |
31 | Declare your API token in `.env.local`
32 |
33 | ### Codex note
34 |
35 | Codex is in private beta and requires approval from Open AI. See details here: https://openai.com/blog/openai-codex/
36 |
37 | ### Starting the server
38 |
39 | ```sh
40 | npm install
41 | npm run dev
42 | ```
43 |
44 | If you are seeing HTTP Code 429 errors, make sure your OpenAI account is in good standing (trial could be expired).
45 |
46 | ## Pre-commit setup
47 | ```sh
48 | python3 -m pip install pre-commit
49 | pre-commit install
50 | ```
51 |
--------------------------------------------------------------------------------
/.eslintrc.js:
--------------------------------------------------------------------------------
1 | // @ts-check
2 |
3 | /**
4 | * @type {import('eslint').Linter.Config}
5 | **/
6 | module.exports = {
7 | env: {
8 | browser: true,
9 | es2021: true,
10 | },
11 | extends: [
12 | 'airbnb',
13 | 'airbnb-typescript',
14 | 'plugin:@next/next/recommended',
15 | 'plugin:@typescript-eslint/recommended',
16 | 'plugin:import/recommended',
17 | 'plugin:import/typescript',
18 | 'next/core-web-vitals'
19 | ],
20 | parser: '@typescript-eslint/parser',
21 | parserOptions: {
22 | ecmaFeatures: {
23 | jsx: true,
24 | },
25 | ecmaVersion: 'latest',
26 | sourceType: 'module',
27 | project: './tsconfig.json'
28 | },
29 | settings: {
30 | 'import/parsers': {
31 | '@typescript-eslint/parser': ['.ts', '.tsx']
32 | },
33 | 'import/resolver': {
34 | 'node': {
35 | 'paths': ['./']
36 | }
37 | }
38 | },
39 | plugins: [
40 | '@typescript-eslint'
41 | ],
42 | ignorePatterns: ['.eslintrc.js', 'next.config.js'],
43 | rules: {
44 | 'import/no-unresolved': 'error',
45 | 'no-unused-vars': 'error',
46 | 'import/prefer-default-export': 'off',
47 | 'no-console': 'off',
48 | 'react/jsx-props-no-spreading': 'off',
49 | 'no-constant-condition': 'off',
50 | 'no-continue': 'off',
51 | 'no-underscore-dangle': 'off',
52 | },
53 | };
54 |
--------------------------------------------------------------------------------
/libs/openai-client.ts:
--------------------------------------------------------------------------------
1 | import {
2 | CreateCompletionRequest, CreateCompletionResponse, CreateEditRequest,
3 | CreateEditResponse, CreateEmbeddingRequest, CreateEmbeddingResponse,
4 | } from 'openai';
5 |
6 | async function getCompletion(request: CreateCompletionRequest): Promise {
7 | return fetch('/api/completion', {
8 | method: 'POST',
9 | headers: {
10 | 'Content-Type': 'application/json',
11 | },
12 | body: JSON.stringify(request),
13 | }).then(
14 | (response) => response.json() as CreateCompletionResponse,
15 | );
16 | }
17 |
18 | async function getEdit(request: CreateEditRequest): Promise {
19 | return fetch('/api/edit', {
20 | method: 'POST',
21 | headers: {
22 | 'Content-Type': 'application/json',
23 | },
24 | body: JSON.stringify(request),
25 | }).then(
26 | (response) => response.json() as CreateEditResponse,
27 | );
28 | }
29 |
30 | async function getEmbedding(request: CreateEmbeddingRequest): Promise {
31 | return fetch('/api/embedding', {
32 | method: 'POST',
33 | headers: {
34 | 'Content-Type': 'application/json',
35 | },
36 | body: JSON.stringify(request),
37 | }).then(
38 | (response) => response.json() as CreateEmbeddingResponse,
39 | );
40 | }
41 |
42 | export { getCompletion, getEdit, getEmbedding };
43 |
--------------------------------------------------------------------------------
/pages/demos/editor.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 |
3 | import {
4 | Box, Button, Divider, Paper, Stack, Text, Textarea, Title,
5 | } from '@mantine/core';
6 | import { useForm } from '@mantine/form';
7 | import GPTCompletion from 'components/completion';
8 | import { CreateEditRequest } from 'openai';
9 | import { getEdit } from 'libs/openai-client';
10 |
11 | interface FormValues {
12 | input: string;
13 | }
14 |
15 | function generatePrompt({ input }: FormValues): CreateEditRequest {
16 | return {
17 | model: 'text-davinci-edit-001',
18 | input,
19 | instruction: 'Fix capitalization, typos, and grammatical issues.',
20 | temperature: 0,
21 | };
22 | }
23 |
24 | export default function Editor() {
25 | const form = useForm({
26 | initialValues: {
27 | input: 'i update page With new assetz. it loks better noww. '
28 | + 'sry for ani typos i was in a hurry and had to tipe quickly',
29 | },
30 | validate: {
31 | input: (value) => (value.length > 0 ? null : 'Invalid input'),
32 | },
33 | });
34 | const [loading, setLoading] = React.useState(false);
35 | const [result, setResult] = React.useState(undefined);
36 | const [openaiRequest, setOpenaiRequest] = React
37 | .useState(undefined);
38 |
39 | const onSubmit = async (values: FormValues) => {
40 | setLoading(true);
41 | const request = generatePrompt(values);
42 | setOpenaiRequest(request);
43 | await getEdit(request).then((completion) => {
44 | setResult(completion && completion.choices
45 | ? completion.choices[0].text
46 | : 'No result, check the logs.');
47 | }).finally(() => {
48 | setLoading(false);
49 | });
50 | };
51 |
52 | return (
53 |
54 |
76 |
77 | );
78 | }
79 |
--------------------------------------------------------------------------------
/pages/demos/pig-latin.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 |
3 | import {
4 | Box, Button, Divider, Paper, Stack, Text, TextInput, Title,
5 | } from '@mantine/core';
6 | import { useForm } from '@mantine/form';
7 | import GPTCompletion from 'components/completion';
8 | import { CreateCompletionRequest } from 'openai';
9 | import { getCompletion } from 'libs/openai-client';
10 |
11 | interface FormValues {
12 | input: string;
13 | }
14 |
15 | function generatePrompt({ input }: FormValues): CreateCompletionRequest {
16 | const prompt = `Convert into pig latin:
17 |
18 | Normal: ${input}
19 | Pig latin:`;
20 |
21 | return {
22 | model: 'text-davinci-002',
23 | prompt,
24 | max_tokens: 60,
25 | temperature: 0.8,
26 | stop: '\n',
27 | };
28 | }
29 |
30 | export default function PigLatinTranslator() {
31 | const form = useForm({
32 | initialValues: {
33 | input: 'Do you speak pig latin?',
34 | },
35 | validate: {
36 | input: (value) => (value.length > 0 ? null : 'Invalid input'),
37 | },
38 | });
39 | const [loading, setLoading] = React.useState(false);
40 | const [result, setResult] = React.useState(undefined);
41 | const [openaiRequest, setOpenaiRequest] = React
42 | .useState(undefined);
43 |
44 | const onSubmit = async (values: FormValues) => {
45 | setLoading(true);
46 | const request = generatePrompt(values);
47 | setOpenaiRequest(request);
48 | await getCompletion(request).then((completion) => {
49 | setResult(completion && completion.choices
50 | ? completion.choices[0].text
51 | : 'No result, check the logs.');
52 | }).finally(() => {
53 | setLoading(false);
54 | });
55 | };
56 |
57 | return (
58 |
59 |
60 |
61 |
67 | Translate!
68 | { (result && !loading)
69 | && (
70 | <>
71 |
72 | Result
73 |
74 | { result }
75 |
76 | Prompt
77 | { openaiRequest && }
78 | >
79 | )}
80 |
81 |
82 |
83 | );
84 | }
85 |
--------------------------------------------------------------------------------
/pages/demos/movie-to-emoji.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 |
3 | import {
4 | Box, Button, Divider, Paper, Stack, Text, TextInput, Title,
5 | } from '@mantine/core';
6 | import { useForm } from '@mantine/form';
7 | import GPTCompletion from 'components/completion';
8 | import { CreateCompletionRequest } from 'openai';
9 | import { getCompletion } from 'libs/openai-client';
10 |
11 | interface FormValues {
12 | movie: string;
13 | }
14 |
15 | function generatePrompt({ movie }: FormValues): CreateCompletionRequest {
16 | // Borrowed from gpt3 examples https://beta.openai.com/examples/default-movie-to-emoji
17 | const prompt = `Convert movie titles into emoji.
18 |
19 | Back to the Future: 👨👴🚗🕒
20 | Batman: 🤵🦇
21 | Transformers: 🚗🤖
22 | ${movie}:`;
23 |
24 | return {
25 | model: 'text-davinci-002',
26 | prompt,
27 | max_tokens: 60,
28 | temperature: 0.8,
29 | stop: '\n',
30 | };
31 | }
32 |
33 | export default function MovieToEmoji() {
34 | const form = useForm({
35 | initialValues: {
36 | movie: 'The Matrix',
37 | },
38 | validate: {
39 | movie: (value) => (value.length > 0 ? null : 'Invalid movie'),
40 | },
41 | });
42 | const [loading, setLoading] = React.useState(false);
43 | const [result, setResult] = React.useState(undefined);
44 | const [openaiRequest, setOpenaiRequest] = React
45 | .useState(undefined);
46 |
47 | const onSubmit = async (values: FormValues) => {
48 | setLoading(true);
49 | const request = generatePrompt(values);
50 | setOpenaiRequest(request);
51 | await getCompletion(request).then((completion) => {
52 | setResult(completion && completion.choices
53 | ? completion.choices[0].text
54 | : 'No result, check the logs.');
55 | }).finally(() => {
56 | setLoading(false);
57 | });
58 | };
59 |
60 | return (
61 |
62 |
63 |
64 |
70 | Generate!
71 | { (result && !loading)
72 | && (
73 | <>
74 |
75 | Result
76 |
77 | { result }
78 |
79 | Prompt
80 | { openaiRequest && }
81 | >
82 | )}
83 |
84 |
85 |
86 | );
87 | }
88 |
--------------------------------------------------------------------------------
/pages/demos/docstring.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 |
3 | import { Prism } from '@mantine/prism';
4 | import {
5 | Box, Button, Divider, Paper, Stack, Textarea, Title,
6 | } from '@mantine/core';
7 | import { useForm } from '@mantine/form';
8 | import GPTCompletion from 'components/completion';
9 | import { CreateCompletionRequest } from 'openai';
10 | import { getCompletion } from 'libs/openai-client';
11 |
12 | interface FormValues {
13 | input: string;
14 | }
15 |
16 | const defaultCode = `def factorial(input: int) -> int:
17 | """Multiplies a number by every positive number less than it."""
18 | return input > 1 ? factorial(input - 1) * input : 1
19 |
20 | def poorly_named_func(input: float) -> str:
21 | """[insert]
22 | return '{:0.2f}'.format(input)`;
23 |
24 | function generatePrompt({ input }: FormValues): CreateCompletionRequest {
25 | const parts = input.split('[insert]');
26 | if (parts.length !== 2) {
27 | throw new Error('Invalid input, must contain [insert]');
28 | }
29 |
30 | return {
31 | model: 'code-davinci-002',
32 | prompt: `// code.py\n${parts[0]}`,
33 | suffix: parts[1],
34 | max_tokens: 60,
35 | temperature: 0,
36 | };
37 | }
38 |
39 | export default function Docstring() {
40 | const form = useForm({
41 | initialValues: {
42 | input: defaultCode,
43 | },
44 | validate: {
45 | input: (value) => (value.length > 0 ? null : 'Invalid input'),
46 | },
47 | });
48 | const [loading, setLoading] = React.useState(false);
49 | const [result, setResult] = React.useState(undefined);
50 | const [openaiRequest, setOpenaiRequest] = React
51 | .useState(undefined);
52 |
53 | const onSubmit = async (values: FormValues) => {
54 | setLoading(true);
55 | const request = generatePrompt(values);
56 | setOpenaiRequest(request);
57 | await getCompletion(request).then((completion) => {
58 | setResult(completion && completion.choices
59 | ? completion.choices[0].text
60 | : 'No result, check the logs.');
61 | }).finally(() => {
62 | setLoading(false);
63 | });
64 | };
65 |
66 | return (
67 |
68 |
69 |
70 |
76 | Document!
77 | { (result && !loading)
78 | && (
79 | <>
80 |
81 | Result
82 |
83 |
84 | { `${openaiRequest?.prompt}${result}${openaiRequest?.suffix}` }
85 |
86 |
87 | Prompt
88 | { openaiRequest && }
89 | >
90 | )}
91 |
92 |
93 |
94 | );
95 | }
96 |
--------------------------------------------------------------------------------
/pages/demos/tweet-sentiment-classifier.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 |
3 | import {
4 | Box, Button, Divider, Paper, Stack, Text, TextInput, Title,
5 | } from '@mantine/core';
6 | import { useForm } from '@mantine/form';
7 | import GPTCompletion from 'components/completion';
8 | import { CreateCompletionRequest } from 'openai';
9 | import { getCompletion } from 'libs/openai-client';
10 | import { IconThumbDown, IconThumbUp } from '@tabler/icons';
11 |
12 | interface FormValues {
13 | input: string;
14 | }
15 |
16 | function generatePrompt({ input }: FormValues): CreateCompletionRequest {
17 | const prompt = `User tweet: ${input}
18 |
19 | Tweet sentiment rating [Positive, Neutral, Negative]:`;
20 |
21 | return {
22 | model: 'text-davinci-002',
23 | prompt,
24 | max_tokens: 60,
25 | temperature: 0.8,
26 | stop: '\n',
27 | };
28 | }
29 |
30 | export default function TweetSentimentTranslator() {
31 | const form = useForm({
32 | initialValues: {
33 | input: 'Next I’m buying Coca-Cola to put the cocaine back in',
34 | },
35 | validate: {
36 | input: (value) => (value.length > 0 ? null : 'Invalid input'),
37 | },
38 | });
39 | const [loading, setLoading] = React.useState(false);
40 | const [result, setResult] = React.useState(undefined);
41 | const [openaiRequest, setOpenaiRequest] = React
42 | .useState(undefined);
43 |
44 | const onSubmit = async (values: FormValues) => {
45 | setLoading(true);
46 | const request = generatePrompt(values);
47 | setOpenaiRequest(request);
48 | await getCompletion(request).then((completion) => {
49 | setResult(completion && completion.choices
50 | ? completion.choices[0].text
51 | : 'No result, check the logs.');
52 | }).finally(() => {
53 | setLoading(false);
54 | });
55 | };
56 |
57 | return (
58 |
59 |
60 |
61 |
66 | Classify!
67 | { (result && !loading)
68 | && (
69 | <>
70 |
71 | Result
72 |
73 |
74 | { result.toLowerCase().indexOf('positive') >= 0
75 | && }
76 | { result.toLowerCase().indexOf('negative') >= 0
77 | && }
78 | { result }
79 |
80 |
81 | GPT-3 Prompt
82 | { openaiRequest && }
83 | >
84 | )}
85 |
86 |
87 |
88 | );
89 | }
90 |
--------------------------------------------------------------------------------
/pages/demos/sql-generator.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 |
3 | import { Prism } from '@mantine/prism';
4 | import {
5 | Box, Button, Divider, Paper, Stack, TextInput, Title,
6 | } from '@mantine/core';
7 | import { useForm } from '@mantine/form';
8 | import GPTCompletion from 'components/completion';
9 | import { CreateCompletionRequest } from 'openai';
10 | import { getCompletion } from 'libs/openai-client';
11 |
12 | interface FormValues {
13 | input: string;
14 | }
15 |
16 | const schema = `// Postgres SQL
17 | CREATE TABLE "User" (
18 | "id" TEXT NOT NULL,
19 | "email" TEXT NOT NULL,
20 | "firstName" TEXT NOT NULL,
21 | "lastName" TEXT NOT NULL,
22 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
23 | "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
24 | );
25 |
26 | CREATE TABLE "UserVisit" (
27 | "id" TEXT NOT NULL,
28 | "visitedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
29 | );`;
30 |
31 | function generatePrompt({ input }: FormValues): CreateCompletionRequest {
32 | const prompt = `${schema};
33 |
34 | // ${input}
35 | SELECT`;
36 |
37 | return {
38 | model: 'code-davinci-002',
39 | prompt,
40 | max_tokens: 60,
41 | temperature: 0,
42 | stop: ';',
43 | };
44 | }
45 |
46 | export default function SQLGenerator() {
47 | const form = useForm({
48 | initialValues: {
49 | input: 'Count the unique number of users with weekly visits to the website within the past 12 months.',
50 | },
51 | validate: {
52 | input: (value) => (value.length > 0 ? null : 'Invalid input'),
53 | },
54 | });
55 | const [loading, setLoading] = React.useState(false);
56 | const [result, setResult] = React.useState(undefined);
57 | const [openaiRequest, setOpenaiRequest] = React
58 | .useState(undefined);
59 |
60 | const onSubmit = async (values: FormValues) => {
61 | setLoading(true);
62 | const request = generatePrompt(values);
63 | setOpenaiRequest(request);
64 | await getCompletion(request).then((completion) => {
65 | setResult(completion && completion.choices
66 | ? completion.choices[0].text
67 | : 'No result, check the logs.');
68 | }).finally(() => {
69 | setLoading(false);
70 | });
71 | };
72 |
73 | return (
74 |
75 |
76 |
77 | DB Schema
78 |
79 |
80 | { schema }
81 |
82 |
83 |
88 | Translate!
89 | { (result && !loading)
90 | && (
91 | <>
92 |
93 | Result
94 |
95 | {`SELECT${result};`}
96 |
97 | Prompt
98 | { openaiRequest && }
99 | >
100 | )}
101 |
102 |
103 |
104 | );
105 | }
106 |
--------------------------------------------------------------------------------
/pages/demos/chatbot.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 |
3 | import {
4 | Box, Button, Divider, Grid, Paper, Stack, Text, TextInput, Title,
5 | } from '@mantine/core';
6 | import { useForm } from '@mantine/form';
7 | import GPTCompletion from 'components/completion';
8 | import { CreateCompletionRequest } from 'openai';
9 | import { getCompletion } from 'libs/openai-client';
10 |
11 | interface FormValues {
12 | input: string;
13 | }
14 |
15 | const conversationStart = `GPT responds to the user in a sarcastic tone of voice.
16 |
17 | GPT=You there, human. What do you want?`;
18 |
19 | function generatePrompt(
20 | { input }: FormValues,
21 | existingConversation: string,
22 | ): CreateCompletionRequest {
23 | const prompt = `${existingConversation}
24 | User=${input}
25 | GPT=`;
26 |
27 | return {
28 | model: 'text-davinci-002',
29 | prompt,
30 | max_tokens: 120,
31 | temperature: 0.8,
32 | };
33 | }
34 |
// Chat demo page. Keeps the full transcript (User=/GPT= lines) in state,
// sends it plus the new user message to the completion API, and appends
// the model's reply so context accumulates across turns.
// NOTE(review): JSX tags in this dump appear stripped by HTML extraction;
// restore the markup from the original source before editing the render tree.
35 | export default function Chatbot() {
36 | const form = useForm({
37 | initialValues: {
38 | input: "Hey, how's your day going?",
39 | },
40 | validate: {
41 | input: (value) => (value.length > 0 ? null : 'Invalid input'),
42 | },
43 | });
44 | const [loading, setLoading] = React.useState(false);
45 | const [conversation, setConversation] = React.useState(conversationStart);
46 | const [result, setResult] = React.useState(undefined);
47 | const [openaiRequest, setOpenaiRequest] = React
48 | .useState(undefined);
49 | 
// Sends one chat turn; 'loading' gates the Send button while in flight.
50 | const onSubmit = async (values: FormValues) => {
51 | setLoading(true);
52 | const request = generatePrompt(values, conversation);
53 | setOpenaiRequest(request);
54 | await getCompletion(request).then((completion) => {
55 | const completionText = completion && completion.choices && completion.choices[0].text?.trim();
56 | setResult(completionText || 'No result, check the logs.');
// NOTE(review): when completionText is undefined, the template below appends
// the literal string "undefined" to the transcript — confirm intended.
57 | setConversation(`${request.prompt}${completionText}`);
58 | }).finally(() => {
59 | setLoading(false);
60 | });
61 | };
62 | 
// Transcript render: each "User="/"GPT=" line becomes a labeled row; other
// lines (the tone instruction) are dropped by returning null.
// NOTE(review): mapped rows have no visible React "key" prop — verify in
// the original source.
63 | return (
64 | 
65 | 
66 | 
67 | 
68 |         { conversation.split('\n').map((line) => {
69 |           if (line.startsWith('User=')) {
70 | return (
71 | 
72 | User:
73 | {' '}
74 | {line.substring(5)}
75 | 
76 | );
77 | }
78 | if (line.startsWith('GPT=')) {
79 | return (
80 | 
81 | GPT:
82 | {' '}
83 | {line.substring(4)}
84 | 
85 | );
86 | }
87 | return null;
88 | }) }
89 | 
90 | 
91 | 
92 | 
96 | 
97 | 
98 | Send
99 | 
100 | 
101 | 
102 | { result && (
103 | <>
104 | 
105 | GPT-3 Prompt
106 | { openaiRequest && }
107 | >
108 | )}
109 | 
110 | 
111 | 
112 | );
113 | }
114 |
--------------------------------------------------------------------------------
/pages/demos/fake-review-generator.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 |
3 | import {
4 | Box, Button, Divider, Paper, Select, Stack, Text, TextInput, Title,
5 | } from '@mantine/core';
6 | import { useForm } from '@mantine/form';
7 | import GPTCompletion from 'components/completion';
8 | import { CreateCompletionRequest } from 'openai';
9 | import { getCompletion } from 'libs/openai-client';
10 |
// Form shape: free-text product description plus a numeric star rating
// (the select below offers 1-5).
11 | interface FormValues {
12 | product: string;
13 | starRating: number;
14 | }
15 |
16 | function generatePrompt({ product, starRating }: FormValues): CreateCompletionRequest {
17 | const prompt = `A collection of product reviews from Amazon.com.
18 |
19 | Product description: ${product}
20 |
21 | ===
22 |
23 | 5 customer reviews, rating the product ${starRating} out of 5 stars in a paragraph or less:
24 | -`;
25 |
26 | return {
27 | model: 'text-davinci-002',
28 | prompt,
29 | max_tokens: 300,
30 | temperature: 0.9,
31 | // stop: '\n',
32 | presence_penalty: 1.0,
33 | frequency_penalty: 1.5,
34 | };
35 | }
36 |
// Props for the generated-reviews list: the raw completion text plus the
// submitted form values (used for the star prefix).
37 | interface ReviewResultProps {
38 | result: string;
39 | form: FormValues;
40 | }
// Splits the raw completion into reviews on the "\n-" delimiter (matching
// the "-" bullet the prompt seeds) and renders one card per review,
// prefixed with the chosen number of stars.
// NOTE(review): JSX tags stripped in this dump; mapped items show no React
// "key" prop here — confirm against the original source.
41 | function ReviewResult({ result, form }: ReviewResultProps) {
42 | const reviews = result.split('\n-');
43 | return (
44 | <>
45 | { reviews.map((review) => (
46 | 
47 | 
48 | { '⭐'.repeat(form.starRating) }
49 | {' '}
50 | { review }
51 | 
52 | 
53 | )) }
54 | >
55 | );
56 | }
57 |
// Fake-review demo page: form (product + star rating) -> prompt ->
// generated reviews rendered via ReviewResult.
// NOTE(review): JSX tags in this dump appear stripped by HTML extraction;
// restore markup from the original source before editing the render tree.
58 | export default function FakeReviewGenerator() {
59 | const form = useForm({
60 | initialValues: {
61 | product: 'Sharpie S-Gel, Gel Pens, Medium Point (0.7mm), Assorted Colors, 12 Count',
62 | starRating: 5,
63 | },
64 | validate: {
65 | product: (product) => (product.length > 0 ? null : 'Invalid product'),
66 | },
67 | });
68 | const [loading, setLoading] = React.useState(false);
69 | const [result, setResult] = React.useState(undefined);
70 | const [openaiRequest, setOpenaiRequest] = React
71 | .useState(undefined);
72 | 
// Submits the form; falls back to a fixed message when the API returns no
// choices, and always clears the loading flag via .finally.
73 | const onSubmit = async (values: FormValues) => {
74 | setLoading(true);
75 | const request = generatePrompt(values);
76 | setOpenaiRequest(request);
77 | await getCompletion(request).then((completion) => {
78 | setResult(completion && completion.choices
79 | ? completion.choices[0].text
80 | : 'No result, check the logs.');
81 | }).finally(() => {
82 | setLoading(false);
83 | });
84 | };
85 | 
// [0..4] feeds the star-rating select; line 101 maps it to stars 5..1.
86 | const zeroToFour = Array.from(Array(5).keys());
87 | 
88 | return (
89 | 
90 | 
91 | 
92 | 
97 | {
101 | const stars = 6 - (key + 1);
102 | return {
103 | label: stars === 1 ? '1 star' : `${stars} stars`,
104 | value: stars,
105 | };
106 | })}
107 | {...form.getInputProps('starRating')}
108 | />
109 | Review!
110 | { (result && !loading)
111 | && (
112 | <>
113 | 
114 | Result
115 | 
116 | Prompt
117 | { openaiRequest && }
118 | >
119 | )}
120 | 
121 | 
122 | 
123 | );
124 | }
125 |
--------------------------------------------------------------------------------
/components/navbar.tsx:
--------------------------------------------------------------------------------
1 | import {
2 | Box, Group, Navbar, Stack, Text, ThemeIcon, UnstyledButton,
3 | } from '@mantine/core';
4 | import {
5 | IconBrandHipchat, IconBrandPython, IconBulb, IconDatabase, IconEdit,
6 | IconMoodSmileBeam, IconMovie, IconPig, IconSearch, IconStar,
7 | } from '@tabler/icons';
8 | import Link from 'next/link';
9 | import { useRouter } from 'next/router';
10 | import React from 'react';
11 |
// Sidebar link definitions. Each entry: icon (ReactNode), display label,
// category sublabel, and the /demos/<path> route segment.
// NOTE(review): the `icon:` values were lost in this HTML-extracted dump
// (element tags stripped); they are Tabler icon elements per the imports
// at the top of this file — restore from the original source.
12 | // Add your pages here!
13 | const productExamples = [
14 | {
15 | icon: ,
16 | label: 'Movie to emoji',
17 | sublabel: 'Knowledge',
18 | path: 'movie-to-emoji',
19 | },
20 | {
21 | icon: ,
22 | label: 'Chatbot',
23 | sublabel: 'Conversation',
24 | path: 'chatbot',
25 | },
26 | {
27 | icon: ,
28 | label: 'Tweet sentiment classifier',
29 | sublabel: 'Classification',
30 | path: 'tweet-sentiment-classifier',
31 | },
32 | {
33 | icon: ,
34 | label: 'Pig latin translator',
35 | sublabel: 'Translation',
36 | path: 'pig-latin',
37 | },
38 | {
39 | icon: ,
40 | label: 'Fake review generator',
41 | sublabel: 'Generation',
42 | path: 'fake-review-generator',
43 | },
44 | {
45 | icon: ,
46 | label: 'Copy editor',
47 | sublabel: 'Editing',
48 | path: 'editor',
49 | },
50 | {
51 | icon: ,
52 | label: 'SQL generator',
53 | sublabel: 'Codex',
54 | path: 'sql-generator',
55 | },
56 | {
57 | icon: ,
58 | label: 'Docstring insertion',
59 | sublabel: 'Codex, insertion',
60 | path: 'docstring',
61 | },
62 | {
63 | icon: ,
64 | label: 'Search relevance',
65 | sublabel: 'Embeddings',
66 | path: 'search',
67 | },
68 | ];
69 | 
// Second sidebar section: demos that showcase sampling parameters rather
// than use cases.
70 | const paramExamples = [
71 | {
72 | icon: ,
73 | label: 'Travel advice by temperature',
74 | sublabel: 'Temperature',
75 | path: 'temperature',
76 | },
77 | {
78 | icon: ,
79 | label: 'Travel advice by top_p',
80 | sublabel: 'top_p',
81 | path: 'top-p',
82 | },
83 | ];
84 |
// Props for one sidebar navigation entry (mirrors the shape of the
// productExamples / paramExamples items above).
85 | interface MainLinkProps {
86 | icon: React.ReactNode;
87 | path: string;
88 | color?: string;
89 | label: string;
90 | sublabel?: string;
91 | }
92 | 
// Renders a single nav link and highlights it when the current route
// matches /demos/<path>.
// NOTE(review): JSX tags in this dump appear stripped by HTML extraction
// (the element wrapping the sx callback at inner line 101 is missing);
// restore markup from the original source before editing.
93 | function MainLink({
94 | icon, color, label, path, sublabel,
95 | }: MainLinkProps) {
96 | const router = useRouter();
97 | const linkPath = `/demos/${path}`;
98 | const currentPath = router.pathname === linkPath;
99 | return (
100 | 
101 | ({
103 | display: 'block',
104 | width: '100%',
105 | padding: 6,
106 | borderRadius: theme.radius.sm,
107 | color: theme.black,
108 | backgroundColor: currentPath ? theme.colors.blue[1] : undefined,
109 | '&:hover': {
110 | backgroundColor: theme.colors.blue[2],
111 | },
112 | })}
113 | >
114 | 
115 | 
116 | {icon}
117 | 
118 | 
119 | 
120 | {label}
121 | { sublabel && { sublabel } }
122 | 
123 | 
124 | 
125 | 
126 | );
127 | }
// Optional props default to undefined (satisfies react/require-default-props).
128 | MainLink.defaultProps = {
129 | color: undefined,
130 | sublabel: undefined,
131 | };
132 |
// App sidebar: renders a "Use cases" section from productExamples and a
// "Parameters" section from paramExamples, one MainLink per entry.
// NOTE(review): JSX tags in this dump appear stripped by HTML extraction;
// restore markup from the original source before editing the render tree.
133 | export default function AppNavbar() {
134 | return (
135 | 
136 | 
137 | 
138 | Use cases
139 | { productExamples.map((link) => ) }
140 | 
141 | 
142 | Parameters
143 | { paramExamples.map((link) => ) }
144 | 
145 | 
146 | 
147 | );
148 | }
149 |
--------------------------------------------------------------------------------
/pages/demos/temperature.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 |
3 | import {
4 | Blockquote,
5 | Box, Button, Code, Divider, Paper, Stack, Text, TextInput, Title,
6 | } from '@mantine/core';
7 | import { useForm } from '@mantine/form';
8 | import GPTCompletion from 'components/completion';
9 | import { CreateCompletionRequest, CreateCompletionResponse } from 'openai';
10 | import { getCompletion } from 'libs/openai-client';
11 |
// Form shape: the destination city/place to ask about.
12 | interface FormValues {
13 | input: string;
14 | }
15 | 
// One row of the comparison: the API response, the request that produced
// it, and the temperature it was sampled at.
16 | interface TemperatureResult {
17 | result: CreateCompletionResponse;
18 | request: CreateCompletionRequest;
19 | temperature: number;
20 | }
21 |
22 | function generatePrompt({ input }: FormValues, temperature: number): CreateCompletionRequest {
23 | const prompt = `Question: What is a fun thing to do in San Francisco?
24 | Answer: Ride a bike along the Golden Gate Bridge.
25 |
26 | Question: What is a fun thing to do in ${input}?
27 | Answer:`;
28 |
29 | return {
30 | model: 'text-davinci-002',
31 | prompt,
32 | max_tokens: 40,
33 | temperature,
34 | n: 3,
35 | stop: '\n',
36 | };
37 | }
38 |
// Temperature demo page: runs the same travel-advice prompt at three
// temperatures (0, 0.5, 1.0) in parallel and shows all three result sets
// for side-by-side comparison.
// FIX(review): the component was named `TopP` — a copy/paste from
// top-p.tsx — which mislabels this page (and React DevTools) and collides
// with the real TopP component's name. Renamed to `Temperature`; it is the
// default export, so no caller changes are needed.
// NOTE(review): JSX tags in this dump appear stripped by HTML extraction;
// restore markup from the original source before editing the render tree.
39 | export default function Temperature() {
40 | const form = useForm({
41 | initialValues: {
42 | input: 'Edinburgh',
43 | },
44 | validate: {
45 | input: (value) => (value.length > 0 ? null : 'Invalid input'),
46 | },
47 | });
48 | const [loading, setLoading] = React.useState(false);
49 | const [results, setResults] = React.useState(undefined);
50 | 
// Fires one request per temperature in parallel; pairs each response with
// its request and temperature, and always clears loading via .finally.
51 | const onSubmit = async (values: FormValues) => {
52 | setLoading(true);
53 | const temps = [0, 0.5, 1.0];
54 | const requests = temps.map((temp) => generatePrompt(values, temp));
55 | await Promise.all(requests.map(getCompletion)).then((completions) => {
56 | const completionResults = completions.map((completion, index) => ({
57 | result: completion,
58 | request: requests[index],
59 | temperature: temps[index],
60 | }));
61 | setResults(completionResults);
62 | }).finally(() => {
63 | setLoading(false);
64 | });
65 | };
66 | 
67 | return (
68 | 
69 | 
70 | According to
71 | {' '}
72 | 
77 | OpenAI docs
78 | 
79 | {' '}
80 | on
81 | temperature
82 | :
83 | ({ body: { fontSize: theme.fontSizes.sm } })}>
84 | Higher values means the model will take more risks.
85 | Try 0.9 for more creative applications, and 0
86 | (argmax sampling) for ones with a well-defined answer.
87 | 
88 | 
89 | 
90 | 
91 | 
97 | Answer!
98 | { (results && !loading)
99 | && results.map(({ result, request, temperature }) => (
100 | <>
101 | 
102 | 
103 | Temperature
104 | {' '}
105 | { temperature }
106 | {' '}
107 | Result
108 | 
109 | 
110 | 
111 | { result.choices?.map((choice) => (
112 | { choice.text }
113 | ))}
114 | 
115 | 
116 | Prompt
117 | { request && (
118 | 
126 | ) }
127 | >
128 | ))}
129 | 
130 | 
131 | 
132 | );
133 | }
134 |
--------------------------------------------------------------------------------
/pages/demos/top-p.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 |
3 | import {
4 | Blockquote,
5 | Box, Button, Code, Divider, Paper, Stack, Text, TextInput, Title,
6 | } from '@mantine/core';
7 | import { useForm } from '@mantine/form';
8 | import GPTCompletion from 'components/completion';
9 | import { CreateCompletionRequest, CreateCompletionResponse } from 'openai';
10 | import { getCompletion } from 'libs/openai-client';
11 |
// Form shape: the destination city/place to ask about.
12 | interface FormValues {
13 | input: string;
14 | }
15 | 
// One row of the comparison: the API response, the request that produced
// it, and the top_p (nucleus sampling) value it was sampled with.
16 | interface TopPResult {
17 | result: CreateCompletionResponse;
18 | request: CreateCompletionRequest;
19 | top_p: number;
20 | }
21 |
22 | function generatePrompt({ input }: FormValues, top_p: number): CreateCompletionRequest {
23 | const prompt = `Question: What is a fun thing to do in San Francisco?
24 | Answer: Ride a bike along the Golden Gate Bridge.
25 |
26 | Question: What is a fun thing to do in ${input}?
27 | Answer:`;
28 |
29 | return {
30 | model: 'text-davinci-002',
31 | prompt,
32 | max_tokens: 40,
33 | top_p,
34 | n: 3,
35 | stop: '\n',
36 | };
37 | }
38 |
// top_p demo page: runs the same travel-advice prompt at three top_p
// values (0.1, 0.5, 1.0) in parallel and shows all three result sets for
// side-by-side comparison (the nucleus-sampling counterpart of the
// temperature demo).
// NOTE(review): JSX tags in this dump appear stripped by HTML extraction;
// restore markup from the original source before editing the render tree.
39 | export default function TopP() {
40 | const form = useForm({
41 | initialValues: {
42 | input: 'Edinburgh',
43 | },
44 | validate: {
45 | input: (value) => (value.length > 0 ? null : 'Invalid input'),
46 | },
47 | });
48 | const [loading, setLoading] = React.useState(false);
49 | const [results, setResults] = React.useState(undefined);
50 | 
// Fires one request per top_p in parallel; pairs each response with its
// request and top_p, and always clears loading via .finally.
51 | const onSubmit = async (values: FormValues) => {
52 | setLoading(true);
53 | const topPs = [0.1, 0.5, 1.0];
54 | const requests = topPs.map((top_p) => generatePrompt(values, top_p));
55 | await Promise.all(requests.map(getCompletion)).then((completions) => {
56 | const completionResults = completions.map((completion, index) => ({
57 | result: completion,
58 | request: requests[index],
59 | top_p: topPs[index],
60 | }));
61 | setResults(completionResults);
62 | }).finally(() => {
63 | setLoading(false);
64 | });
65 | };
66 | 
67 | return (
68 | 
69 | 
70 | According to
71 | {' '}
72 | 
77 | OpenAI docs
78 | 
79 | {' '}
80 | on
81 | top_p
82 | :
83 | ({ body: { fontSize: theme.fontSizes.sm } })}>
84 | An alternative to sampling with temperature,
85 | called nucleus sampling, where the model considers
86 | the results of the tokens with top_p probability mass.
87 | So 0.1 means only the tokens comprising the top 10%
88 | probability mass are considered.
89 | 
90 | 
91 | 
92 | 
93 | 
99 | Answer!
100 | { (results && !loading)
101 | && results.map(({ result, request, top_p }) => (
102 | <>
103 | 
104 | 
105 | TopP
106 | {' '}
107 | { top_p }
108 | {' '}
109 | Result
110 | 
111 | 
112 | 
113 | { result.choices?.map((choice) => (
114 | { choice.text }
115 | ))}
116 | 
117 | 
118 | Prompt
119 | { request && (
120 | 
128 | ) }
129 | >
130 | ))}
131 | 
132 | 
133 | 
134 | );
135 | }
136 |
--------------------------------------------------------------------------------
/pages/demos/search.tsx:
--------------------------------------------------------------------------------
1 | import React, { useCallback, useEffect } from 'react';
2 |
3 | import {
4 | Badge,
5 | Box, Button, Divider, Paper, SegmentedControl, Stack, Table, Textarea, Title,
6 | } from '@mantine/core';
7 | import { useForm } from '@mantine/form';
8 | import { CreateEmbeddingRequest, CreateEmbeddingResponse } from 'openai';
9 | import { getEmbedding } from 'libs/openai-client';
10 |
// Sample documents for the embedding-search demo: one feedback line per
// document. (The informal/imperfect grammar looks like deliberate
// realistic sample data — confirm before "fixing" any of these strings.)
11 | const userFeedback = [
12 | "I'm not able to run the gpt-3 demo, there is an npm install error.",
13 | 'I really trying out enjoyed the chatbot demo.',
14 | 'Could you add another demo that showcases chat features?',
15 | 'What is the difference between the davinci and curie models?',
16 | 'Does the Codex model really improve the quality of the results?',
17 | 'Mantine makes it really easy to add UX elements.',
18 | 'Would love to see more examples of editing in addition to completions.',
19 | "What's the point of this app? It's no better than the GPT-3 Playground.",
20 | ];
21 | 
// Search queries: each category is embedded with the query-side model and
// used to rank the feedback documents.
22 | const categories = [
23 | 'Bug',
24 | 'Feature request',
25 | 'Positive sentiment',
26 | 'Negative sentiment',
27 | ];
28 | 
// Form shape: both fields are newline-separated lists edited as free text.
29 | interface FormValues {
30 | userFeedback: string;
31 | categories: string;
32 | }
33 | 
// Embeddings captured after "Index!": one response per feedback line and
// one per category query.
34 | interface EmbeddingResult {
35 | feedbackEmbeddings: {
36 | embedding: CreateEmbeddingResponse;
37 | feedback: string;
38 | }[];
39 | categoryEmbeddings: {
40 | embedding: CreateEmbeddingResponse;
41 | category: string;
42 | }[];
43 | }
44 | 
// One ranked row: the feedback text and its cosine similarity to the
// selected category query.
45 | interface SearchRanking {
46 | feedback: string;
47 | relevance: number;
48 | }
49 |
50 | function generateEmbeddingDocument(input: string): CreateEmbeddingRequest {
51 | return {
52 | model: 'text-search-davinci-doc-001',
53 | input,
54 | };
55 | }
56 |
57 | function generateEmbeddingQuery(input: string): CreateEmbeddingRequest {
58 | return {
59 | model: 'text-search-davinci-query-001',
60 | input,
61 | };
62 | }
63 |
// Embedding-search demo: embeds every feedback line (doc model) and every
// category (query model), then ranks feedback by cosine similarity to the
// currently selected category.
// NOTE(review): JSX tags in this dump appear stripped by HTML extraction
// (and the useState generic parameters were eaten with them); restore the
// markup and type arguments from the original source before editing.
64 | export default function Search() {
65 | const form = useForm({
66 | initialValues: {
67 | userFeedback: userFeedback.join('\n'),
68 | categories: categories.join('\n'),
69 | },
70 | validate: {
71 | userFeedback: (value) => (value.length > 0 ? null : 'Invalid input'),
72 | categories: (value) => (value.length > 0 ? null : 'Invalid input'),
73 | },
74 | });
75 | const [loading, setLoading] = React.useState(false);
76 | const [result, setResult] = React.useState(undefined);
77 | const [rankings, setRankings] = React.useState(undefined);
78 | 
// Indexing step: one embedding request per feedback line and per category,
// all in parallel, then paired back with their source strings.
// NOTE(review): unlike the other demos there is no .finally/try-finally
// here — if any getEmbedding call rejects, setLoading(false) at the end
// never runs and the UI stays stuck in the loading state.
79 | const onSubmit = async (values: FormValues) => {
80 | setLoading(true);
81 | 
82 | const feedbackList = values.userFeedback.trim().split('\n');
83 | const categoriesList = values.categories.trim().split('\n');
84 | 
85 | const feedbackEmbeddingTasks = feedbackList.map(
86 | (feedback) => getEmbedding(generateEmbeddingDocument(feedback)),
87 | );
88 | const categoriesEmbeddingTasks = categoriesList.map(
89 | (category) => getEmbedding(generateEmbeddingQuery(category)),
90 | );
91 | 
92 | const feedbackEmbeddings = await Promise.all(feedbackEmbeddingTasks);
93 | const categoriesEmbeddings = await Promise.all(categoriesEmbeddingTasks);
94 | 
95 | // TODO: Set embeddings from the API response
96 | setResult({
97 | feedbackEmbeddings: feedbackList.map((feedback, index) => ({
98 | embedding: feedbackEmbeddings[index],
99 | feedback,
100 | })),
101 | categoryEmbeddings: categoriesList.map((category, index) => ({
102 | embedding: categoriesEmbeddings[index],
103 | category,
104 | })),
105 | });
106 | setLoading(false);
107 | };
108 | 
// Scores every feedback document against the selected category query by
// cosine similarity and stores the list sorted best-first. Missing
// embedding data degrades to relevance 0 rather than throwing.
109 | const sortByQuery = useCallback((query: string) => {
110 | if (!result) {
111 | return;
112 | }
113 | const queryEmbedding = result.categoryEmbeddings.find(
114 | (category) => category.category === query,
115 | );
116 | const searchResults = result.feedbackEmbeddings.map((feedback) => {
117 | const docEmbeddingData = feedback?.embedding?.data;
118 | const queryEmbeddingData = queryEmbedding?.embedding?.data;
119 | 
120 | if (!docEmbeddingData || !queryEmbeddingData) {
121 | return {
122 | feedback: feedback.feedback,
123 | relevance: 0,
124 | };
125 | }
126 | 
127 | const docEmbeddingArray = docEmbeddingData[0].embedding;
128 | const queryEmbeddingArray = queryEmbeddingData[0].embedding;
129 | 
130 | if (!docEmbeddingArray || !queryEmbeddingArray) {
131 | return {
132 | feedback: feedback.feedback,
133 | relevance: 0,
134 | };
135 | }
136 | 
137 | // Calculate cosine similarity of queryEmbedding and docEmbedding
// (assumes doc and query vectors have equal length — true for matched
// doc-001/query-001 model pairs, but not checked here; TODO confirm)
138 | const dotProduct = docEmbeddingArray.reduce(
139 | (acc, curr, index) => acc + curr * queryEmbeddingArray[index],
140 | 0,
141 | );
142 | const docNorm = Math.sqrt(docEmbeddingArray.reduce((acc, curr) => acc + curr * curr, 0));
143 | const queryNorm = Math.sqrt(queryEmbeddingArray.reduce((acc, curr) => acc + curr * curr, 0));
144 | const cosineSimilarity = dotProduct / (docNorm * queryNorm);
145 | 
146 | return {
147 | feedback: feedback.feedback,
148 | relevance: cosineSimilarity,
149 | };
150 | });
151 | searchResults.sort((a, b) => b.relevance - a.relevance);
152 | setRankings(searchResults);
153 | }, [result]);
154 | 
// After indexing completes, default the ranking to the first category.
155 | useEffect(() => {
156 | if (result) {
157 | sortByQuery(result.categoryEmbeddings[0].category);
158 | }
159 | }, [result, sortByQuery]);
160 | 
161 | return (
162 | 
163 | 
164 | 
165 | 
171 | 
177 | Index!
178 | { (result && !loading)
179 | && (
180 | <>
181 | 
182 | Result
183 | 
184 | ({
186 | label: category,
187 | value: category,
188 | }))}
189 | onChange={sortByQuery}
190 | />
191 | 
192 | 
193 | 
194 | Feedback
195 | Relevance
196 | 
197 | 
198 | 
199 | { rankings?.map(({ relevance, feedback }) => (
200 | 
201 | {feedback}
202 | 
203 | 
204 | {
205 | // Round to 1 decimal place
206 | (Math.round(relevance * 1000) / 10).toFixed(1)
207 | }
208 | %
209 | 
210 | 
211 | 
212 | )) }
213 | 
214 | 
215 | 
216 | >
217 | )}
218 | 
219 | 
220 | 
221 | );
222 | }
223 |
--------------------------------------------------------------------------------