├── frontend ├── .gitignore ├── postcss.config.js ├── next.config.js ├── next-env.d.ts ├── next.conf.js ├── pages │ ├── _app.tsx │ ├── api │ │ └── hello.ts │ ├── _document.tsx │ └── index.tsx ├── tailwind.config.js ├── tsconfig.json ├── components │ └── FileInput.tsx ├── package.json ├── public │ ├── next.svg │ ├── griddark.svg │ ├── grid.svg │ └── animationData.json ├── README.md ├── styles │ ├── globals.css │ └── Home.module.css └── utils │ └── Gradient.js ├── server ├── .gitignore ├── __pycache__ │ ├── index.cpython-310.pyc │ ├── index.cpython-39.pyc │ ├── generate.cpython-39.pyc │ ├── server.cpython-310.pyc │ ├── generate.cpython-310.pyc │ ├── summarize.cpython-310.pyc │ ├── summarize.cpython-39.pyc │ ├── preprocessing.cpython-39.pyc │ └── preprocessing.cpython-310.pyc ├── requirements.txt ├── Makefile ├── scripts │ └── setup.sh ├── pyproject.toml ├── summarize.py ├── index.py ├── preprocessing.py └── generate.py ├── .gitignore └── README.md /frontend/.gitignore: -------------------------------------------------------------------------------- 1 | .vercel 2 | -------------------------------------------------------------------------------- /server/.gitignore: -------------------------------------------------------------------------------- 1 | .vercel 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .vercel 2 | .env 3 | .next 4 | node_modules 5 | __pycache__ -------------------------------------------------------------------------------- /frontend/postcss.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | plugins: { 3 | tailwindcss: {}, 4 | autoprefixer: {}, 5 | }, 6 | } 7 | -------------------------------------------------------------------------------- /server/__pycache__/index.cpython-310.pyc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/mcantillon21/quotifyai/HEAD/server/__pycache__/index.cpython-310.pyc -------------------------------------------------------------------------------- /server/__pycache__/index.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mcantillon21/quotifyai/HEAD/server/__pycache__/index.cpython-39.pyc -------------------------------------------------------------------------------- /server/__pycache__/generate.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mcantillon21/quotifyai/HEAD/server/__pycache__/generate.cpython-39.pyc -------------------------------------------------------------------------------- /server/__pycache__/server.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mcantillon21/quotifyai/HEAD/server/__pycache__/server.cpython-310.pyc -------------------------------------------------------------------------------- /server/__pycache__/generate.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mcantillon21/quotifyai/HEAD/server/__pycache__/generate.cpython-310.pyc -------------------------------------------------------------------------------- /server/__pycache__/summarize.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mcantillon21/quotifyai/HEAD/server/__pycache__/summarize.cpython-310.pyc -------------------------------------------------------------------------------- /server/__pycache__/summarize.cpython-39.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mcantillon21/quotifyai/HEAD/server/__pycache__/summarize.cpython-39.pyc -------------------------------------------------------------------------------- /server/__pycache__/preprocessing.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mcantillon21/quotifyai/HEAD/server/__pycache__/preprocessing.cpython-39.pyc -------------------------------------------------------------------------------- /server/__pycache__/preprocessing.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mcantillon21/quotifyai/HEAD/server/__pycache__/preprocessing.cpython-310.pyc -------------------------------------------------------------------------------- /frontend/next.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('next').NextConfig} */ 2 | const nextConfig = { 3 | reactStrictMode: true, 4 | } 5 | 6 | module.exports = nextConfig 7 | -------------------------------------------------------------------------------- /frontend/next-env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | /// 3 | 4 | // NOTE: This file should not be edited 5 | // see https://nextjs.org/docs/basic-features/typescript for more information. 
6 | -------------------------------------------------------------------------------- /frontend/next.conf.js: -------------------------------------------------------------------------------- 1 | require("dotenv").config(); 2 | 3 | module.exports = { 4 | reactStrictMode: true, 5 | env: { 6 | NEXT_PUBLIC_OPENAI_API_KEY: process.env.NEXT_PUBLIC_OPENAI_API_KEY, 7 | }, 8 | }; 9 | 10 | -------------------------------------------------------------------------------- /server/requirements.txt: -------------------------------------------------------------------------------- 1 | Flask==2.2.2 2 | Flask_Cors==3.0.10 3 | pinecone_client==2.0.13 4 | matplotlib==3.6.2 5 | requests==2.28.1 6 | fastapi==0.88.0 7 | uvicorn 8 | flask 9 | langchain==0.0.70 10 | openai==0.26.4 11 | modal-client 12 | tiktoken -------------------------------------------------------------------------------- /frontend/pages/_app.tsx: -------------------------------------------------------------------------------- 1 | import '@/styles/globals.css' 2 | import type { AppProps } from 'next/app' 3 | import * as React from 'react' 4 | import { ChakraProvider } from '@chakra-ui/react' 5 | 6 | export default function App({ Component, pageProps }: AppProps) { 7 | return 8 | 9 | 10 | } 11 | -------------------------------------------------------------------------------- /frontend/pages/api/hello.ts: -------------------------------------------------------------------------------- 1 | // Next.js API route support: https://nextjs.org/docs/api-routes/introduction 2 | import type { NextApiRequest, NextApiResponse } from 'next' 3 | 4 | type Data = { 5 | name: string 6 | } 7 | 8 | export default function handler( 9 | req: NextApiRequest, 10 | res: NextApiResponse 11 | ) { 12 | res.status(200).json({ name: 'John Doe' }) 13 | } 14 | -------------------------------------------------------------------------------- /frontend/tailwind.config.js: -------------------------------------------------------------------------------- 1 | 
/** @type {import('tailwindcss').Config} */ 2 | module.exports = { 3 | content: [ 4 | "./app/**/*.{js,ts,jsx,tsx}", 5 | "./pages/**/*.{js,ts,jsx,tsx}", 6 | "./components/**/*.{js,ts,jsx,tsx}", 7 | 8 | // Or if using `src` directory: 9 | "./src/**/*.{js,ts,jsx,tsx}", 10 | ], 11 | theme: { 12 | extend: {}, 13 | }, 14 | plugins: [], 15 | } -------------------------------------------------------------------------------- /server/Makefile: -------------------------------------------------------------------------------- 1 | server: 2 | uvicorn index:app --reload 3 | 4 | setup: 5 | sh scripts/setup.sh 6 | 7 | type-check: 8 | mypy . 9 | 10 | docker-build-local: 11 | docker build --tag server_image_local . 12 | 13 | docker-build-linux: 14 | docker build --platform linux/amd64 --tag server_image_linux . 15 | 16 | docker-run-local: 17 | docker run --publish 80:80 -e OPENAI_API_KEY=$$OPENAI_API_KEY server_image_local -------------------------------------------------------------------------------- /server/scripts/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This is used for coloring output of this script 3 | 4 | echo "Running Setup Script" 5 | 6 | # Notify the user if an OpenAI key is not detected 7 | if [[ -z "${OPENAI_API_KEY}" ]]; then 8 | echo -e "${RED}OPENAI_API_KEY was not detected in environment variable. 
Please ensure it's located in .env${NC}" 9 | fi 10 | 11 | # Install the punkt file for nltk 12 | python3 <"] 6 | readme = "README.md" 7 | 8 | [tool.poetry.dependencies] 9 | python = "^3.10" 10 | langchain = "^0.0.82" 11 | openai = "^0.26.5" 12 | nltk = "^3.8.1" 13 | faiss-cpu = "^1.7.3" 14 | bs4 = "^0.0.1" 15 | unstructured = "^0.4.8" 16 | fastapi = "^0.91.0" 17 | uvicorn = "^0.20.0" 18 | python-multipart = "^0.0.5" 19 | pypdf = "^3.4.0" 20 | poetry-dotenv-plugin = "^0.1.0" 21 | pydantic = "^1.10.4" 22 | tiktoken = "^0.2.0" 23 | mypy = "^1.0.0" 24 | 25 | 26 | [build-system] 27 | requires = ["poetry-core"] 28 | build-backend = "poetry.core.masonry.api" 29 | -------------------------------------------------------------------------------- /frontend/pages/_document.tsx: -------------------------------------------------------------------------------- 1 | import { Html, Head, Main, NextScript } from 'next/document' 2 | import { ColorModeScript } from '@chakra-ui/react' 3 | 4 | // 1. import `extendTheme` function 5 | import { extendTheme, type ThemeConfig } from "@chakra-ui/react"; 6 | 7 | // 2. Add your color mode config 8 | const config: ThemeConfig = { 9 | initialColorMode: "dark", 10 | useSystemColorMode: false, 11 | }; 12 | 13 | // 3. extend the theme 14 | const theme = extendTheme({ config }); 15 | 16 | export default function Document() { 17 | return ( 18 | 19 | 20 | 21 | 22 |
23 | 24 | 25 | 26 | ) 27 | } 28 | -------------------------------------------------------------------------------- /frontend/components/FileInput.tsx: -------------------------------------------------------------------------------- 1 | interface Props { 2 | name: string; 3 | accept: string; 4 | className?: string; 5 | disabled?: boolean; 6 | id: string; 7 | onChange: (file: FileList | null) => void; 8 | children?: any; 9 | } 10 | 11 | export const FileInput = ({ name, accept, id, onChange, className = "", disabled = false, children }: Props) => { 12 | return ( 13 |
14 | {children} 15 | onChange(evt.target.files)} 20 | disabled={disabled} 21 | accept={accept} 22 | style={{ 23 | position: "absolute", 24 | top: 0, 25 | display: "hidden", 26 | left: 0, 27 | opacity: 0, 28 | width: "100%", 29 | height: "100%", 30 | zIndex: -1, 31 | }} 32 | /> 33 |
34 | ); 35 | }; -------------------------------------------------------------------------------- /frontend/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "quote-finder", 3 | "version": "0.1.0", 4 | "private": true, 5 | "scripts": { 6 | "dev": "next dev", 7 | "build": "next build", 8 | "start": "next start", 9 | "lint": "next lint" 10 | }, 11 | "dependencies": { 12 | "@chakra-ui/react": "^2.5.1", 13 | "@emotion/react": "^11.10.5", 14 | "@emotion/styled": "^11.10.5", 15 | "@next/font": "13.1.6", 16 | "@types/node": "18.13.0", 17 | "@types/react": "18.0.28", 18 | "@types/react-dom": "18.0.11", 19 | "axios": "^1.3.3", 20 | "eslint": "8.34.0", 21 | "eslint-config-next": "13.1.6", 22 | "framer-motion": "^9.0.4", 23 | "langchain": "^0.0.10", 24 | "lottie-react": "^2.3.1", 25 | "next": "^13.1.6", 26 | "openai": "^3.1.0", 27 | "react": "18.2.0", 28 | "react-dom": "18.2.0", 29 | "react-simple-typewriter": "^5.0.1", 30 | "react-toggle-dark-mode": "^1.1.1", 31 | "styled-components": "^5.3.6", 32 | "typescript": "4.9.5" 33 | }, 34 | "devDependencies": { 35 | "@types/styled-components": "^5.1.26", 36 | "autoprefixer": "^10.4.13", 37 | "dotenv": "^16.0.3", 38 | "postcss": "^8.4.21", 39 | "tailwindcss": "^1.9.6", 40 | "tailwindcss-plugins": "^0.3.0" 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /frontend/public/next.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /frontend/README.md: -------------------------------------------------------------------------------- 1 | This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app). 
2 | 3 | ## Getting Started 4 | 5 | First, run the development server: 6 | 7 | ```bash 8 | npm run dev 9 | # or 10 | yarn dev 11 | # or 12 | pnpm dev 13 | ``` 14 | 15 | Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. 16 | 17 | You can start editing the page by modifying `pages/index.tsx`. The page auto-updates as you edit the file. 18 | 19 | [API routes](https://nextjs.org/docs/api-routes/introduction) can be accessed on [http://localhost:3000/api/hello](http://localhost:3000/api/hello). This endpoint can be edited in `pages/api/hello.ts`. 20 | 21 | The `pages/api` directory is mapped to `/api/*`. Files in this directory are treated as [API routes](https://nextjs.org/docs/api-routes/introduction) instead of React pages. 22 | 23 | This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font. 24 | 25 | ## Learn More 26 | 27 | To learn more about Next.js, take a look at the following resources: 28 | 29 | - [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API. 30 | - [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial. 31 | 32 | You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome! 33 | 34 | ## Deploy on Vercel 35 | 36 | The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js. 37 | 38 | Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details. 
39 | -------------------------------------------------------------------------------- /server/summarize.py: -------------------------------------------------------------------------------- 1 | from langchain import PromptTemplate 2 | from langchain.chains.summarize import load_summarize_chain 3 | from langchain.chains.question_answering import load_qa_chain 4 | from langchain.llms import OpenAI 5 | from langchain.docstore.document import Document 6 | 7 | base_prompt = """A profound and powerful writer, you have been given a context text and a search query, {0}. You must write an in-depth analysis, highlighting the significance of {0} in larger context's meaning as well as INCLUDE AS MANY SPECIFIC QUOTATIONS AS POSSIBLE (marked with quotes) from the context and note what page you found them from. Try to prioritize quotations in responses that should be about 1000 characters total. 8 | """ 9 | 10 | def summarize_context(search_term: str, contexts: list[str], openai_api_key: str): 11 | try: 12 | if openai_api_key: 13 | llm = OpenAI(temperature=0, openai_api_key=openai_api_key) 14 | else: 15 | llm = OpenAI(temperature=0) 16 | docs = [Document(page_content=context) for context in contexts] 17 | # have to do a little weird acrobatics here because summarize cannot take more than one input 18 | # so have to construct the prompt template string after we interpolate the characters 19 | final_prompt = base_prompt.format(search_term) + "\n{text}\n\nSUMMARY:" 20 | final_prompt_template = PromptTemplate(template = final_prompt, input_variables=["text"]) 21 | llm_summarize = load_summarize_chain(llm, chain_type="map_reduce", return_intermediate_steps=True, map_prompt=final_prompt_template, combine_prompt=final_prompt_template) 22 | global_summary = llm_summarize({"input_documents": docs}, return_only_outputs=True) 23 | if (len(global_summary["output_text"]) > 400): 24 | return global_summary["output_text"] 25 | else: 26 | # To augment the summary with more details that don't get 
lost, we extract some info from the summaries 27 | doc_summaries = [Document(page_content=summary) for summary in global_summary["intermediate_steps"]] 28 | qa_chain = load_qa_chain(llm, chain_type="stuff") 29 | query = "What is the significance of {0} in the context and quotes (include quotations) to back up your reasoning".format(search_term) 30 | additional_context = qa_chain({"input_documents": doc_summaries, "question": query}, return_only_outputs=True) 31 | return global_summary["output_text"] + additional_context["output_text"] 32 | except Exception as e: 33 | print("Error generating summary: ", e) 34 | raise e 35 | -------------------------------------------------------------------------------- /frontend/styles/globals.css: -------------------------------------------------------------------------------- 1 | @tailwind base; 2 | @tailwind components; 3 | @tailwind utilities; 4 | 5 | :root { 6 | --max-width: 1100px; 7 | --border-radius: 12px; 8 | --font-mono: ui-monospace, Menlo, Monaco, 'Cascadia Mono', 'Segoe UI Mono', 9 | 'Roboto Mono', 'Oxygen Mono', 'Ubuntu Monospace', 'Source Code Pro', 10 | 'Fira Mono', 'Droid Sans Mono', 'Courier New', monospace; 11 | 12 | --foreground-rgb: 0, 0, 0; 13 | --background-start-rgb: 214, 219, 220; 14 | --background-end-rgb: 255, 255, 255; 15 | 16 | --primary-glow: conic-gradient( 17 | from 180deg at 50% 50%, 18 | #16abff33 0deg, 19 | #0885ff33 55deg, 20 | #54d6ff33 120deg, 21 | #0071ff33 160deg, 22 | transparent 360deg 23 | ); 24 | --secondary-glow: radial-gradient( 25 | rgba(255, 255, 255, 1), 26 | rgba(255, 255, 255, 0) 27 | ); 28 | 29 | --tile-start-rgb: 239, 245, 249; 30 | --tile-end-rgb: 228, 232, 233; 31 | --tile-border: conic-gradient( 32 | #00000080, 33 | #00000040, 34 | #00000030, 35 | #00000020, 36 | #00000010, 37 | #00000010, 38 | #00000080 39 | ); 40 | 41 | --callout-rgb: 238, 240, 241; 42 | --callout-border-rgb: 172, 175, 176; 43 | --card-rgb: 180, 185, 188; 44 | --card-border-rgb: 131, 134, 135; 45 | } 46 | 47 
| @media (prefers-color-scheme: dark) { 48 | :root { 49 | --foreground-rgb: 255, 255, 255; 50 | --background-start-rgb: 0, 0, 0; 51 | --background-end-rgb: 0, 0, 0; 52 | 53 | --primary-glow: radial-gradient(rgba(1, 65, 255, 0.4), rgba(1, 65, 255, 0)); 54 | --secondary-glow: linear-gradient( 55 | to bottom right, 56 | rgba(1, 65, 255, 0), 57 | rgba(1, 65, 255, 0), 58 | rgba(1, 65, 255, 0.3) 59 | ); 60 | 61 | --tile-start-rgb: 2, 13, 46; 62 | --tile-end-rgb: 2, 5, 19; 63 | --tile-border: conic-gradient( 64 | #ffffff80, 65 | #ffffff40, 66 | #ffffff30, 67 | #ffffff20, 68 | #ffffff10, 69 | #ffffff10, 70 | #ffffff80 71 | ); 72 | 73 | --callout-rgb: 20, 20, 20; 74 | --callout-border-rgb: 108, 108, 108; 75 | --card-rgb: 100, 100, 100; 76 | --card-border-rgb: 200, 200, 200; 77 | } 78 | } 79 | 80 | * { 81 | box-sizing: border-box; 82 | padding: 0; 83 | margin: 0; 84 | } 85 | 86 | html, 87 | body { 88 | max-width: 100vw; 89 | overflow-x: hidden; 90 | } 91 | 92 | body { 93 | color: rgb(var(--foreground-rgb)); 94 | background: linear-gradient( 95 | to bottom, 96 | transparent, 97 | rgb(var(--background-end-rgb)) 98 | ) 99 | rgb(var(--background-start-rgb)); 100 | } 101 | 102 | a { 103 | color: inherit; 104 | text-decoration: none; 105 | } 106 | 107 | @media (prefers-color-scheme: dark) { 108 | html { 109 | color-scheme: dark; 110 | } 111 | } 112 | 113 | #gradient-canvas { 114 | width:100%; 115 | height:100%; 116 | --gradient-color-1: #89b3f1; 117 | --gradient-color-2: #34d399; 118 | --gradient-color-3: #3437eb; 119 | --gradient-color-4: #54b7ad; 120 | z-index:0; 121 | } 122 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | # Quotify 💭 3 | Generate meaningful quotes from books, articles, or literally anything that can be turned into a PDF. 4 | 5 | Website [here](https://quotifyai.com/). Built by [Molly](https://twitter.com/mollycantillon). 
6 | 7 | ### Background 8 | 9 | Quotify is an AI-powered quote finder for any text-based PDF, extracting the most relevant quotes to substantiate your claim. What if we didn't have to scour texts for hours in search of a thesis-supporting quote? 10 | 11 | Provided the full length of riveting multi-dimensioned novels, intensive drama telenovela scripts, dense academic journal entries, and everything in between, Quotify finds the most significant parts of the text in relation to your ideated topic. This means you can now find Shakespeare quotes, cited within the text, about free will _and_ environmental activism. 12 | 13 | ### Technical Implementation 14 | Quotify uses the following tools: 15 | 16 | - [PyPDF](https://pypdf2.readthedocs.io/en/3.0.0/) for parsing the uploaded PDF doc into text 17 | - [NLTK](https://www.nltk.org/) for slicing the text into relevant chunks pertaining to the query 18 | - [OpenAI's Embedding Model](https://platform.openai.com/docs/guides/embeddings) for embeddings of words 19 | - [Facebook Research's FAISS library](https://github.com/facebookresearch/faiss) for extracting the most relevant chunks in the text as well as surrounding context 20 | - [Langchain](https://github.com/hwchase17/langchain) for summarization and extraction 21 | - [GPT's Davinci model](https://platform.openai.com/docs/models/overview) for analysis 22 | - [FastAPI](https://fastapi.tiangolo.com/) for backend 23 | - [Modal](https://modal.com/) for serverless deployment 24 | 25 | ### Backend 26 | The backend uses a Makefile for our build process and [Poetry](https://python-poetry.org/) as our dependency manager for Python. Install poetry, change directories into the `server` folder, and then run `poetry install` to install all dependencies. Note that we require `python 3.9.13` and that the [Rust Compiler](https://www.rust-lang.org/) must be installed on your machine in order to build certain dependencies. 
27 | 28 | Afterwards, run `make setup` to configure your environment to run our application. To run the server run `make server`. 29 | 30 | ### Frontend 31 | The frontend is built using [React](https://reactjs.org/), [Next.js](https://nextjs.org/), [Tailwind CSS](https://tailwindcss.com/) and [Chakra-UI](https://chakra-ui.com). To run the web app locally, change directories into `frontend` and run `npm install` to install all dependencies. Then run `npm run dev`. 32 | 33 | ## Acknowledgements & Disclaimer 34 | 35 | I learned a lot from [Amir](https://twitter.com/amirbolous) & [Verumlotus's](https://twitter.com/verumlotus) [SweetSerenade](https://www.sweetserenade.xyz/) while building this. 36 | 37 | Please note that this tool is intended as an experimental exploration of quote extraction using advanced AI technology, and should not be relied upon as a replacement for thorough research or analysis in any academic or professional context. While we have taken every effort to ensure accuracy and proper citation of sources, we cannot provide a full guarantee of the correctness of our analyses. 
38 | 39 | -------------------------------------------------------------------------------- /server/index.py: -------------------------------------------------------------------------------- 1 | import io 2 | from fastapi import FastAPI, UploadFile, File, HTTPException 3 | from preprocessing import return_relevant_document_context 4 | from generate import generate_quotes 5 | from summarize import summarize_context 6 | from pydantic import BaseModel 7 | from fastapi.middleware.cors import CORSMiddleware 8 | import modal 9 | 10 | NUM_CHUNKS = 10 11 | 12 | def download_punkt(): 13 | import nltk 14 | nltk.download('punkt') 15 | print('downloaded punkt') 16 | 17 | image = modal.Image.debian_slim().pip_install( 18 | # langchain pkgs 19 | "faiss-cpu~=1.7.3", 20 | "langchain~=0.0.7", 21 | "openai~=0.26.3", 22 | "tenacity~=8.2.1", 23 | #others 24 | "pypdf", 25 | "nltk", 26 | "tiktoken", 27 | "pandas" 28 | ).run_function(download_punkt) 29 | 30 | stub = modal.Stub( 31 | name="quotify", 32 | image=image, 33 | secrets=[modal.Secret.from_name("openai-secret")], 34 | ) 35 | 36 | mount_exclude_lst = ['.venv', '.mypy_cache', '__pycache__'] 37 | filter_artifacts = lambda path: False if any([substr in path for substr in mount_exclude_lst]) else True 38 | mount = modal.Mount.from_local_dir("../index", remote_path = "/", condition=filter_artifacts) 39 | 40 | app = FastAPI() 41 | 42 | app.add_middleware( 43 | CORSMiddleware, 44 | allow_origins=["*"], 45 | allow_credentials=True, 46 | allow_methods=["*"], 47 | allow_headers=["*"], 48 | ) 49 | 50 | class CompletionRequestBody(BaseModel): 51 | context: str 52 | 53 | def extract_stream(file: UploadFile = File(...)): 54 | pdf_as_bytes = file.file.read() 55 | # We convert the bytes into a Streamable object of bytes 56 | return io.BytesIO(pdf_as_bytes) 57 | 58 | @app.get("/") 59 | def root(): 60 | return {"hello": "world"} 61 | 62 | # @app.post("/generate-quotes") 63 | # def generate_quotes_completion(): 64 | # try: 65 | # search_term = 
request.form['search_term'] 66 | # body = request.json 67 | # completion = generate_quotes("poem", search_term, body['context']) 68 | # return { 'completion': completion } 69 | # except Exception as ex: 70 | # print(ex) 71 | # return { 'error': "Error generating completion" } 72 | 73 | @app.post("/generate-quotes-from-pdf") 74 | def generate_quotes_from_pdf( 75 | search_term: str, openai_api_key: str, file: UploadFile = File(...) 76 | ): 77 | try: 78 | stream = extract_stream(file) 79 | # Require OpenAI Key for documents over 50kB 80 | if stream.getbuffer().nbytes > 50000 and not openai_api_key: 81 | raise HTTPException(status_code=413, detail="Please pass in OpenAI key") 82 | # either parse PDF of raw text for testing 83 | relevant_document_context = return_relevant_document_context( 84 | stream, f"Retrieve the most significally relevant quotes in the text about {search_term}", NUM_CHUNKS, openai_api_key=openai_api_key 85 | ) 86 | # print('PARSED REVELANT CONTEXT: ', relevant_document_context) 87 | context_summary = summarize_context(search_term, relevant_document_context, openai_api_key=openai_api_key) 88 | # print('SUMMARIZED CONTEXT: ' + context_summary) 89 | completion = generate_quotes(search_term, context_summary, openai_api_key=openai_api_key) 90 | # print('COMPLETION ' + completion) 91 | return {"summary": context_summary, "completion": completion} 92 | except Exception as ex: 93 | print(ex) 94 | raise ex 95 | finally: 96 | file.file.close() 97 | 98 | # This hooks up our asgi fastapi app to modal 99 | @stub.asgi(image=image, secret=modal.Secret.from_name("openai-secret")) 100 | def fastapi_app(): 101 | app.add_middleware( 102 | CORSMiddleware, 103 | allow_origins=["*"], 104 | allow_credentials=True, 105 | allow_methods=["*"], 106 | allow_headers=["*"], 107 | ) 108 | return app -------------------------------------------------------------------------------- /server/preprocessing.py: 
-------------------------------------------------------------------------------- 1 | import io 2 | import langchain 3 | from langchain.text_splitter import NLTKTextSplitter 4 | from langchain.embeddings import OpenAIEmbeddings 5 | from langchain.vectorstores import FAISS 6 | from langchain.document_loaders import UnstructuredPDFLoader 7 | from pypdf import PdfReader 8 | 9 | def extract_text(file_stream: io.BytesIO) -> str: 10 | """Accepts a PDF or Image file & extracts the text from the file 11 | 12 | Args: 13 | file (io.BytesIO): Streamble bytes of the file 14 | 15 | Returns: 16 | str: Extracted text 17 | """ 18 | pdf = PdfReader(file_stream) 19 | text = "" 20 | for page in pdf.pages: 21 | text += page.extract_text() 22 | return text 23 | 24 | def chunk_text(text: str) -> list[str]: 25 | """Given a string of text, will chunk the text and return an array of chunked text. 26 | 27 | Args: 28 | text (str): Text to chunks 29 | 30 | Returns: 31 | list[str]: Array of chunks 32 | """ 33 | text_splitter = NLTKTextSplitter(chunk_size=4000, chunk_overlap=40) 34 | chunks = text_splitter.split_text(text) 35 | return chunks 36 | 37 | def get_chunk_embeddings(chunks: list[str]) -> list[list[float]]: 38 | """Given a list of chunks, generates embeddings for each chunk via OpenAI's embedding endpoint 39 | 40 | Args: 41 | chunks (list[str]): A list of chunks to calculate the embeddings of 42 | 43 | Returns: 44 | list[list[float]]: A list of embeddings (vectors of floats) for each chunk 45 | """ 46 | embeddings = OpenAIEmbeddings() 47 | chunk_embeddings = embeddings.embed_documents(chunks) 48 | return chunk_embeddings 49 | 50 | def map_elems_to_index(arr: list[str]) -> dict[str, int]: 51 | """Given an array ARR, will return a dictionary with elements of the array mapped 52 | to their index in the array 53 | 54 | Args: 55 | arr (list[str]): Input array 56 | 57 | Returns: 58 | dict[str, int]: Dictionary mapping elements to index in array 59 | """ 60 | res = dict() 61 | for index in 
range(len(arr)): 62 | res[arr[index]] = index 63 | return res 64 | 65 | def find_relevant_chunks(prompt_subset: str, chunks: list[str], k: int, openai_api_key: str) -> list[str]: 66 | """Given a prompt subset, a list of chunks, & chunk embeddings, returns the most relevant chunks and their 67 | closest chunk neighbors (within the original chunks list) 68 | 69 | Args: 70 | prompt_subset (str): Subset of the prompt 71 | chunks (list[str]): List of chunks 72 | k (int): Number of relevant chunks to analyze – we return these chunks & chunks surrounding them 73 | 74 | Returns: 75 | list[str]: List of most relevant chunks & surrounding neighbors 76 | """ 77 | # Create a mapping from chunks to their index in the chunks array 78 | chunks_to_index = map_elems_to_index(chunks) 79 | if openai_api_key: 80 | embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key) 81 | else: 82 | embeddings = OpenAIEmbeddings() 83 | 84 | chunk_search = FAISS.from_texts(chunks, embeddings) 85 | chunks_orderered_by_similarity = chunk_search.similarity_search(prompt_subset) 86 | # Take the K most relevant chunks 87 | relevant_chunks: list[str] = [document.page_content for document in chunks_orderered_by_similarity[:k]] 88 | # Now, we want to add the chunks that are +/- 2 chunks from the relevant chunks 89 | # found above 90 | # set of (chunk, index) tuples - we use a set to avoid adding duplicate chunks 91 | chunk_index_pairs: set[tuple[str, int]] = set() 92 | for relevant_chunk in relevant_chunks: 93 | relevant_chunk_index = chunks_to_index[relevant_chunk] 94 | # Grab the chunks +/- 2 from this relevant chunk 95 | for index in range(max(0, relevant_chunk_index - 2), min(relevant_chunk_index + 3, len(chunks) - 1)): 96 | chunk_index_pairs.add((chunks[index], index)) 97 | # Sort the chunks by index 98 | sorted_chunk_index_pairs = sorted(list(chunk_index_pairs), key = lambda x: x[1]) 99 | # Return only the chunks (filter out the index) 100 | return [chunk for chunk, _ in sorted_chunk_index_pairs] 
def return_relevant_document_context(file_stream: io.BytesIO, prompt_subset: str, k: int, openai_api_key: str) -> list[str]:
    """Given a file stream & prompt, perform semantic search to return K relevant chunks of the file.

    Args:
        file_stream (io.BytesIO): Streamable bytes of the file.
        prompt_subset (str): Prompt subset to search for.
        k (int): Number of relevant file chunks to return.
        openai_api_key (str): OpenAI API key, forwarded to the embedding step.

    Returns:
        list[str]: List of K most relevant file chunks (plus their neighbors).
    """
    file_text = extract_text(file_stream)
    chunks = chunk_text(file_text)
    return find_relevant_chunks(prompt_subset, chunks, k, openai_api_key)


# --- server/generate.py ---
from langchain import PromptTemplate, FewShotPromptTemplate
from langchain.llms import OpenAI


# Template used to render each few-shot example below into the prompt.
example_formatter_template = """
Search: {search_term}
Context: {context}
Output: {output}
"""

# Few-shot example 1: "The Tell-Tale Heart" (Poe).
search_1 = "Vultures"
output1 = """
Relevant Quotes:
"Slowly, little by little, I lifted the cloth, until a small, small light escaped from under it to fall upon — to fall upon that vulture eye! It was open — wide, wide open, and my anger increased as it looked straight at me. (Poe, 65)"
"His eye was like the eye of a vulture, the eye of one of those terrible birds that watch and wait while an animal dies, and then fall upon the dead body and pull it to pieces to eat it. When the old man looked at me with his vulture eye a cold feeling went up and down my back; even my blood became cold. (Poe, 64)"
"""
context1 = """
In Edgar Allan Poe's "The Tell-Tale Heart," the presence of vultures is a powerful symbol of the narrator's guilt and fear. The vultures represent death and decay, and serve as a reminder of the narrator's guilt and the consequences of his actions. The vultures also represent the old man's death, as they are often associated with death and decay. The vultures also represent the narrator's fear of being discovered and the consequences of his actions. The vultures serve to emphasize the narrator's guilt and fear, and to remind the reader of the consequences of his actions. Quotes to include could be "Slowly, little by little, I lifted the cloth, until a small, small light escaped from under it to fall upon — to fall upon that vulture eye! It was open — wide, wide open, and my anger increased as it looked straight at me. (Poe, 65)" and
or
"His eye was like the eye of a vulture, the eye of one of those terrible birds that watch and wait while an animal dies, and then fall upon the dead body and pull it to pieces to eat it. When the old man looked at me with his vulture eye a cold feeling went up and down my back; even my blood became cold. (Poe, 64)"
"""

# Few-shot example 2: "The Yellow Wall-Paper" (Stetson/Gilman).
search_2 = "Windows"
output2 = """
Relevant Quotes:
"I am getting angry enough to do something desperate. To jump out of the window would be admirable exercise, but the bars are too strong even to try."
"That spoils my ghostliness, I am afraid; but I don’t care—there is something strange about the house—I can feel it. I even said so to John one moonlight evening, but he said what I felt was a draught, and shut the window."
"""

context2 = """
In Charlotte Perkins Stetson's "The Yellow Wall-Paper," Windows serve as a symbol of the narrator's longing for a more romantic life, as well as a metaphor for her relationship with her husband. Windows also provide the narrator with a physical barrier between her and the outside world, allowing her to work in peace and observe the beauty of the countryside. Furthermore, Windows can be seen as a metaphor for the narrator's experience in "The Yellow Wall-Paper," as she is confined to a bed and forced to follow a pattern that has no purpose or meaning. By using Windows, users are often confined to a certain set of rules and regulations, and must follow a certain pattern in order to achieve their desired results. In this way, Windows can be seen as a symbol of the mundane and the everyday, and yet it is also a place of refuge and comfort.
Relevant quotes to include are "I am getting angry enough to do something desperate. To jump out of the window would be admirable exercise, but the bars are too strong even to try." and "That spoils my ghostliness, I am afraid; but I don’t care—there is something strange about the house—I can feel it. I even said so to John one moonlight evening, but he said what I felt was a draught, and shut the window."
"""

examples = [
    {
        "search_term": search_1,
        "context": context1,
        "output": output1,
    },
    {
        "search_term": search_2,
        "context": context2,
        "output": output2,
    },
]

def generate_quotes(search_term: str, context: str, openai_api_key: str):
    """Extract quotations relevant to ``search_term`` from ``context`` via a few-shot LLM prompt.

    Builds a FewShotPromptTemplate from the module-level ``examples`` and completes
    it with text-davinci-003.

    Args:
        search_term (str): Term the user wants quotes about; stripped before use.
        context (str): Source text / summary to mine for quotations; stripped before use.
        openai_api_key (str): OpenAI API key; when falsy the OpenAI client falls back
            to its environment-based configuration.

    Returns:
        The raw LLM completion (the quotes, or a "No relevant quotations found" message).

    Raises:
        Exception: Re-raises any error from prompt construction or the OpenAI call
            after logging it.
    """
    try:
        # Build the client once; only pass the key when the caller supplied one
        # (avoids duplicating the constructor call in each branch).
        llm_kwargs = {"model_name": "text-davinci-003", "n": 1, "best_of": 1}
        if openai_api_key:
            llm_kwargs["openai_api_key"] = openai_api_key
        llm_complete = OpenAI(**llm_kwargs)
        example_prompt = PromptTemplate(
            input_variables=[
                "search_term",
                "context",
                "output",
            ],
            template=example_formatter_template,
        )
        few_shot_prompt = FewShotPromptTemplate(
            examples=examples,
            example_prompt=example_prompt,
            prefix="Extract all quotations (anything in quotes) in the context that may relate to the search term. If no quotations are found, return: No relevant quotations found. Check the analysis for some insight.",
            suffix="Search: {search_term}\nContext: {context}\nOutput:",
            input_variables=["search_term", "context"],
            example_separator="\n\n",
        )

        final_prompt = few_shot_prompt.format(
            search_term=search_term.strip(),
            context=context.strip(),
        )
        # call API with prompt
        return llm_complete(final_prompt)
    except Exception as e:
        print("Error generating completion: ", e)
        # Bare raise preserves the original traceback (``raise e`` would restart it here).
        raise
.wrapper::before { 36 | background: url(/grid.svg); 37 | } 38 | } */ 39 | 40 | .browserpanel { 41 | width: 80%; 42 | max-width: 30em; 43 | background: radial-gradient( 44 | 63.94% 63.94% at 50% 0%, 45 | rgba(255, 255, 255, 0.12) 0%, 46 | rgba(255, 255, 255, 0) 100% 47 | ), 48 | rgba(255, 255, 255, 0.01); 49 | backdrop-filter: blur(6px); 50 | border-radius: 10px; 51 | position: relative; 52 | margin: auto; 53 | } 54 | 55 | .footer { 56 | background-color: #333; 57 | color: #fff; 58 | padding: 20px; 59 | text-align: center; 60 | position: absolute; 61 | bottom: 0; 62 | width: 100%; 63 | } 64 | 65 | .linear { 66 | font-family: ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, 67 | Segoe UI, Roboto, Helvetica Neue, Arial, Noto Sans, sans-serif, 68 | "Apple Color Emoji", "Segoe UI Emoji", Segoe UI Symbol, "Noto Color Emoji"; 69 | font-weight: 750; 70 | /* -webkit-text-fill-color: linear-gradient(60deg, #E21143, #FFB03A) !important; */ 71 | /* background-image: linear-gradient(60deg, red, #fff); */ 72 | background-clip: text; 73 | /* background: linear-gradient( 74 | to right, 75 | #89b3f1 20%, 76 | #34d399 40%, 77 | #34d399 60%, 78 | #89b3f1 80% 79 | ); */ 80 | background: white; 81 | background-size: 200% auto; 82 | color: #000; 83 | /* opacity: 90%; */ 84 | margin: 0 0 0.5rem; 85 | line-height: 1.15; 86 | font-size: 6rem; 87 | display: flex; 88 | justify-content: center; 89 | -webkit-background-clip: text; 90 | -webkit-text-fill-color: transparent; 91 | animation: shine 10s linear infinite; 92 | margin-bottom: 0px; 93 | } 94 | 95 | @keyframes shine { 96 | to { 97 | background-position: 200% center; 98 | } 99 | } 100 | 101 | .description { 102 | display: inherit; 103 | justify-content: center; 104 | align-items: inherit; 105 | font-size: 0.85rem; 106 | max-width: var(--max-width); 107 | width: 100%; 108 | z-index: 2; 109 | font-family: var(--font-mono); 110 | } 111 | 112 | .description a { 113 | display: flex; 114 | justify-content: center; 115 | 
align-items: center; 116 | gap: 0.5rem; 117 | } 118 | 119 | .description p { 120 | position: relative; 121 | margin: 0; 122 | padding: 1rem; 123 | background-color: rgba(var(--callout-rgb), 0.5); 124 | border: 1px solid rgba(var(--callout-border-rgb), 0.3); 125 | border-radius: var(--border-radius); 126 | } 127 | 128 | .code { 129 | font-weight: 700; 130 | font-family: var(--font-mono); 131 | } 132 | 133 | .grid { 134 | display: grid; 135 | grid-template-columns: repeat(4, minmax(25%, auto)); 136 | width: var(--max-width); 137 | max-width: 100%; 138 | } 139 | 140 | .card { 141 | padding: 1rem 1.2rem; 142 | border-radius: var(--border-radius); 143 | background: rgba(var(--card-rgb), 0); 144 | border: 1px solid rgba(var(--card-border-rgb), 0); 145 | transition: background 200ms, border 200ms; 146 | } 147 | 148 | .card span { 149 | display: inline-block; 150 | transition: transform 200ms; 151 | } 152 | 153 | .card h2 { 154 | font-weight: 600; 155 | margin-bottom: 0.7rem; 156 | } 157 | 158 | .card p { 159 | margin: 0; 160 | opacity: 0.6; 161 | font-size: 0.9rem; 162 | line-height: 1.5; 163 | max-width: 30ch; 164 | } 165 | 166 | .center { 167 | display: flex; 168 | justify-content: center; 169 | align-items: center; 170 | position: relative; 171 | padding: 4rem 0; 172 | } 173 | 174 | .center::before { 175 | background: var(--secondary-glow); 176 | border-radius: 50%; 177 | width: 480px; 178 | height: 360px; 179 | margin-left: -400px; 180 | } 181 | 182 | .center::after { 183 | background: var(--primary-glow); 184 | width: 240px; 185 | height: 180px; 186 | z-index: -1; 187 | } 188 | 189 | .center::before, 190 | .center::after { 191 | content: ""; 192 | left: 50%; 193 | position: absolute; 194 | filter: blur(45px); 195 | transform: translateZ(0); 196 | } 197 | 198 | .logo, 199 | .thirteen { 200 | position: relative; 201 | } 202 | 203 | .thirteen { 204 | display: flex; 205 | justify-content: center; 206 | align-items: center; 207 | width: 75px; 208 | height: 75px; 209 | 
padding: 25px 10px; 210 | margin-left: 16px; 211 | transform: translateZ(0); 212 | border-radius: var(--border-radius); 213 | overflow: hidden; 214 | box-shadow: 0px 2px 8px -1px #0000001a; 215 | } 216 | 217 | .thirteen::before, 218 | .thirteen::after { 219 | content: ""; 220 | position: absolute; 221 | z-index: -1; 222 | } 223 | 224 | /* Conic Gradient Animation */ 225 | .thirteen::before { 226 | animation: 6s rotate linear infinite; 227 | width: 200%; 228 | height: 200%; 229 | background: var(--tile-border); 230 | } 231 | 232 | /* Inner Square */ 233 | .thirteen::after { 234 | inset: 0; 235 | padding: 1px; 236 | border-radius: var(--border-radius); 237 | background: linear-gradient( 238 | to bottom right, 239 | rgba(var(--tile-start-rgb), 1), 240 | rgba(var(--tile-end-rgb), 1) 241 | ); 242 | background-clip: content-box; 243 | } 244 | 245 | /* Enable hover only on non-touch devices */ 246 | @media (hover: hover) and (pointer: fine) { 247 | .card:hover { 248 | background: rgba(var(--card-rgb), 0.1); 249 | border: 1px solid rgba(var(--card-border-rgb), 0.15); 250 | } 251 | 252 | .card:hover span { 253 | transform: translateX(4px); 254 | } 255 | } 256 | 257 | @media (prefers-reduced-motion) { 258 | .thirteen::before { 259 | animation: none; 260 | } 261 | 262 | .card:hover span { 263 | transform: none; 264 | } 265 | } 266 | 267 | /* Mobile */ 268 | @media (max-width: 700px) { 269 | .content { 270 | padding: 4rem; 271 | } 272 | 273 | .grid { 274 | grid-template-columns: 1fr; 275 | margin-bottom: 120px; 276 | max-width: 320px; 277 | text-align: center; 278 | } 279 | 280 | .card { 281 | padding: 1rem 2.5rem; 282 | } 283 | 284 | .card h2 { 285 | margin-bottom: 0.5rem; 286 | } 287 | 288 | .center { 289 | padding: 8rem 0 6rem; 290 | } 291 | 292 | .center::before { 293 | transform: none; 294 | height: 300px; 295 | } 296 | 297 | .description { 298 | font-size: 0.8rem; 299 | } 300 | 301 | .description a { 302 | padding: 1rem; 303 | } 304 | 305 | .description p, 306 | 
.description div { 307 | display: flex; 308 | justify-content: center; 309 | position: fixed; 310 | width: 100%; 311 | } 312 | 313 | .description p { 314 | align-items: center; 315 | inset: 0 0 auto; 316 | padding: 2rem 1rem 1.4rem; 317 | border-radius: 0; 318 | border: none; 319 | border-bottom: 1px solid rgba(var(--callout-border-rgb), 0.25); 320 | background: linear-gradient( 321 | to bottom, 322 | rgba(var(--background-start-rgb), 1), 323 | rgba(var(--callout-rgb), 0.5) 324 | ); 325 | background-clip: padding-box; 326 | backdrop-filter: blur(24px); 327 | } 328 | 329 | .description div { 330 | align-items: flex-end; 331 | pointer-events: none; 332 | inset: auto 0 0; 333 | padding: 2rem; 334 | height: 200px; 335 | background: linear-gradient( 336 | to bottom, 337 | transparent 0%, 338 | rgb(var(--background-end-rgb)) 40% 339 | ); 340 | z-index: 1; 341 | } 342 | } 343 | 344 | /* Tablet and Smaller Desktop */ 345 | @media (min-width: 701px) and (max-width: 1120px) { 346 | .grid { 347 | grid-template-columns: repeat(2, 50%); 348 | } 349 | } 350 | 351 | @media (prefers-color-scheme: dark) { 352 | .vercelLogo { 353 | filter: invert(1); 354 | } 355 | 356 | .logo, 357 | .thirteen img { 358 | filter: invert(1) drop-shadow(0 0 0.3rem #ffffff70); 359 | } 360 | } 361 | 362 | @keyframes rotate { 363 | from { 364 | transform: rotate(360deg); 365 | } 366 | to { 367 | transform: rotate(0deg); 368 | } 369 | } 370 | -------------------------------------------------------------------------------- /frontend/public/animationData.json: -------------------------------------------------------------------------------- 1 | {"v":"5.5.7","meta":{"g":"LottieFiles AE 0.1.20","a":"","k":"","d":"","tc":""},"fr":29.9700012207031,"ip":0,"op":55.0000022401959,"w":400,"h":400,"nm":"book","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Shape Layer 
5","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[200,200,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[0,49.5],[0,-24.5]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[0,0,0,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":7,"ix":5},"lc":1,"lj":1,"ml":4,"bm":1,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape 1","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":55.0000022401959,"st":0,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"page 
animation","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[264.109,208.537,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":0,"s":[{"i":[[17.35,0],[4.957,-7],[0,0],[-18.291,0],[-36.582,0],[0,0]],"o":[[-15.698,0],[0,0],[5.776,-7],[20.216,0],[0,0],[-31.395,0]],"v":[[-33.105,-43],[-64.5,-31],[-64.5,43],[-27.918,31],[64.5,43],[46.209,-31]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":5,"s":[{"i":[[17.35,0],[4.957,-7],[0,0],[-18.291,0],[-36.582,0],[0,0]],"o":[[-15.698,0],[0,0],[5.776,-7],[20.216,0],[0,0],[-31.395,0]],"v":[[-33.105,-43],[-64.5,-31],[-64.5,43],[-27.918,31],[64.5,43],[46.209,-31]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":10,"s":[{"i":[[17.35,0.079],[4.989,-6.977],[0,0],[-18.291,-0.083],[-15.552,8.301],[3.153,13.883]],"o":[[-15.697,-0.071],[0,0],[5.808,-6.974],[20.216,0.092],[-7.552,-8.699],[-26.847,-1.617]],"v":[[-32.91,-43.15],[-64.359,-31.292],[-64.694,42.707],[-28.058,30.873],[65.804,27.542],[46.349,-30.79]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":15,"s":[{"i":[[14.661,-1.007],[4.989,-6.977],[0,0],[-18.057,2.916],[-22.302,15.551],[1.153,9.633]],"o":[[-15.661,1.076],[0,0],[5.808,-6.974],[20.31,-3.28],[-11.802,-22.949],[-17.597,1.883]],"v":[[-33.41,-46.15],[-64.359,-31.292],[-64.694,42.707],[-32.058,25.373],[41.804,8.292],[26.099,-45.04]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":20,"s":[{"i":[[12.411,-10.257],[4.989,-6.977],[0,0],[-15.983,12.327],[-22.302,15.551],[1.153,9.633]],"o":[[-11.657,9.634],[0,0],[5.808,-6.974],[11.06,-8.53],[-11.802,-22.949],[-7.347,9.383]],"v":[[-38.16,-56.15],[-64.359,-31.292],[-64.694,42.707],[-34.558,14.873],[18.054,-23.458],[-5.651,-85.79]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":25,"s":[{"i":[
[1.411,-10.507],[4.611,-10.365],[0,0],[-4.809,19.603],[-0.802,13.801],[1.153,9.633]],"o":[[-0.861,6.41],[0,0],[5.808,-6.974],[3.81,-15.53],[-1.802,-13.199],[-1.597,17.633]],"v":[[-55.66,-71.15],[-64.359,-31.292],[-64.694,42.707],[-49.558,-3.627],[-43.446,-50.958],[-51.151,-115.29]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":30,"s":[{"i":[[-3.589,-11.507],[-0.889,-11.365],[0,0],[5.509,19.418],[5.698,13.301],[-0.097,22.633]],"o":[[1.926,6.175],[0,0],[-2.054,-11.364],[-4.69,-16.53],[2.198,-15.199],[5.403,20.133]],"v":[[-72.16,-69.65],[-64.359,-31.292],[-64.694,42.707],[-75.058,-7.627],[-90.446,-52.458],[-87.651,-119.29]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":35,"s":[{"i":[[-9.089,-11.007],[-0.889,-11.365],[0,0],[14.024,14.517],[17.198,3.801],[-4.097,19.633]],"o":[[6.964,8.435],[0,0],[-2.054,-11.364],[-9.69,-10.03],[5.198,-16.199],[15.403,8.133]],"v":[[-79.66,-66.15],[-64.359,-31.292],[-64.694,42.707],[-89.058,-2.127],[-138.946,-26.458],[-120.151,-93.29]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":40,"s":[{"i":[[-11.744,-2.718],[-0.889,-11.365],[0,0],[15.81,1.47],[18.698,4.301],[-4.097,19.633]],"o":[[19.411,4.493],[0,0],[-2.054,-11.364],[-13.886,-1.291],[5.698,-20.699],[16.403,5.633]],"v":[[-93.66,-52.65],[-64.359,-31.292],[-64.694,42.707],[-102.058,19.373],[-160.946,12.542],[-143.151,-61.79]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":45,"s":[{"i":[[-21.589,-0.007],[-0.889,-11.365],[0,0],[15.81,1.47],[21.698,6.801],[-4.097,19.633]],"o":[[13.411,0.005],[0,0],[-2.054,-11.364],[-13.886,-1.291],[5.698,-16.699],[16.403,5.633]],"v":[[-95.66,-49.15],[-64.359,-31.292],[-64.694,42.707],[-102.558,21.873],[-182.946,25.542],[-163.651,-42.29]],"c":true}]},{"t":50.0000020365418,"s":[{"i":[[-21.589,-0.007],[-6.389,-7.865],[0,0],[12.813,0.384],[31.198,-0.62],[-4.097,19.633]],"o":[[13.411,0.005],[0,0],[-8.554,-6.864],[-20.69,-0.62],[5.698,-16.699],[19.903,-0.62]],"v":[[-96.66,-4
4.15],[-64.359,-31.292],[-64.694,42.707],[-99.558,31.373],[-194.446,43.542],[-175.151,-32.79]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[0,0,0,1],"ix":3},"o":{"a":1,"k":[{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":50,"s":[100]},{"t":52.0000021180034,"s":[0]}],"ix":4},"w":{"a":0,"k":7,"ix":5},"lc":1,"lj":1,"ml":1,"bm":1,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":1,"k":[{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":50,"s":[100]},{"t":52.0000021180034,"s":[0]}],"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0.639,0.62],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape 1","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":55.0000022401959,"st":0,"bm":0},{"ddd":0,"ind":3,"ty":4,"nm":"pages","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[200,209,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[-4.957,-7],[-15.698,0],[-31.395,0],[0,0],[20.216,0],[5.776,-7],[18.291,0],[36.582,0],[0,0],[-17.35,0]],"o":[[4.957,-7],[17.35,0],[0,0],[-36.582,0],[-18.291,0],[-5.776,-7],[-20.216,0],[0,0],[31.395,0],[15.698,0]],"v":[[0,-31],[31.395,-43],[110.709,-31],[129,43],[36.582,31],[0,43],[-36.582,31],[-129,43],[-110.709,-31],[-31.395,-43]],"c":true},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[0,0,0,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":7,"ix":5},"lc":1,"lj":1,"ml":4,"bm":1,"nm":"Stroke 1","mn":"ADBE Vector Graphic - 
Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape 1","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":55.0000022401959,"st":0,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"cover","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[200,264,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0],[6,0],[2,3],[0,0]],"o":[[0,0],[-2,3],[-6,0],[0,0],[0,0]],"v":[[134,-2],[12,-2],[0,2],[-12,-2],[-134,-2]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[0,0,0,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":7,"ix":5},"lc":1,"lj":1,"ml":4,"bm":1,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape 1","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":55.0000022401959,"st":0,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"bkgr","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[262.668,179.393,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ty":"rc","d":1,"s":{"a":0,"k":[400,400],"ix":2},"p":{"a":0,"k":[0,0],"ix":3},"r":{"a":0,"k":0,"ix":4},"nm":"Rectangle Path 1","mn":"ADBE Vector Shape - Rect","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - 
Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[-62.668,20.607],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle 1","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":55.0000022401959,"st":0,"bm":0}],"markers":[]} -------------------------------------------------------------------------------- /frontend/pages/index.tsx: -------------------------------------------------------------------------------- 1 | import * as React from 'react' 2 | import { useEffect, useState, useRef, useMemo } from 'react' 3 | import axios from 'axios' 4 | import Lottie from 'lottie-react' 5 | import Head from 'next/head' 6 | import Image from 'next/image' 7 | import { Inter } from '@next/font/google' 8 | import { 9 | useColorMode, 10 | Box, 11 | Input, 12 | Text, 13 | Button, 14 | useToast, 15 | } from '@chakra-ui/react' 16 | import { DarkModeSwitch } from 'react-toggle-dark-mode' 17 | import { Typewriter } from 'react-simple-typewriter' 18 | import { FileInput } from '../components/FileInput' 19 | import { Gradient } from '@/utils/Gradient' 20 | import styles from '@/styles/Home.module.css' 21 | import animationData from '../public/animationData2.json' 22 | 23 | const inter = Inter({ subsets: ['latin'] }) 24 | 25 | const GradientCanvas = () => { 26 | useEffect(() => { 27 | try { 28 | const gradient = new Gradient() 29 | // @ts-ignore 30 | gradient.initGradient('#gradient-canvas') 31 | } catch {} 32 | }, []) 33 | return ( 34 | 35 | 36 | 37 | ) 38 | } 39 | 40 | export default function Home() { 41 | const [searchTerm, setSearchTerm] = useState('') 42 | const [completion, setCompletion] = useState('') 43 | const [context, setContext] = useState('') 44 | const [loading, setLoading] = useState(false) 45 | const [isOpen, setIsOpen] = useState(false) 46 | const [isDarkMode, setDarkMode] = useState(true) 47 | const 
[fileName, setFileName] = useState('') 48 | const fileRef = useRef(null) 49 | const [errorMessage, setErrorMesage] = useState(''); 50 | 51 | const toast = useToast() 52 | 53 | const search_terms = useMemo( 54 | () => [ 55 | 'Innocence', 56 | "The significance of Piggy's Glasses", 57 | 'Consensus', 58 | 'Laissez-Faire Quasi-Libertarianism', 59 | ], 60 | [], 61 | ) 62 | const fileNames = useMemo( 63 | () => [ 64 | 'To Kill a Mockingbird', 65 | 'Lord of the Flies', 66 | 'Bitcoin Whitepaper', 67 | 'Atlas Shrugged', 68 | ], 69 | [], 70 | ) 71 | 72 | const toggleOpen = () => { 73 | setIsOpen(!isOpen) 74 | } 75 | 76 | const toggleDarkMode = (checked: boolean) => { 77 | setDarkMode(!checked) 78 | toggleColorMode() 79 | } 80 | 81 | const { colorMode, toggleColorMode } = useColorMode() 82 | 83 | const searchFromAPI = async (searchTerm: string) => { 84 | if (!searchTerm || !fileRef.current) { 85 | toast({ 86 | title: 'Error: Required fields not filled out', 87 | description: 'Please select a file and enter a search term', 88 | status: 'error', 89 | duration: 5000, 90 | isClosable: true, 91 | containerStyle: { 92 | // width: "700px", 93 | maxWidth: '90%', 94 | }, 95 | }) 96 | return; 97 | } 98 | 99 | setLoading(true) 100 | setSearchTerm(searchTerm) 101 | setErrorMesage("") 102 | 103 | try { 104 | const formData = new FormData() 105 | 106 | const headers = { 107 | accept: 'application/json', 108 | 'Access-Control-Allow-Origin': '*', 109 | 'Content-Type': 'multipart/form-data', 110 | 'Access-Control-Allow-Credentials': 'true', 111 | 'Access-Control-Allow-Methods': 'GET,PUT,POST,DELETE,PATCH,OPTIONS', 112 | 'Access-Control-Allow-Headers':'Origin, X-Requested-With, Content-Type, Accept, Authorization, Access-Control-Allow-Credentials, Access-Control-Allow-Origin, Access-Control-Allow-Methods, Access-Control-Allow-Headers' 113 | } 114 | formData.append('file', fileRef.current![0], 'file') 115 | const axiosResponse = await axios.post( 116 | // 
'http://127.0.0.1:8000/generate-quotes-from-pdf', 117 | 'https://mcantillon21--quotify-fastapi-app.modal.run/generate-quotes-from-pdf', 118 | formData, 119 | { 120 | headers: headers, 121 | params: { 122 | search_term: searchTerm, 123 | openai_api_key: process.env.NEXT_PUBLIC_OPENAI_API_KEY, 124 | }, 125 | }, 126 | ) 127 | if (axiosResponse.status != 200) { 128 | setErrorMesage("Sorry, we ran into an issue. It's likely we ran out of our OpenAI credits or are being rate limited. Check back soon!") 129 | } 130 | 131 | setCompletion(axiosResponse.data.completion) 132 | setContext(axiosResponse.data.summary) 133 | // console.log(axiosResponse) 134 | setLoading(false) 135 | } 136 | catch (error) { 137 | console.log(error) 138 | setLoading(false) 139 | setErrorMesage("Sorry, we ran into an issue. It's likely we ran out of our OpenAI credits or are being rate limited. Check back soon!") 140 | } 141 | } 142 | 143 | return ( 144 | <> 145 | 146 | 147 | Quotify 148 | 152 | 153 | 157 | 158 | {/* */} 159 | {/* 160 | 167 | */} 168 |
169 |
170 |
Quotify
171 |
172 | 173 | 174 | Find me quotes about{' '} 175 | 176 | 177 | {fileRef.current ? ( 178 | searchTerm || 'search term' 179 | ) : ( 180 | 187 | )}{' '} 188 | 189 | 190 | in{' '} 191 | 192 | 193 | {fileRef.current ? ( 194 | fileName || 'fileName' 195 | ) : ( 196 | 203 | )}{' '} 204 | {' '} 205 | 206 | 207 | 208 |
209 |
210 | setSearchTerm(e.target.value)} 216 | _hover={{ borderColor: '#9B72F2', borderWidth: '1px' }} 217 | focusBorderColor={'#9B72F2'} 218 | /> 219 | {fileName ? ( 220 |
221 |

File: {fileName}

222 | { 227 | fileRef.current = file 228 | setFileName(file?.[0].name || '') 229 | }} 230 | className="ml-2" 231 | > 232 | 237 | 238 |
239 | ) : ( 240 | { 245 | fileRef.current = file 246 | setFileName(file?.[0].name || '') 247 | }} 248 | className="ml-2" 249 | > 250 | 255 | 256 | )} 257 |
258 | 259 | 268 | {errorMessage && ( 269 |
270 | {errorMessage}{" "} 271 |
272 | )} 273 | {loading ? ( 274 | <> 275 |
276 | Estimated wait time: less than 5 minutes 277 |
278 | 283 | 284 | ) : completion === '' ? null : ( 285 | <> 286 |
{completion}
287 | 288 | Click for Additional Analysis 289 | 290 | {isOpen && ( 291 |
{context}
292 | )} 293 | 294 | )} 295 |
296 |
297 |
298 |
299 | {/*
*/} 300 | 301 | ) 302 | } 303 | -------------------------------------------------------------------------------- /frontend/utils/Gradient.js: -------------------------------------------------------------------------------- 1 | // @ts-nocheck 2 | // https://kevinhufnagl.com/wp-content/themes/lightisol/dist/js/min/lightisol-gradient.min.js?ver=1.0 3 | // 4 | // Since the original CodePen https://codepen.io/kevinhufnagl/pen/YzwBemd, 5 | // seems to have been removed by the author I restored this version from 6 | // the article's markup found at 7 | // https://kevinhufnagl.com/how-to-stripe-website-gradient-effect/ 8 | 9 | function normalizeColor(hexCode) { 10 | return [ 11 | ((hexCode >> 16) & 255) / 255, 12 | ((hexCode >> 8) & 255) / 255, 13 | (255 & hexCode) / 255, 14 | ]; 15 | } 16 | ['SCREEN', 'LINEAR_LIGHT'].reduce( 17 | (hexCode, t, n) => Object.assign(hexCode, { [t]: n }), 18 | {} 19 | ); 20 | class MiniGl { 21 | constructor(canvas, width, height, debug = !1) { 22 | try { 23 | const _miniGl = this, 24 | debug_output = 25 | -1 !== 26 | document.location.search 27 | .toLowerCase() 28 | .indexOf('debug=webgl'); 29 | (_miniGl.canvas = canvas), 30 | (_miniGl.gl = _miniGl.canvas.getContext('webgl', { 31 | antialias: !0, 32 | })), 33 | (_miniGl.meshes = []); 34 | const context = _miniGl.gl; 35 | width && height && this.setSize(width, height), 36 | _miniGl.lastDebugMsg, 37 | (_miniGl.debug = 38 | debug && debug_output 39 | ? 
function (e) { 40 | const t = new Date(); 41 | t - _miniGl.lastDebugMsg > 1e3 && 42 | console.log('---'), 43 | console.log( 44 | t.toLocaleTimeString() + 45 | Array( 46 | Math.max(0, 32 - e.length) 47 | ).join(' ') + 48 | e + 49 | ': ', 50 | ...Array.from(arguments).slice(1) 51 | ), 52 | (_miniGl.lastDebugMsg = t); 53 | } 54 | : () => {}), 55 | Object.defineProperties(_miniGl, { 56 | Material: { 57 | enumerable: !1, 58 | value: class { 59 | constructor( 60 | vertexShaders, 61 | fragments, 62 | uniforms = {} 63 | ) { 64 | const material = this; 65 | function getShaderByType(type, source) { 66 | const shader = context.createShader(type); 67 | return ( 68 | context.shaderSource(shader, source), 69 | context.compileShader(shader), 70 | context.getShaderParameter( 71 | shader, 72 | context.COMPILE_STATUS 73 | ) || 74 | console.error( 75 | context.getShaderInfoLog(shader) 76 | ), 77 | _miniGl.debug( 78 | 'Material.compileShaderSource', 79 | { 80 | source: source, 81 | } 82 | ), 83 | shader 84 | ); 85 | } 86 | function getUniformVariableDeclarations( 87 | uniforms, 88 | type 89 | ) { 90 | return Object.entries(uniforms) 91 | .map(([uniform, value]) => 92 | value.getDeclaration(uniform, type) 93 | ) 94 | .join('\n'); 95 | } 96 | (this.uniforms = uniforms), 97 | (this.uniformInstances = []); 98 | const prefix = 99 | '\n precision highp float;\n '; 100 | (this.vertexSource = `\n ${prefix}\n attribute vec4 position;\n attribute vec2 uv;\n attribute vec2 uvNorm;\n ${getUniformVariableDeclarations( 101 | _miniGl.commonUniforms, 102 | 'vertex' 103 | )}\n ${getUniformVariableDeclarations( 104 | uniforms, 105 | 'vertex' 106 | )}\n ${vertexShaders}\n `), 107 | (this.Source = `\n ${prefix}\n ${getUniformVariableDeclarations( 108 | _miniGl.commonUniforms, 109 | 'fragment' 110 | )}\n ${getUniformVariableDeclarations( 111 | uniforms, 112 | 'fragment' 113 | )}\n ${fragments}\n `), 114 | (this.vertexShader = getShaderByType( 115 | context.VERTEX_SHADER, 116 | this.vertexSource 117 | )), 
118 | (this.fragmentShader = getShaderByType( 119 | context.FRAGMENT_SHADER, 120 | this.Source 121 | )), 122 | (this.program = context.createProgram()), 123 | context.attachShader( 124 | this.program, 125 | this.vertexShader 126 | ), 127 | context.attachShader( 128 | this.program, 129 | this.fragmentShader 130 | ), 131 | context.linkProgram(this.program), 132 | context.getProgramParameter( 133 | this.program, 134 | context.LINK_STATUS 135 | ) || 136 | console.error( 137 | context.getProgramInfoLog( 138 | this.program 139 | ) 140 | ), 141 | context.useProgram(this.program), 142 | this.attachUniforms( 143 | void 0, 144 | _miniGl.commonUniforms 145 | ), 146 | this.attachUniforms(void 0, this.uniforms); 147 | } 148 | attachUniforms(name, uniforms) { 149 | const material = this; 150 | void 0 === name 151 | ? Object.entries(uniforms).forEach( 152 | ([name, uniform]) => { 153 | material.attachUniforms( 154 | name, 155 | uniform 156 | ); 157 | } 158 | ) 159 | : 'array' == uniforms.type 160 | ? uniforms.value.forEach((uniform, i) => 161 | material.attachUniforms( 162 | `${name}[${i}]`, 163 | uniform 164 | ) 165 | ) 166 | : 'struct' == uniforms.type 167 | ? 
Object.entries(uniforms.value).forEach( 168 | ([uniform, i]) => 169 | material.attachUniforms( 170 | `${name}.${uniform}`, 171 | i 172 | ) 173 | ) 174 | : (_miniGl.debug( 175 | 'Material.attachUniforms', 176 | { 177 | name: name, 178 | uniform: uniforms, 179 | } 180 | ), 181 | material.uniformInstances.push({ 182 | uniform: uniforms, 183 | location: context.getUniformLocation( 184 | material.program, 185 | name 186 | ), 187 | })); 188 | } 189 | }, 190 | }, 191 | Uniform: { 192 | enumerable: !1, 193 | value: class { 194 | constructor(e) { 195 | (this.type = 'float'), 196 | Object.assign(this, e), 197 | (this.typeFn = 198 | { 199 | float: '1f', 200 | int: '1i', 201 | vec2: '2fv', 202 | vec3: '3fv', 203 | vec4: '4fv', 204 | mat4: 'Matrix4fv', 205 | }[this.type] || '1f'), 206 | this.update(); 207 | } 208 | update(value) { 209 | void 0 !== this.value && 210 | context[`uniform${this.typeFn}`]( 211 | value, 212 | 0 === this.typeFn.indexOf('Matrix') 213 | ? this.transpose 214 | : this.value, 215 | 0 === this.typeFn.indexOf('Matrix') 216 | ? 
this.value 217 | : null 218 | ); 219 | } 220 | getDeclaration(name, type, length) { 221 | const uniform = this; 222 | if (uniform.excludeFrom !== type) { 223 | if ('array' === uniform.type) 224 | try { 225 | return ( 226 | uniform.value[0].getDeclaration( 227 | name, 228 | type, 229 | uniform.value.length 230 | ) + 231 | `\nconst int ${name}_length = ${uniform.value.length};` 232 | ); 233 | } catch {} 234 | if ('struct' === uniform.type) { 235 | let name_no_prefix = name.replace( 236 | 'u_', 237 | '' 238 | ); 239 | return ( 240 | (name_no_prefix = 241 | name_no_prefix 242 | .charAt(0) 243 | .toUpperCase() + 244 | name_no_prefix.slice(1)), 245 | `uniform struct ${name_no_prefix} \n {\n` + 246 | Object.entries(uniform.value) 247 | .map(([name, uniform]) => 248 | uniform 249 | .getDeclaration( 250 | name, 251 | type 252 | ) 253 | .replace( 254 | /^uniform/, 255 | '' 256 | ) 257 | ) 258 | .join('') + 259 | `\n} ${name}${ 260 | length > 0 261 | ? `[${length}]` 262 | : '' 263 | };` 264 | ); 265 | } 266 | return `uniform ${uniform.type} ${name}${ 267 | length > 0 ? 
`[${length}]` : '' 268 | };`; 269 | } 270 | } 271 | }, 272 | }, 273 | PlaneGeometry: { 274 | enumerable: !1, 275 | value: class { 276 | constructor(width, height, n, i, orientation) { 277 | context.createBuffer(), 278 | (this.attributes = { 279 | position: new _miniGl.Attribute({ 280 | target: context.ARRAY_BUFFER, 281 | size: 3, 282 | }), 283 | uv: new _miniGl.Attribute({ 284 | target: context.ARRAY_BUFFER, 285 | size: 2, 286 | }), 287 | uvNorm: new _miniGl.Attribute({ 288 | target: context.ARRAY_BUFFER, 289 | size: 2, 290 | }), 291 | index: new _miniGl.Attribute({ 292 | target: context.ELEMENT_ARRAY_BUFFER, 293 | size: 3, 294 | type: context.UNSIGNED_SHORT, 295 | }), 296 | }), 297 | this.setTopology(n, i), 298 | this.setSize(width, height, orientation); 299 | } 300 | setTopology(e = 1, t = 1) { 301 | const n = this; 302 | (n.xSegCount = e), 303 | (n.ySegCount = t), 304 | (n.vertexCount = 305 | (n.xSegCount + 1) * (n.ySegCount + 1)), 306 | (n.quadCount = 307 | n.xSegCount * n.ySegCount * 2), 308 | (n.attributes.uv.values = new Float32Array( 309 | 2 * n.vertexCount 310 | )), 311 | (n.attributes.uvNorm.values = 312 | new Float32Array(2 * n.vertexCount)), 313 | (n.attributes.index.values = 314 | new Uint16Array(3 * n.quadCount)); 315 | for (let e = 0; e <= n.ySegCount; e++) 316 | for (let t = 0; t <= n.xSegCount; t++) { 317 | const i = e * (n.xSegCount + 1) + t; 318 | if ( 319 | ((n.attributes.uv.values[2 * i] = 320 | t / n.xSegCount), 321 | (n.attributes.uv.values[2 * i + 1] = 322 | 1 - e / n.ySegCount), 323 | (n.attributes.uvNorm.values[2 * i] = 324 | (t / n.xSegCount) * 2 - 1), 325 | (n.attributes.uvNorm.values[ 326 | 2 * i + 1 327 | ] = 1 - (e / n.ySegCount) * 2), 328 | t < n.xSegCount && e < n.ySegCount) 329 | ) { 330 | const s = e * n.xSegCount + t; 331 | (n.attributes.index.values[6 * s] = 332 | i), 333 | (n.attributes.index.values[ 334 | 6 * s + 1 335 | ] = i + 1 + n.xSegCount), 336 | (n.attributes.index.values[ 337 | 6 * s + 2 338 | ] = i + 1), 339 | 
(n.attributes.index.values[ 340 | 6 * s + 3 341 | ] = i + 1), 342 | (n.attributes.index.values[ 343 | 6 * s + 4 344 | ] = i + 1 + n.xSegCount), 345 | (n.attributes.index.values[ 346 | 6 * s + 5 347 | ] = i + 2 + n.xSegCount); 348 | } 349 | } 350 | n.attributes.uv.update(), 351 | n.attributes.uvNorm.update(), 352 | n.attributes.index.update(), 353 | _miniGl.debug('Geometry.setTopology', { 354 | uv: n.attributes.uv, 355 | uvNorm: n.attributes.uvNorm, 356 | index: n.attributes.index, 357 | }); 358 | } 359 | setSize(width = 1, height = 1, orientation = 'xz') { 360 | const geometry = this; 361 | (geometry.width = width), 362 | (geometry.height = height), 363 | (geometry.orientation = orientation), 364 | (geometry.attributes.position.values && 365 | geometry.attributes.position.values 366 | .length === 367 | 3 * geometry.vertexCount) || 368 | (geometry.attributes.position.values = 369 | new Float32Array( 370 | 3 * geometry.vertexCount 371 | )); 372 | const o = width / -2, 373 | r = height / -2, 374 | segment_width = width / geometry.xSegCount, 375 | segment_height = 376 | height / geometry.ySegCount; 377 | for ( 378 | let yIndex = 0; 379 | yIndex <= geometry.ySegCount; 380 | yIndex++ 381 | ) { 382 | const t = r + yIndex * segment_height; 383 | for ( 384 | let xIndex = 0; 385 | xIndex <= geometry.xSegCount; 386 | xIndex++ 387 | ) { 388 | const r = o + xIndex * segment_width, 389 | l = 390 | yIndex * 391 | (geometry.xSegCount + 1) + 392 | xIndex; 393 | (geometry.attributes.position.values[ 394 | 3 * l + 395 | 'xyz'.indexOf(orientation[0]) 396 | ] = r), 397 | (geometry.attributes.position.values[ 398 | 3 * l + 399 | 'xyz'.indexOf( 400 | orientation[1] 401 | ) 402 | ] = -t); 403 | } 404 | } 405 | geometry.attributes.position.update(), 406 | _miniGl.debug('Geometry.setSize', { 407 | position: geometry.attributes.position, 408 | }); 409 | } 410 | }, 411 | }, 412 | Mesh: { 413 | enumerable: !1, 414 | value: class { 415 | constructor(geometry, material) { 416 | const mesh = 
this; 417 | (mesh.geometry = geometry), 418 | (mesh.material = material), 419 | (mesh.wireframe = !1), 420 | (mesh.attributeInstances = []), 421 | Object.entries( 422 | mesh.geometry.attributes 423 | ).forEach(([e, attribute]) => { 424 | mesh.attributeInstances.push({ 425 | attribute: attribute, 426 | location: attribute.attach( 427 | e, 428 | mesh.material.program 429 | ), 430 | }); 431 | }), 432 | _miniGl.meshes.push(mesh), 433 | _miniGl.debug('Mesh.constructor', { 434 | mesh: mesh, 435 | }); 436 | } 437 | draw() { 438 | context.useProgram(this.material.program), 439 | this.material.uniformInstances.forEach( 440 | ({ uniform: e, location: t }) => 441 | e.update(t) 442 | ), 443 | this.attributeInstances.forEach( 444 | ({ attribute: e, location: t }) => 445 | e.use(t) 446 | ), 447 | context.drawElements( 448 | this.wireframe 449 | ? context.LINES 450 | : context.TRIANGLES, 451 | this.geometry.attributes.index.values 452 | .length, 453 | context.UNSIGNED_SHORT, 454 | 0 455 | ); 456 | } 457 | remove() { 458 | _miniGl.meshes = _miniGl.meshes.filter( 459 | (e) => e != this 460 | ); 461 | } 462 | }, 463 | }, 464 | Attribute: { 465 | enumerable: !1, 466 | value: class { 467 | constructor(e) { 468 | (this.type = context.FLOAT), 469 | (this.normalized = !1), 470 | (this.buffer = context.createBuffer()), 471 | Object.assign(this, e), 472 | this.update(); 473 | } 474 | update() { 475 | void 0 !== this.values && 476 | (context.bindBuffer( 477 | this.target, 478 | this.buffer 479 | ), 480 | context.bufferData( 481 | this.target, 482 | this.values, 483 | context.STATIC_DRAW 484 | )); 485 | } 486 | attach(e, t) { 487 | const n = context.getAttribLocation(t, e); 488 | return ( 489 | this.target === context.ARRAY_BUFFER && 490 | (context.enableVertexAttribArray(n), 491 | context.vertexAttribPointer( 492 | n, 493 | this.size, 494 | this.type, 495 | this.normalized, 496 | 0, 497 | 0 498 | )), 499 | n 500 | ); 501 | } 502 | use(e) { 503 | context.bindBuffer(this.target, this.buffer), 
504 | this.target === context.ARRAY_BUFFER && 505 | (context.enableVertexAttribArray(e), 506 | context.vertexAttribPointer( 507 | e, 508 | this.size, 509 | this.type, 510 | this.normalized, 511 | 0, 512 | 0 513 | )); 514 | } 515 | }, 516 | }, 517 | }); 518 | const a = [1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1]; 519 | _miniGl.commonUniforms = { 520 | projectionMatrix: new _miniGl.Uniform({ 521 | type: 'mat4', 522 | value: a, 523 | }), 524 | modelViewMatrix: new _miniGl.Uniform({ 525 | type: 'mat4', 526 | value: a, 527 | }), 528 | resolution: new _miniGl.Uniform({ 529 | type: 'vec2', 530 | value: [1, 1], 531 | }), 532 | aspectRatio: new _miniGl.Uniform({ type: 'float', value: 1 }), 533 | }; 534 | } catch {} 535 | } 536 | setSize(e = 640, t = 480) { 537 | (this.width = e), 538 | (this.height = t), 539 | (this.canvas.width = e), 540 | (this.canvas.height = t), 541 | this.gl.viewport(0, 0, e, t), 542 | (this.commonUniforms.resolution.value = [e, t]), 543 | (this.commonUniforms.aspectRatio.value = e / t), 544 | this.debug('MiniGL.setSize', { width: e, height: t }); 545 | } 546 | setOrthographicCamera(e = 0, t = 0, n = 0, i = -2e3, s = 2e3) { 547 | (this.commonUniforms.projectionMatrix.value = [ 548 | 2 / this.width, 549 | 0, 550 | 0, 551 | 0, 552 | 0, 553 | 2 / this.height, 554 | 0, 555 | 0, 556 | 0, 557 | 0, 558 | 2 / (i - s), 559 | 0, 560 | e, 561 | t, 562 | n, 563 | 1, 564 | ]), 565 | this.debug( 566 | 'setOrthographicCamera', 567 | this.commonUniforms.projectionMatrix.value 568 | ); 569 | } 570 | render() { 571 | this.gl.clearColor(0, 0, 0, 0), 572 | this.gl.clearDepth(1), 573 | this.meshes.forEach((e) => e.draw()); 574 | } 575 | } 576 | function e(object, propertyName, val) { 577 | return ( 578 | propertyName in object 579 | ? 
Object.defineProperty(object, propertyName, { 580 | value: val, 581 | enumerable: !0, 582 | configurable: !0, 583 | writable: !0, 584 | }) 585 | : (object[propertyName] = val), 586 | object 587 | ); 588 | } 589 | export class Gradient { 590 | constructor(...t) { 591 | e(this, 'el', void 0), 592 | e(this, 'cssVarRetries', 0), 593 | e(this, 'maxCssVarRetries', 200), 594 | e(this, 'angle', 0), 595 | e(this, 'isLoadedClass', !1), 596 | e(this, 'isScrolling', !1), 597 | e(this, 'scrollingTimeout', void 0), 598 | e(this, 'scrollingRefreshDelay', 200), 599 | e(this, 'isIntersecting', !1), 600 | e(this, 'shaderFiles', void 0), 601 | e(this, 'vertexShader', void 0), 602 | e(this, 'sectionColors', void 0), 603 | e(this, 'computedCanvasStyle', void 0), 604 | e(this, 'conf', void 0), 605 | e(this, 'uniforms', void 0), 606 | e(this, 't', 1253106), 607 | e(this, 'last', 0), 608 | e(this, 'width', void 0), 609 | e(this, 'minWidth', 1111), 610 | e(this, 'height', 600), 611 | e(this, 'xSegCount', void 0), 612 | e(this, 'ySegCount', void 0), 613 | e(this, 'mesh', void 0), 614 | e(this, 'material', void 0), 615 | e(this, 'geometry', void 0), 616 | e(this, 'minigl', void 0), 617 | e(this, 'scrollObserver', void 0), 618 | e(this, 'amp', 320), 619 | e(this, 'seed', 5), 620 | e(this, 'freqX', 14e-5), 621 | e(this, 'freqY', 29e-5), 622 | e(this, 'freqDelta', 1e-5), 623 | e(this, 'activeColors', [1, 1, 1, 1]), 624 | e(this, 'isMetaKey', !1), 625 | e(this, 'isGradientLegendVisible', !1), 626 | e(this, 'isMouseDown', !1), 627 | e(this, 'handleScroll', () => { 628 | clearTimeout(this.scrollingTimeout), 629 | (this.scrollingTimeout = setTimeout( 630 | this.handleScrollEnd, 631 | this.scrollingRefreshDelay 632 | )), 633 | this.isGradientLegendVisible && this.hideGradientLegend(), 634 | this.conf.playing && 635 | ((this.isScrolling = !0), this.pause()); 636 | }), 637 | e(this, 'handleScrollEnd', () => { 638 | (this.isScrolling = !1), this.isIntersecting && this.play(); 639 | }), 640 | e(this, 
'resize', () => { 641 | (this.width = window.innerWidth), 642 | this.minigl.setSize(this.width, this.height), 643 | this.minigl.setOrthographicCamera(), 644 | (this.xSegCount = Math.ceil( 645 | this.width * this.conf.density[0] 646 | )), 647 | (this.ySegCount = Math.ceil( 648 | this.height * this.conf.density[1] 649 | )), 650 | this.mesh.geometry.setTopology( 651 | this.xSegCount, 652 | this.ySegCount 653 | ), 654 | this.mesh.geometry.setSize(this.width, this.height), 655 | (this.mesh.material.uniforms.u_shadow_power.value = 656 | this.width < 600 ? 5 : 6); 657 | }), 658 | e(this, 'handleMouseDown', (e) => { 659 | this.isGradientLegendVisible && 660 | ((this.isMetaKey = e.metaKey), 661 | (this.isMouseDown = !0), 662 | !1 === this.conf.playing && 663 | requestAnimationFrame(this.animate)); 664 | }), 665 | e(this, 'handleMouseUp', () => { 666 | this.isMouseDown = !1; 667 | }), 668 | e(this, 'animate', (e) => { 669 | if (!this.shouldSkipFrame(e) || this.isMouseDown) { 670 | if ( 671 | ((this.t += Math.min(e - this.last, 1e3 / 15)), 672 | (this.last = e), 673 | this.isMouseDown) 674 | ) { 675 | let e = 160; 676 | this.isMetaKey && (e = -160), (this.t += e); 677 | } 678 | (this.mesh.material.uniforms.u_time.value = this.t), 679 | this.minigl.render(); 680 | } 681 | if (0 !== this.last && this.isStatic) 682 | return this.minigl.render(), void this.disconnect(); 683 | (this.conf.playing || this.isMouseDown) && 684 | requestAnimationFrame(this.animate); 685 | }), 686 | e(this, 'addIsLoadedClass', () => { 687 | !this.isLoadedClass && 688 | ((this.isLoadedClass = !0), 689 | this.el.classList.add('isLoaded'), 690 | setTimeout(() => { 691 | try { 692 | this.el.parentElement.classList.add('isLoaded'); 693 | } catch {} 694 | }, 3e3)); 695 | }), 696 | e(this, 'pause', () => { 697 | this.conf.playing = !1; 698 | }), 699 | e(this, 'play', () => { 700 | requestAnimationFrame(this.animate), (this.conf.playing = !0); 701 | }), 702 | e( 703 | this, 704 | 'initGradient', 705 | 
(selector) => ( 706 | (this.el = document.querySelector(selector)), 707 | this.connect(), 708 | this 709 | ) 710 | ); 711 | } 712 | async connect() { 713 | (this.shaderFiles = { 714 | vertex: 'varying vec3 v_color;\n\nvoid main() {\n float time = u_time * u_global.noiseSpeed;\n\n vec2 noiseCoord = resolution * uvNorm * u_global.noiseFreq;\n\n vec2 st = 1. - uvNorm.xy;\n\n //\n // Tilting the plane\n //\n\n // Front-to-back tilt\n float tilt = resolution.y / 2.0 * uvNorm.y;\n\n // Left-to-right angle\n float incline = resolution.x * uvNorm.x / 2.0 * u_vertDeform.incline;\n\n // Up-down shift to offset incline\n float offset = resolution.x / 2.0 * u_vertDeform.incline * mix(u_vertDeform.offsetBottom, u_vertDeform.offsetTop, uv.y);\n\n //\n // Vertex noise\n //\n\n float noise = snoise(vec3(\n noiseCoord.x * u_vertDeform.noiseFreq.x + time * u_vertDeform.noiseFlow,\n noiseCoord.y * u_vertDeform.noiseFreq.y,\n time * u_vertDeform.noiseSpeed + u_vertDeform.noiseSeed\n )) * u_vertDeform.noiseAmp;\n\n // Fade noise to zero at edges\n noise *= 1.0 - pow(abs(uvNorm.y), 2.0);\n\n // Clamp to 0\n noise = max(0.0, noise);\n\n vec3 pos = vec3(\n position.x,\n position.y + tilt + incline + noise - offset,\n position.z\n );\n\n //\n // Vertex color, to be passed to fragment shader\n //\n\n if (u_active_colors[0] == 1.) {\n v_color = u_baseColor;\n }\n\n for (int i = 0; i < u_waveLayers_length; i++) {\n if (u_active_colors[i + 1] == 1.) 
{\n WaveLayers layer = u_waveLayers[i];\n\n float noise = smoothstep(\n layer.noiseFloor,\n layer.noiseCeil,\n snoise(vec3(\n noiseCoord.x * layer.noiseFreq.x + time * layer.noiseFlow,\n noiseCoord.y * layer.noiseFreq.y,\n time * layer.noiseSpeed + layer.noiseSeed\n )) / 2.0 + 0.5\n );\n\n v_color = blendNormal(v_color, layer.color, pow(noise, 4.));\n }\n }\n\n //\n // Finish\n //\n\n gl_Position = projectionMatrix * modelViewMatrix * vec4(pos, 1.0);\n}', 715 | noise: '//\n// Description : Array and textureless GLSL 2D/3D/4D simplex\n// noise functions.\n// Author : Ian McEwan, Ashima Arts.\n// Maintainer : stegu\n// Lastmod : 20110822 (ijm)\n// License : Copyright (C) 2011 Ashima Arts. All rights reserved.\n// Distributed under the MIT License. See LICENSE file.\n// https://github.com/ashima/webgl-noise\n// https://github.com/stegu/webgl-noise\n//\n\nvec3 mod289(vec3 x) {\n return x - floor(x * (1.0 / 289.0)) * 289.0;\n}\n\nvec4 mod289(vec4 x) {\n return x - floor(x * (1.0 / 289.0)) * 289.0;\n}\n\nvec4 permute(vec4 x) {\n return mod289(((x*34.0)+1.0)*x);\n}\n\nvec4 taylorInvSqrt(vec4 r)\n{\n return 1.79284291400159 - 0.85373472095314 * r;\n}\n\nfloat snoise(vec3 v)\n{\n const vec2 C = vec2(1.0/6.0, 1.0/3.0) ;\n const vec4 D = vec4(0.0, 0.5, 1.0, 2.0);\n\n// First corner\n vec3 i = floor(v + dot(v, C.yyy) );\n vec3 x0 = v - i + dot(i, C.xxx) ;\n\n// Other corners\n vec3 g = step(x0.yzx, x0.xyz);\n vec3 l = 1.0 - g;\n vec3 i1 = min( g.xyz, l.zxy );\n vec3 i2 = max( g.xyz, l.zxy );\n\n // x0 = x0 - 0.0 + 0.0 * C.xxx;\n // x1 = x0 - i1 + 1.0 * C.xxx;\n // x2 = x0 - i2 + 2.0 * C.xxx;\n // x3 = x0 - 1.0 + 3.0 * C.xxx;\n vec3 x1 = x0 - i1 + C.xxx;\n vec3 x2 = x0 - i2 + C.yyy; // 2.0*C.x = 1/3 = C.y\n vec3 x3 = x0 - D.yyy; // -1.0+3.0*C.x = -0.5 = -D.y\n\n// Permutations\n i = mod289(i);\n vec4 p = permute( permute( permute(\n i.z + vec4(0.0, i1.z, i2.z, 1.0 ))\n + i.y + vec4(0.0, i1.y, i2.y, 1.0 ))\n + i.x + vec4(0.0, i1.x, i2.x, 1.0 ));\n\n// Gradients: 7x7 points over 
a square, mapped onto an octahedron.\n// The ring size 17*17 = 289 is close to a multiple of 49 (49*6 = 294)\n float n_ = 0.142857142857; // 1.0/7.0\n vec3 ns = n_ * D.wyz - D.xzx;\n\n vec4 j = p - 49.0 * floor(p * ns.z * ns.z); // mod(p,7*7)\n\n vec4 x_ = floor(j * ns.z);\n vec4 y_ = floor(j - 7.0 * x_ ); // mod(j,N)\n\n vec4 x = x_ *ns.x + ns.yyyy;\n vec4 y = y_ *ns.x + ns.yyyy;\n vec4 h = 1.0 - abs(x) - abs(y);\n\n vec4 b0 = vec4( x.xy, y.xy );\n vec4 b1 = vec4( x.zw, y.zw );\n\n //vec4 s0 = vec4(lessThan(b0,0.0))*2.0 - 1.0;\n //vec4 s1 = vec4(lessThan(b1,0.0))*2.0 - 1.0;\n vec4 s0 = floor(b0)*2.0 + 1.0;\n vec4 s1 = floor(b1)*2.0 + 1.0;\n vec4 sh = -step(h, vec4(0.0));\n\n vec4 a0 = b0.xzyw + s0.xzyw*sh.xxyy ;\n vec4 a1 = b1.xzyw + s1.xzyw*sh.zzww ;\n\n vec3 p0 = vec3(a0.xy,h.x);\n vec3 p1 = vec3(a0.zw,h.y);\n vec3 p2 = vec3(a1.xy,h.z);\n vec3 p3 = vec3(a1.zw,h.w);\n\n//Normalise gradients\n vec4 norm = taylorInvSqrt(vec4(dot(p0,p0), dot(p1,p1), dot(p2, p2), dot(p3,p3)));\n p0 *= norm.x;\n p1 *= norm.y;\n p2 *= norm.z;\n p3 *= norm.w;\n\n// Mix final noise value\n vec4 m = max(0.6 - vec4(dot(x0,x0), dot(x1,x1), dot(x2,x2), dot(x3,x3)), 0.0);\n m = m * m;\n return 42.0 * dot( m*m, vec4( dot(p0,x0), dot(p1,x1),\n dot(p2,x2), dot(p3,x3) ) );\n}', 716 | blend: '//\n// https://github.com/jamieowen/glsl-blend\n//\n\n// Normal\n\nvec3 blendNormal(vec3 base, vec3 blend) {\n\treturn blend;\n}\n\nvec3 blendNormal(vec3 base, vec3 blend, float opacity) {\n\treturn (blendNormal(base, blend) * opacity + base * (1.0 - opacity));\n}\n\n// Screen\n\nfloat blendScreen(float base, float blend) {\n\treturn 1.0-((1.0-base)*(1.0-blend));\n}\n\nvec3 blendScreen(vec3 base, vec3 blend) {\n\treturn vec3(blendScreen(base.r,blend.r),blendScreen(base.g,blend.g),blendScreen(base.b,blend.b));\n}\n\nvec3 blendScreen(vec3 base, vec3 blend, float opacity) {\n\treturn (blendScreen(base, blend) * opacity + base * (1.0 - opacity));\n}\n\n// Multiply\n\nvec3 blendMultiply(vec3 base, vec3 blend) 
{\n\treturn base*blend;\n}\n\nvec3 blendMultiply(vec3 base, vec3 blend, float opacity) {\n\treturn (blendMultiply(base, blend) * opacity + base * (1.0 - opacity));\n}\n\n// Overlay\n\nfloat blendOverlay(float base, float blend) {\n\treturn base<0.5?(2.0*base*blend):(1.0-2.0*(1.0-base)*(1.0-blend));\n}\n\nvec3 blendOverlay(vec3 base, vec3 blend) {\n\treturn vec3(blendOverlay(base.r,blend.r),blendOverlay(base.g,blend.g),blendOverlay(base.b,blend.b));\n}\n\nvec3 blendOverlay(vec3 base, vec3 blend, float opacity) {\n\treturn (blendOverlay(base, blend) * opacity + base * (1.0 - opacity));\n}\n\n// Hard light\n\nvec3 blendHardLight(vec3 base, vec3 blend) {\n\treturn blendOverlay(blend,base);\n}\n\nvec3 blendHardLight(vec3 base, vec3 blend, float opacity) {\n\treturn (blendHardLight(base, blend) * opacity + base * (1.0 - opacity));\n}\n\n// Soft light\n\nfloat blendSoftLight(float base, float blend) {\n\treturn (blend<0.5)?(2.0*base*blend+base*base*(1.0-2.0*blend)):(sqrt(base)*(2.0*blend-1.0)+2.0*base*(1.0-blend));\n}\n\nvec3 blendSoftLight(vec3 base, vec3 blend) {\n\treturn vec3(blendSoftLight(base.r,blend.r),blendSoftLight(base.g,blend.g),blendSoftLight(base.b,blend.b));\n}\n\nvec3 blendSoftLight(vec3 base, vec3 blend, float opacity) {\n\treturn (blendSoftLight(base, blend) * opacity + base * (1.0 - opacity));\n}\n\n// Color dodge\n\nfloat blendColorDodge(float base, float blend) {\n\treturn (blend==1.0)?blend:min(base/(1.0-blend),1.0);\n}\n\nvec3 blendColorDodge(vec3 base, vec3 blend) {\n\treturn vec3(blendColorDodge(base.r,blend.r),blendColorDodge(base.g,blend.g),blendColorDodge(base.b,blend.b));\n}\n\nvec3 blendColorDodge(vec3 base, vec3 blend, float opacity) {\n\treturn (blendColorDodge(base, blend) * opacity + base * (1.0 - opacity));\n}\n\n// Color burn\n\nfloat blendColorBurn(float base, float blend) {\n\treturn (blend==0.0)?blend:max((1.0-((1.0-base)/blend)),0.0);\n}\n\nvec3 blendColorBurn(vec3 base, vec3 blend) {\n\treturn 
vec3(blendColorBurn(base.r,blend.r),blendColorBurn(base.g,blend.g),blendColorBurn(base.b,blend.b));\n}\n\nvec3 blendColorBurn(vec3 base, vec3 blend, float opacity) {\n\treturn (blendColorBurn(base, blend) * opacity + base * (1.0 - opacity));\n}\n\n// Vivid Light\n\nfloat blendVividLight(float base, float blend) {\n\treturn (blend<0.5)?blendColorBurn(base,(2.0*blend)):blendColorDodge(base,(2.0*(blend-0.5)));\n}\n\nvec3 blendVividLight(vec3 base, vec3 blend) {\n\treturn vec3(blendVividLight(base.r,blend.r),blendVividLight(base.g,blend.g),blendVividLight(base.b,blend.b));\n}\n\nvec3 blendVividLight(vec3 base, vec3 blend, float opacity) {\n\treturn (blendVividLight(base, blend) * opacity + base * (1.0 - opacity));\n}\n\n// Lighten\n\nfloat blendLighten(float base, float blend) {\n\treturn max(blend,base);\n}\n\nvec3 blendLighten(vec3 base, vec3 blend) {\n\treturn vec3(blendLighten(base.r,blend.r),blendLighten(base.g,blend.g),blendLighten(base.b,blend.b));\n}\n\nvec3 blendLighten(vec3 base, vec3 blend, float opacity) {\n\treturn (blendLighten(base, blend) * opacity + base * (1.0 - opacity));\n}\n\n// Linear burn\n\nfloat blendLinearBurn(float base, float blend) {\n\t// Note : Same implementation as BlendSubtractf\n\treturn max(base+blend-1.0,0.0);\n}\n\nvec3 blendLinearBurn(vec3 base, vec3 blend) {\n\t// Note : Same implementation as BlendSubtract\n\treturn max(base+blend-vec3(1.0),vec3(0.0));\n}\n\nvec3 blendLinearBurn(vec3 base, vec3 blend, float opacity) {\n\treturn (blendLinearBurn(base, blend) * opacity + base * (1.0 - opacity));\n}\n\n// Linear dodge\n\nfloat blendLinearDodge(float base, float blend) {\n\t// Note : Same implementation as BlendAddf\n\treturn min(base+blend,1.0);\n}\n\nvec3 blendLinearDodge(vec3 base, vec3 blend) {\n\t// Note : Same implementation as BlendAdd\n\treturn min(base+blend,vec3(1.0));\n}\n\nvec3 blendLinearDodge(vec3 base, vec3 blend, float opacity) {\n\treturn (blendLinearDodge(base, blend) * opacity + base * (1.0 - opacity));\n}\n\n// 
Linear light\n\nfloat blendLinearLight(float base, float blend) {\n\treturn blend<0.5?blendLinearBurn(base,(2.0*blend)):blendLinearDodge(base,(2.0*(blend-0.5)));\n}\n\nvec3 blendLinearLight(vec3 base, vec3 blend) {\n\treturn vec3(blendLinearLight(base.r,blend.r),blendLinearLight(base.g,blend.g),blendLinearLight(base.b,blend.b));\n}\n\nvec3 blendLinearLight(vec3 base, vec3 blend, float opacity) {\n\treturn (blendLinearLight(base, blend) * opacity + base * (1.0 - opacity));\n}', 717 | fragment: 718 | 'varying vec3 v_color;\n\nvoid main() {\n vec3 color = v_color;\n if (u_darken_top == 1.0) {\n vec2 st = gl_FragCoord.xy/resolution.xy;\n color.g -= pow(st.y + sin(-12.0) * st.x, u_shadow_power) * 0.4;\n }\n gl_FragColor = vec4(color, 1.0);\n}', 719 | }), 720 | (this.conf = { 721 | presetName: '', 722 | wireframe: !1, 723 | density: [0.06, 0.16], 724 | zoom: 1, 725 | rotation: 0, 726 | playing: !0, 727 | }), 728 | document.querySelectorAll('canvas').length < 1 729 | ? console.log('DID NOT LOAD HERO STRIPE CANVAS') 730 | : ((this.minigl = new MiniGl(this.el, null, null, !0)), 731 | requestAnimationFrame(() => { 732 | this.el && 733 | ((this.computedCanvasStyle = getComputedStyle( 734 | this.el 735 | )), 736 | this.waitForCssVars()); 737 | })); 738 | } 739 | disconnect() { 740 | this.scrollObserver && 741 | (window.removeEventListener('scroll', this.handleScroll), 742 | window.removeEventListener('mousedown', this.handleMouseDown), 743 | window.removeEventListener('mouseup', this.handleMouseUp), 744 | window.removeEventListener('keydown', this.handleKeyDown), 745 | this.scrollObserver.disconnect()), 746 | window.removeEventListener('resize', this.resize); 747 | } 748 | initMaterial() { 749 | this.uniforms = { 750 | u_time: new this.minigl.Uniform({ value: 0 }), 751 | u_shadow_power: new this.minigl.Uniform({ value: 5 }), 752 | u_darken_top: new this.minigl.Uniform({ 753 | value: '' === this.el.dataset.jsDarkenTop ? 
1 : 0, 754 | }), 755 | u_active_colors: new this.minigl.Uniform({ 756 | value: this.activeColors, 757 | type: 'vec4', 758 | }), 759 | u_global: new this.minigl.Uniform({ 760 | value: { 761 | noiseFreq: new this.minigl.Uniform({ 762 | value: [this.freqX, this.freqY], 763 | type: 'vec2', 764 | }), 765 | noiseSpeed: new this.minigl.Uniform({ value: 5e-6 }), 766 | }, 767 | type: 'struct', 768 | }), 769 | u_vertDeform: new this.minigl.Uniform({ 770 | value: { 771 | incline: new this.minigl.Uniform({ 772 | value: Math.sin(this.angle) / Math.cos(this.angle), 773 | }), 774 | offsetTop: new this.minigl.Uniform({ value: -0.5 }), 775 | offsetBottom: new this.minigl.Uniform({ value: -0.5 }), 776 | noiseFreq: new this.minigl.Uniform({ 777 | value: [3, 4], 778 | type: 'vec2', 779 | }), 780 | noiseAmp: new this.minigl.Uniform({ value: this.amp }), 781 | noiseSpeed: new this.minigl.Uniform({ value: 10 }), 782 | noiseFlow: new this.minigl.Uniform({ value: 3 }), 783 | noiseSeed: new this.minigl.Uniform({ value: this.seed }), 784 | }, 785 | type: 'struct', 786 | excludeFrom: 'fragment', 787 | }), 788 | u_baseColor: new this.minigl.Uniform({ 789 | value: this.sectionColors[0], 790 | type: 'vec3', 791 | excludeFrom: 'fragment', 792 | }), 793 | u_waveLayers: new this.minigl.Uniform({ 794 | value: [], 795 | excludeFrom: 'fragment', 796 | type: 'array', 797 | }), 798 | }; 799 | for (let e = 1; e < this.sectionColors.length; e += 1) 800 | this.uniforms.u_waveLayers.value.push( 801 | new this.minigl.Uniform({ 802 | value: { 803 | color: new this.minigl.Uniform({ 804 | value: this.sectionColors[e], 805 | type: 'vec3', 806 | }), 807 | noiseFreq: new this.minigl.Uniform({ 808 | value: [ 809 | 2 + e / this.sectionColors.length, 810 | 3 + e / this.sectionColors.length, 811 | ], 812 | type: 'vec2', 813 | }), 814 | noiseSpeed: new this.minigl.Uniform({ 815 | value: 11 + 0.3 * e, 816 | }), 817 | noiseFlow: new this.minigl.Uniform({ 818 | value: 6.5 + 0.3 * e, 819 | }), 820 | noiseSeed: new 
this.minigl.Uniform({ 821 | value: this.seed + 10 * e, 822 | }), 823 | noiseFloor: new this.minigl.Uniform({ value: 0.1 }), 824 | noiseCeil: new this.minigl.Uniform({ 825 | value: 0.63 + 0.07 * e, 826 | }), 827 | }, 828 | type: 'struct', 829 | }) 830 | ); 831 | return ( 832 | (this.vertexShader = [ 833 | this.shaderFiles.noise, 834 | this.shaderFiles.blend, 835 | this.shaderFiles.vertex, 836 | ].join('\n\n')), 837 | new this.minigl.Material( 838 | this.vertexShader, 839 | this.shaderFiles.fragment, 840 | this.uniforms 841 | ) 842 | ); 843 | } 844 | initMesh() { 845 | (this.material = this.initMaterial()), 846 | (this.geometry = new this.minigl.PlaneGeometry()), 847 | (this.mesh = new this.minigl.Mesh(this.geometry, this.material)); 848 | } 849 | shouldSkipFrame(e) { 850 | return ( 851 | !!window.document.hidden || 852 | !this.conf.playing || 853 | parseInt(e, 10) % 2 == 0 || 854 | void 0 855 | ); 856 | } 857 | updateFrequency(e) { 858 | (this.freqX += e), (this.freqY += e); 859 | } 860 | toggleColor(index) { 861 | this.activeColors[index] = 0 === this.activeColors[index] ? 
1 : 0; 862 | } 863 | showGradientLegend() { 864 | this.width > this.minWidth && 865 | ((this.isGradientLegendVisible = !0), 866 | document.body.classList.add('isGradientLegendVisible')); 867 | } 868 | hideGradientLegend() { 869 | (this.isGradientLegendVisible = !1), 870 | document.body.classList.remove('isGradientLegendVisible'); 871 | } 872 | init() { 873 | this.initGradientColors(), 874 | this.initMesh(), 875 | this.resize(), 876 | requestAnimationFrame(this.animate), 877 | window.addEventListener('resize', this.resize); 878 | } 879 | waitForCssVars() { 880 | if ( 881 | this.computedCanvasStyle && 882 | -1 !== 883 | this.computedCanvasStyle 884 | .getPropertyValue('--gradient-color-1') 885 | .indexOf('#') 886 | ) 887 | this.init(), this.addIsLoadedClass(); 888 | else { 889 | if ( 890 | ((this.cssVarRetries += 1), 891 | this.cssVarRetries > this.maxCssVarRetries) 892 | ) 893 | return ( 894 | (this.sectionColors = [ 895 | 16711680, 16711680, 16711935, 65280, 255, 896 | ]), 897 | void this.init() 898 | ); 899 | requestAnimationFrame(() => this.waitForCssVars()); 900 | } 901 | } 902 | initGradientColors() { 903 | this.sectionColors = [ 904 | '--gradient-color-1', 905 | '--gradient-color-2', 906 | '--gradient-color-3', 907 | '--gradient-color-4', 908 | ] 909 | .map((cssPropertyName) => { 910 | let hex = this.computedCanvasStyle 911 | .getPropertyValue(cssPropertyName) 912 | .trim(); 913 | if (4 === hex.length) { 914 | const hexTemp = hex 915 | .substr(1) 916 | .split('') 917 | .map((hexTemp) => hexTemp + hexTemp) 918 | .join(''); 919 | hex = `#${hexTemp}`; 920 | } 921 | return hex && `0x${hex.substr(1)}`; 922 | }) 923 | .filter(Boolean) 924 | .map(normalizeColor); 925 | } 926 | } 927 | --------------------------------------------------------------------------------